Dataset schema (one row per source file; ⌀ marks columns that may contain nulls). Each record below is one row of this table: the metadata fields on a single pipe-delimited line, followed by the file's `content`, then a trailing stats line with the last three columns.

| column | dtype | values |
|---|---|---|
| hexsha | stringlengths | 40 .. 40 |
| size | int64 | 5 .. 2.06M |
| ext | stringclasses | 11 values |
| lang | stringclasses | 1 value |
| max_stars_repo_path | stringlengths | 3 .. 251 |
| max_stars_repo_name | stringlengths | 4 .. 130 |
| max_stars_repo_head_hexsha | stringlengths | 40 .. 78 |
| max_stars_repo_licenses | sequencelengths | 1 .. 10 |
| max_stars_count | int64 | 1 .. 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24 .. 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24 .. 24 ⌀ |
| max_issues_repo_path | stringlengths | 3 .. 251 |
| max_issues_repo_name | stringlengths | 4 .. 130 |
| max_issues_repo_head_hexsha | stringlengths | 40 .. 78 |
| max_issues_repo_licenses | sequencelengths | 1 .. 10 |
| max_issues_count | int64 | 1 .. 116k ⌀ |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24 .. 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24 .. 24 ⌀ |
| max_forks_repo_path | stringlengths | 3 .. 251 |
| max_forks_repo_name | stringlengths | 4 .. 130 |
| max_forks_repo_head_hexsha | stringlengths | 40 .. 78 |
| max_forks_repo_licenses | sequencelengths | 1 .. 10 |
| max_forks_count | int64 | 1 .. 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24 .. 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24 .. 24 ⌀ |
| content | stringlengths | 1 .. 1.05M |
| avg_line_length | float64 | 1 .. 1.02M |
| max_line_length | int64 | 3 .. 1.04M |
| alphanum_fraction | float64 | 0 .. 1 |
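A quick way to inspect rows with this schema is the Hugging Face `datasets` library. A minimal sketch; the dataset name and config below are placeholders, not taken from this page:

```python
from datasets import load_dataset

# Stream the split so the multi-megabyte "content" cells are fetched lazily.
ds = load_dataset(
    "bigcode/the-stack",        # hypothetical dataset name
    data_dir="data/python",     # hypothetical config
    split="train",
    streaming=True,
)
row = next(iter(ds))
print(row["max_stars_repo_path"], row["max_stars_count"])
print(row["content"][:200])     # first 200 characters of the stored source file
```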
8a58853ac66bc8f5b8cfad78774a49e43b593fba | 2,786 | py | Python | src/mem/slicc/ast/TypeDeclAST.py | qianlong4526888/haha | 01baf923693873c11ae072ce4dde3d8f1d7b6239 | ["BSD-3-Clause"] | 135 | 2016-10-21T03:31:49.000Z | 2022-03-25T01:22:20.000Z | src/mem/slicc/ast/TypeDeclAST.py | qianlong4526888/haha | 01baf923693873c11ae072ce4dde3d8f1d7b6239 | ["BSD-3-Clause"] | 148 | 2018-07-20T00:58:36.000Z | 2021-11-16T01:52:33.000Z | src/mem/slicc/ast/TypeDeclAST.py | qianlong4526888/haha | 01baf923693873c11ae072ce4dde3d8f1d7b6239 | ["BSD-3-Clause"] | 48 | 2016-12-08T12:03:13.000Z | 2022-02-16T09:16:13.000Z |
# Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from slicc.ast.DeclAST import DeclAST
from slicc.symbols.Type import Type
| avg_line_length: 39.8 | max_line_length: 72 | alphanum_fraction: 0.709261 |
8a59e89d09e32fec1b404a96ad1edf1ccd223adb | 8,871 | py | Python | tests/test_preempt_return.py | vpv11110000/pyss | bc2226e2e66e0b551a09ae6ab6835b0bb6c7f32b | ["MIT"] | null | null | null | tests/test_preempt_return.py | vpv11110000/pyss | bc2226e2e66e0b551a09ae6ab6835b0bb6c7f32b | ["MIT"] | 2 | 2017-09-05T11:12:05.000Z | 2017-09-07T19:23:15.000Z | tests/test_preempt_return.py | vpv11110000/pyss | bc2226e2e66e0b551a09ae6ab6835b0bb6c7f32b | ["MIT"] | null | null | null |
# #!/usr/bin/python
# -*- coding: utf-8 -*-
# test_preempt_return.py
# pylint: disable=line-too-long,missing-docstring,bad-whitespace, unused-argument, too-many-locals
import sys
import os
import random
import unittest
DIRNAME_MODULE = os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0])))) + os.sep
sys.path.append(DIRNAME_MODULE)
sys.path.append(DIRNAME_MODULE + "pyss" + os.sep)
from pyss import pyssobject
from pyss.pyss_model import PyssModel
from pyss.segment import Segment
from pyss.generate import Generate
from pyss.terminate import Terminate
from pyss import logger
from pyss.table import Table
from pyss.handle import Handle
from pyss.enter import Enter
from pyss.leave import Leave
from pyss.storage import Storage
from pyss.advance import Advance
from pyss.preempt import Preempt
from pyss.g_return import GReturn
from pyss.facility import Facility
from pyss.seize import Seize
from pyss.release import Release
from pyss.transfer import Transfer
from pyss.test import Test
from pyss.pyss_const import *
if __name__ == '__main__':
    unittest.main(module="test_preempt_return")
| avg_line_length: 35.342629 | max_line_length: 106 | alphanum_fraction: 0.578289 |
8a5ab5ed3f3ad80694d11c3e4b2aca3d095ca892 | 2,400 | py | Python | python/ray/rllib/ddpg2/ddpg_evaluator.py | songqing/ray | 166000b089ee15d44635ebca00f12320f51ce587 | ["Apache-2.0"] | 1 | 2018-06-25T08:00:51.000Z | 2018-06-25T08:00:51.000Z | python/ray/rllib/ddpg2/ddpg_evaluator.py | songqing/ray | 166000b089ee15d44635ebca00f12320f51ce587 | ["Apache-2.0"] | 1 | 2018-01-26T05:11:04.000Z | 2018-01-26T05:11:04.000Z | python/ray/rllib/ddpg2/ddpg_evaluator.py | songqing/ray | 166000b089ee15d44635ebca00f12320f51ce587 | ["Apache-2.0"] | 1 | 2020-10-16T08:42:32.000Z | 2020-10-16T08:42:32.000Z |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import ray
from ray.rllib.ddpg2.models import DDPGModel
from ray.rllib.models.catalog import ModelCatalog
from ray.rllib.optimizers import PolicyEvaluator
from ray.rllib.utils.filter import NoFilter
from ray.rllib.utils.process_rollout import process_rollout
from ray.rllib.utils.sampler import SyncSampler
RemoteDDPGEvaluator = ray.remote(DDPGEvaluator)
| avg_line_length: 31.578947 | max_line_length: 77 | alphanum_fraction: 0.675417 |
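The last line of the row above wraps `DDPGEvaluator` into a Ray actor class. A minimal usage sketch, assuming Ray's actor API; the constructor arguments and method name are hypothetical, since the class body is not included in this row:

```python
import ray

ray.init()

# ray.remote(SomeClass) returns an actor class; each instance runs in its
# own worker process and is constructed with .remote(...).
evaluator = RemoteDDPGEvaluator.remote()    # constructor args omitted (hypothetical)
# future = evaluator.sample.remote()        # actor method calls return object refs
# result = ray.get(future)                  # ray.get blocks until the result is ready
```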
8a5d2dc08b304db2757537f331d99b9fccf16fe7 | 3,064 | py | Python | python/sysmap/graph.py | harryherold/sysmap | 293e5f0dc22ed709c8fd5c170662e433c039eeab | ["BSD-3-Clause"] | 1 | 2020-05-08T13:55:31.000Z | 2020-05-08T13:55:31.000Z | python/sysmap/graph.py | harryherold/sysmap | 293e5f0dc22ed709c8fd5c170662e433c039eeab | ["BSD-3-Clause"] | 3 | 2020-01-16T10:30:28.000Z | 2020-01-27T11:23:49.000Z | python/sysmap/graph.py | harryherold/sysmap | 293e5f0dc22ed709c8fd5c170662e433c039eeab | ["BSD-3-Clause"] | 1 | 2020-01-16T09:08:14.000Z | 2020-01-16T09:08:14.000Z |
from graphviz import Digraph
from collections import namedtuple
| avg_line_length: 29.461538 | max_line_length: 92 | alphanum_fraction: 0.582572 |
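Only the imports of `sysmap/graph.py` survive in this row. A self-contained sketch of the `graphviz.Digraph` API that the stripped code builds on; the example graph is invented, not recovered from sysmap:

```python
from graphviz import Digraph

dot = Digraph(comment="example network")  # a directed graph
dot.node("a", "node A")                   # node(name, label)
dot.node("b", "node B")
dot.edge("a", "b", label="a -> b")        # edge(tail, head)
print(dot.source)                         # DOT text; dot.render("out") writes a file
```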
8a5d63158988a4154bd4df2b897b694d5cad31f9 | 46,478 | py | Python | alembic/versions/1d092815507a_add_huawei_2g_managedobjects.py | bodastage/bts-database | 96df7915621dd46daf55016eedf5cfc84dd0e3a2 | ["Apache-2.0"] | 1 | 2019-08-30T01:20:14.000Z | 2019-08-30T01:20:14.000Z | alembic/versions/1d092815507a_add_huawei_2g_managedobjects.py | bodastage/bts-database | 96df7915621dd46daf55016eedf5cfc84dd0e3a2 | ["Apache-2.0"] | 1 | 2018-05-30T09:29:24.000Z | 2018-05-30T10:04:37.000Z | alembic/versions/1d092815507a_add_huawei_2g_managedobjects.py | bodastage/bts-database | 96df7915621dd46daf55016eedf5cfc84dd0e3a2 | ["Apache-2.0"] | 3 | 2018-03-10T23:29:30.000Z | 2019-02-19T22:11:09.000Z |
"""Add Huawei 2G managedobjects
Revision ID: 1d092815507a
Revises: 3fa514f1b7a9
Create Date: 2018-02-13 01:38:59.965000
"""
from alembic import op
import sqlalchemy as sa
import datetime
# revision identifiers, used by Alembic.
revision = '1d092815507a'
down_revision = '3fa514f1b7a9'
branch_labels = None
depends_on = None
| avg_line_length: 103.055432 | max_line_length: 133 | alphanum_fraction: 0.570162 |
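The revision header above survives, but the migration's `upgrade()`/`downgrade()` pair was cut off by the content truncation. For orientation, a hedged sketch of what such a pair usually looks like, reusing the module's own `op`, `sa`, and `datetime` imports; the table and column names are illustrative only, not recovered from the real migration:

```python
def upgrade():
    # Typical Alembic pattern: create the schema objects this revision adds.
    op.create_table(
        "managedobjects",  # illustrative name
        sa.Column("pk", sa.Integer, primary_key=True),
        sa.Column("name", sa.String(200), nullable=False),
        sa.Column("date_added", sa.TIMESTAMP, default=datetime.datetime.utcnow),
    )


def downgrade():
    # Mirror image of upgrade(): drop what was created.
    op.drop_table("managedobjects")
```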
8a5d6681ce10a3af268cfdb475c6d9aff87499c6 | 1,211 | py | Python | png/imageRecognition_Simple.py | tanthanadon/senior | 89fc24889b34860982b551e5ea5e0d3550505f65 | ["MIT"] | null | null | null | png/imageRecognition_Simple.py | tanthanadon/senior | 89fc24889b34860982b551e5ea5e0d3550505f65 | ["MIT"] | 5 | 2020-03-04T13:49:10.000Z | 2020-03-20T04:06:23.000Z | png/imageRecognition_Simple.py | tanthanadon/senior | 89fc24889b34860982b551e5ea5e0d3550505f65 | ["MIT"] | null | null | null |
from math import sqrt
from skimage import data
from skimage.feature import blob_dog, blob_log, blob_doh
from skimage.color import rgb2gray
from skimage import io
import matplotlib.pyplot as plt
image = io.imread("star.jpg")
image_gray = rgb2gray(image)
blobs_log = blob_log(image_gray, max_sigma=30, num_sigma=10, threshold=.1)
# Compute radii in the 3rd column.
blobs_log[:, 2] = blobs_log[:, 2] * sqrt(2)
blobs_dog = blob_dog(image_gray, max_sigma=30, threshold=.1)
blobs_dog[:, 2] = blobs_dog[:, 2] * sqrt(2)
blobs_doh = blob_doh(image_gray, max_sigma=30, threshold=.01)
blobs_list = [blobs_log, blobs_dog, blobs_doh]
colors = ['yellow', 'lime', 'red']
titles = ['Laplacian of Gaussian', 'Difference of Gaussian',
'Determinant of Hessian']
sequence = zip(blobs_list, colors, titles)
fig, axes = plt.subplots(1, 3, figsize=(9, 3), sharex=True, sharey=True)
ax = axes.ravel()
for idx, (blobs, color, title) in enumerate(sequence):
    ax[idx].set_title(title)
    ax[idx].imshow(image)
    for blob in blobs:
        y, x, r = blob
        c = plt.Circle((x, y), r, color=color, linewidth=2, fill=False)
        ax[idx].add_patch(c)
    ax[idx].set_axis_off()
plt.tight_layout()
plt.show()
| avg_line_length: 28.833333 | max_line_length: 74 | alphanum_fraction: 0.696945 |
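The script above is complete and runnable (given a local `star.jpg`). A tiny follow-up sketch, using the script's own `titles` and `blobs_list` variables, to compare how many blobs each detector finds:

```python
# Assumes titles and blobs_list as defined in the script above.
for name, blobs in zip(titles, blobs_list):
    print(f"{name}: {len(blobs)} blobs detected")
```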
8a5e3b1295194140be07e7851df9a2e6e39cc960 | 529 | py | Python | Day22_Pong/ball.py | syt1209/PythonProjects | 0409dbd3c0b0ddf00debc38875059c828eb31dec | ["MIT"] | 1 | 2021-02-16T00:59:29.000Z | 2021-02-16T00:59:29.000Z | Day22_Pong/ball.py | syt1209/PythonProjects | 0409dbd3c0b0ddf00debc38875059c828eb31dec | ["MIT"] | null | null | null | Day22_Pong/ball.py | syt1209/PythonProjects | 0409dbd3c0b0ddf00debc38875059c828eb31dec | ["MIT"] | null | null | null |
from turtle import Turtle
SPEED = 10
| avg_line_length: 20.346154 | max_line_length: 49 | alphanum_fraction: 0.551985 |
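Only the import and the `SPEED` constant of `ball.py` survive; the `Ball` class itself was truncated away. A purely hypothetical minimal version for a Pong clone, to illustrate how `Turtle` is typically subclassed here (not the author's code):

```python
class Ball(Turtle):
    """Hypothetical reconstruction for illustration only."""

    def __init__(self):
        super().__init__()
        self.shape("circle")
        self.penup()          # do not draw a trail while moving
        self.x_move = SPEED
        self.y_move = SPEED

    def move(self):
        self.goto(self.xcor() + self.x_move, self.ycor() + self.y_move)

    def bounce_y(self):
        self.y_move *= -1     # reflect off the top/bottom walls
```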
8a5eacf969c02364f5e4daefab7f03dd79ff6a0f | 447 | py | Python | programs/combine/jry2/treedef.py | lsrcz/SyGuS | 5aab1b2c324d8a3c20e51f8acb2866190a1431d3 | ["MIT"] | 1 | 2021-07-11T08:32:32.000Z | 2021-07-11T08:32:32.000Z | programs/combine/jry2/treedef.py | lsrcz/SyGuS | 5aab1b2c324d8a3c20e51f8acb2866190a1431d3 | ["MIT"] | null | null | null | programs/combine/jry2/treedef.py | lsrcz/SyGuS | 5aab1b2c324d8a3c20e51f8acb2866190a1431d3 | ["MIT"] | 1 | 2020-12-20T16:08:10.000Z | 2020-12-20T16:08:10.000Z |
from jry2.semantics import Expr
| avg_line_length: 20.318182 | max_line_length: 80 | alphanum_fraction: 0.630872 |
8a5ec6dd61aef0b828a5fdf8e68715be0262b256 | 103,584 | py | Python | src/sage/modular/dirichlet.py | hsm207/sage | 020bd59ec28717bfab9af44d2231c53da1ff99f1 | ["BSL-1.0"] | 1 | 2021-10-18T01:24:04.000Z | 2021-10-18T01:24:04.000Z | src/sage/modular/dirichlet.py | hsm207/sage | 020bd59ec28717bfab9af44d2231c53da1ff99f1 | ["BSL-1.0"] | null | null | null | src/sage/modular/dirichlet.py | hsm207/sage | 020bd59ec28717bfab9af44d2231c53da1ff99f1 | ["BSL-1.0"] | null | null | null |
# -*- coding: utf-8 -*-
r"""
Dirichlet characters
A :class:`DirichletCharacter` is the extension of a homomorphism
.. MATH::
(\ZZ/N\ZZ)^* \to R^*,
for some ring `R`, to the map `\ZZ/N\ZZ \to R` obtained by sending
those `x\in\ZZ/N\ZZ` with `\gcd(N,x)>1` to `0`.
EXAMPLES::
sage: G = DirichletGroup(35)
sage: x = G.gens()
sage: e = x[0]*x[1]^2; e
Dirichlet character modulo 35 of conductor 35 mapping 22 |--> zeta12^3, 31 |--> zeta12^2 - 1
sage: e.order()
12
This illustrates a canonical coercion::
sage: e = DirichletGroup(5, QQ).0
sage: f = DirichletGroup(5,CyclotomicField(4)).0
sage: e*f
Dirichlet character modulo 5 of conductor 5 mapping 2 |--> -zeta4
AUTHORS:
- William Stein (2005-09-02): Fixed bug in comparison of Dirichlet
characters. It was checking that their values were the same, but
not checking that they had the same level!
- William Stein (2006-01-07): added more examples
- William Stein (2006-05-21): added examples of everything; fix a
*lot* of tiny bugs and design problem that became clear when
creating examples.
- Craig Citro (2008-02-16): speed up __call__ method for
Dirichlet characters, miscellaneous fixes
- Julian Rueth (2014-03-06): use UniqueFactory to cache DirichletGroups
"""
# ****************************************************************************
# Copyright (C) 2004-2006 William Stein <[email protected]>
# Copyright (C) 2014 Julian Rueth <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# https://www.gnu.org/licenses/
# ****************************************************************************
from __future__ import print_function
import sage.categories.all as cat
from sage.misc.all import prod
import sage.misc.prandom as random
import sage.modules.free_module as free_module
import sage.modules.free_module_element as free_module_element
import sage.rings.all as rings
import sage.rings.number_field.number_field as number_field
from sage.libs.pari import pari
from sage.categories.map import Map
from sage.rings.rational_field import is_RationalField
from sage.rings.complex_mpfr import is_ComplexField
from sage.rings.qqbar import is_AlgebraicField
from sage.rings.ring import is_Ring
from sage.misc.functional import round
from sage.misc.cachefunc import cached_method
from sage.misc.fast_methods import WithEqualityById
from sage.structure.element import MultiplicativeGroupElement
from sage.structure.gens_py import multiplicative_iterator
from sage.structure.parent import Parent
from sage.structure.sequence import Sequence
from sage.structure.factory import UniqueFactory
from sage.structure.richcmp import richcmp
from sage.arith.all import (binomial, bernoulli, kronecker, factor, gcd,
lcm, fundamental_discriminant, euler_phi, factorial, valuation)
def trivial_character(N, base_ring=rings.RationalField()):
r"""
Return the trivial character of the given modulus, with values in the given
base ring.
EXAMPLES::
sage: t = trivial_character(7)
sage: [t(x) for x in [0..20]]
[0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1]
sage: t(1).parent()
Rational Field
sage: trivial_character(7, Integers(3))(1).parent()
Ring of integers modulo 3
"""
return DirichletGroup(N, base_ring)(1)
TrivialCharacter = trivial_character
def kronecker_character(d):
"""
Return the quadratic Dirichlet character (d/.) of minimal
conductor.
EXAMPLES::
sage: kronecker_character(97*389*997^2)
Dirichlet character modulo 37733 of conductor 37733 mapping 1557 |--> -1, 37346 |--> -1
::
sage: a = kronecker_character(1)
sage: b = DirichletGroup(2401,QQ)(a) # NOTE -- over QQ!
sage: b.modulus()
2401
AUTHORS:
- Jon Hanke (2006-08-06)
"""
d = rings.Integer(d)
if d == 0:
raise ValueError("d must be nonzero")
D = fundamental_discriminant(d)
G = DirichletGroup(abs(D), rings.RationalField())
return G([kronecker(D,u) for u in G.unit_gens()])
def kronecker_character_upside_down(d):
"""
Return the quadratic Dirichlet character (./d) of conductor d, for
d > 0.
EXAMPLES::
sage: kronecker_character_upside_down(97*389*997^2)
Dirichlet character modulo 37506941597 of conductor 37733 mapping 13533432536 |--> -1, 22369178537 |--> -1, 14266017175 |--> 1
AUTHORS:
- Jon Hanke (2006-08-06)
"""
d = rings.Integer(d)
if d <= 0:
raise ValueError("d must be positive")
G = DirichletGroup(d, rings.RationalField())
return G([kronecker(u.lift(),d) for u in G.unit_gens()])
def is_DirichletCharacter(x):
r"""
Return True if x is of type DirichletCharacter.
EXAMPLES::
sage: from sage.modular.dirichlet import is_DirichletCharacter
sage: is_DirichletCharacter(trivial_character(3))
True
sage: is_DirichletCharacter([1])
False
"""
return isinstance(x, DirichletCharacter)
def _pari_conversion(self):
r"""
Prepare data for the conversion of the character to Pari.
OUTPUT:
pair (G, v) where G is `(\ZZ / N \ZZ)^*` where `N` is the modulus
EXAMPLES::
sage: chi4 = DirichletGroup(4).gen()
sage: chi4._pari_conversion()
([[4, [0]], [2, [2], [3]], [[2]~, Vecsmall([2])],
[[4], [[1, matrix(0,2)]], Mat(1), [3], [2], [0]], Mat(1)], [1])
sage: chi = DirichletGroup(24)([1,-1,-1]); chi
Dirichlet character modulo 24 of conductor 24
mapping 7 |--> 1, 13 |--> -1, 17 |--> -1
sage: chi._pari_conversion()
([[24, [0]], [8, [2, 2, 2], [7, 13, 17]],
[[2, 2, 3]~, Vecsmall([3, 3, 1])],
[[8, 8, 3], [[1, matrix(0,2)], [1, matrix(0,2)], [2, Mat([2, 1])]],
[1, 0, 0; 0, 1, 0; 0, 0, 1], [7, 13, 17], [2, 2, 2], [0, 0, 0]],
[1, 0, 0; 0, 1, 0; 0, 0, 1]], [0, 1, 1])
"""
G = pari.znstar(self.modulus(), 1)
pari_orders = G[1][1]
pari_gens = G[1][2]
# one should use the following, but this does not work
# pari_orders = G.cyc()
# pari_gens = G.gen()
values_on_gens = (self(x) for x in pari_gens)
# now compute the input for pari (list of exponents)
P = self.parent()
if is_ComplexField(P.base_ring()):
zeta = P.zeta()
zeta_argument = zeta.argument()
v = [int(x.argument() / zeta_argument) for x in values_on_gens]
else:
dlog = P._zeta_dlog
v = [dlog[x] for x in values_on_gens]
m = P.zeta_order()
v = [(vi * oi) // m for vi, oi in zip(v, pari_orders)]
return (G, v)
def conrey_number(self):
r"""
Return the Conrey number for this character.
This is a positive integer coprime to q that identifies a
Dirichlet character of modulus q.
See https://www.lmfdb.org/knowledge/show/character.dirichlet.conrey
EXAMPLES::
sage: chi4 = DirichletGroup(4).gen()
sage: chi4.conrey_number()
3
sage: chi = DirichletGroup(24)([1,-1,-1]); chi
Dirichlet character modulo 24 of conductor 24
mapping 7 |--> 1, 13 |--> -1, 17 |--> -1
sage: chi.conrey_number()
5
sage: chi = DirichletGroup(60)([1,-1,I])
sage: chi.conrey_number()
17
sage: chi = DirichletGroup(420)([1,-1,-I,1])
sage: chi.conrey_number()
113
TESTS::
sage: eps1 = DirichletGroup(5)([-1])
sage: eps2 = DirichletGroup(5,QQ)([-1])
sage: eps1.conrey_number() == eps2.conrey_number()
True
"""
G, v = self._pari_conversion()
return pari.znconreyexp(G, v).sage()
def lmfdb_page(self):
r"""
Open the LMFDB web page of the character in a browser.
See https://www.lmfdb.org
EXAMPLES::
sage: E = DirichletGroup(4).gen()
sage: E.lmfdb_page() # optional -- webbrowser
"""
import webbrowser
lmfdb_url = 'https://www.lmfdb.org/Character/Dirichlet/{}/{}'
url = lmfdb_url.format(self.modulus(), self.conrey_number())
webbrowser.open(url)
def galois_orbit(self, sort=True):
r"""
Return the orbit of this character under the action of the absolute
Galois group of the prime subfield of the base ring.
EXAMPLES::
sage: G = DirichletGroup(30); e = G.1
sage: e.galois_orbit()
[Dirichlet character modulo 30 of conductor 5 mapping 11 |--> 1, 7 |--> -zeta4,
Dirichlet character modulo 30 of conductor 5 mapping 11 |--> 1, 7 |--> zeta4]
Another example::
sage: G = DirichletGroup(13)
sage: G.galois_orbits()
[
[Dirichlet character modulo 13 of conductor 1 mapping 2 |--> 1],
...,
[Dirichlet character modulo 13 of conductor 13 mapping 2 |--> -1]
]
sage: e = G.0
sage: e
Dirichlet character modulo 13 of conductor 13 mapping 2 |--> zeta12
sage: e.galois_orbit()
[Dirichlet character modulo 13 of conductor 13 mapping 2 |--> zeta12,
Dirichlet character modulo 13 of conductor 13 mapping 2 |--> -zeta12^3 + zeta12,
Dirichlet character modulo 13 of conductor 13 mapping 2 |--> zeta12^3 - zeta12,
Dirichlet character modulo 13 of conductor 13 mapping 2 |--> -zeta12]
sage: e = G.0^2; e
Dirichlet character modulo 13 of conductor 13 mapping 2 |--> zeta12^2
sage: e.galois_orbit()
[Dirichlet character modulo 13 of conductor 13 mapping 2 |--> zeta12^2, Dirichlet character modulo 13 of conductor 13 mapping 2 |--> -zeta12^2 + 1]
A non-example::
sage: chi = DirichletGroup(7, Integers(9), zeta = Integers(9)(2)).0
sage: chi.galois_orbit()
Traceback (most recent call last):
...
TypeError: Galois orbits only defined if base ring is an integral domain
"""
if not self.base_ring().is_integral_domain():
raise TypeError("Galois orbits only defined if base ring is an integral domain")
k = self.order()
if k <= 2:
return [self]
P = self.parent()
z = self.element()
o = int(z.additive_order())
Auts = set([m % o for m in P._automorphisms()])
v = [P.element_class(P, m * z, check=False) for m in Auts]
if sort:
v.sort()
return v
def gauss_sum(self, a=1):
r"""
Return a Gauss sum associated to this Dirichlet character.
The Gauss sum associated to `\chi` is
.. MATH::
g_a(\chi) = \sum_{r \in \ZZ/m\ZZ} \chi(r)\,\zeta^{ar},
where `m` is the modulus of `\chi` and `\zeta` is a primitive
`m^{th}` root of unity.
FACTS: If the modulus is a prime `p` and the character is
nontrivial, then the Gauss sum has absolute value `\sqrt{p}`.
CACHING: Computed Gauss sums are *not* cached with this character.
EXAMPLES::
sage: G = DirichletGroup(3)
sage: e = G([-1])
sage: e.gauss_sum(1)
2*zeta6 - 1
sage: e.gauss_sum(2)
-2*zeta6 + 1
sage: norm(e.gauss_sum())
3
::
sage: G = DirichletGroup(13)
sage: e = G.0
sage: e.gauss_sum()
-zeta156^46 + zeta156^45 + zeta156^42 + zeta156^41 + 2*zeta156^40 + zeta156^37 - zeta156^36 - zeta156^34 - zeta156^33 - zeta156^31 + 2*zeta156^30 + zeta156^28 - zeta156^24 - zeta156^22 + zeta156^21 + zeta156^20 - zeta156^19 + zeta156^18 - zeta156^16 - zeta156^15 - 2*zeta156^14 - zeta156^10 + zeta156^8 + zeta156^7 + zeta156^6 + zeta156^5 - zeta156^4 - zeta156^2 - 1
sage: factor(norm(e.gauss_sum()))
13^24
TESTS:
The field of algebraic numbers is supported (:trac:`19056`)::
sage: G = DirichletGroup(7, QQbar)
sage: G[1].gauss_sum()
-2.440133358345538? + 1.022618791871794?*I
Check that :trac:`19060` is fixed::
sage: K.<z> = CyclotomicField(8)
sage: G = DirichletGroup(13, K)
sage: chi = G([z^2])
sage: chi.gauss_sum()
zeta52^22 + zeta52^21 + zeta52^19 - zeta52^16 + zeta52^15 + zeta52^14 + zeta52^12 - zeta52^11 - zeta52^10 - zeta52^7 - zeta52^5 + zeta52^4
Check that :trac:`25127` is fixed::
sage: G = DirichletGroup(1)
sage: chi = G.one()
sage: chi.gauss_sum()
1
.. SEEALSO::
- :func:`sage.arith.misc.gauss_sum` for general finite fields
- :func:`sage.rings.padics.misc.gauss_sum` for a `p`-adic version
"""
G = self.parent()
K = G.base_ring()
chi = self
m = G.modulus()
if is_ComplexField(K):
return self.gauss_sum_numerical(a=a)
elif is_AlgebraicField(K):
L = K
zeta = L.zeta(m)
elif number_field.is_CyclotomicField(K) or is_RationalField(K):
chi = chi.minimize_base_ring()
n = lcm(m, G.zeta_order())
L = rings.CyclotomicField(n)
zeta = L.gen(0) ** (n // m)
else:
raise NotImplementedError("Gauss sums only currently implemented when the base ring is a cyclotomic field, QQ, QQbar, or a complex field")
zeta = zeta ** a
g = L(chi(0))
z = L.one()
for c in chi.values()[1:]:
z *= zeta
g += L(c)*z
return g
def gauss_sum_numerical(self, prec=53, a=1):
r"""
Return a Gauss sum associated to this Dirichlet character as an
approximate complex number with prec bits of precision.
INPUT:
- ``prec`` -- integer (default: 53), *bits* of precision
- ``a`` -- integer, as for :meth:`gauss_sum`.
The Gauss sum associated to `\chi` is
.. MATH::
g_a(\chi) = \sum_{r \in \ZZ/m\ZZ} \chi(r)\,\zeta^{ar},
where `m` is the modulus of `\chi` and `\zeta` is a primitive
`m^{th}` root of unity.
EXAMPLES::
sage: G = DirichletGroup(3)
sage: e = G.0
sage: abs(e.gauss_sum_numerical())
1.7320508075...
sage: sqrt(3.0)
1.73205080756888
sage: e.gauss_sum_numerical(a=2)
-...e-15 - 1.7320508075...*I
sage: e.gauss_sum_numerical(a=2, prec=100)
4.7331654313260708324703713917e-30 - 1.7320508075688772935274463415*I
sage: G = DirichletGroup(13)
sage: H = DirichletGroup(13, CC)
sage: e = G.0
sage: f = H.0
sage: e.gauss_sum_numerical()
-3.07497205... + 1.8826966926...*I
sage: f.gauss_sum_numerical()
-3.07497205... + 1.8826966926...*I
sage: abs(e.gauss_sum_numerical())
3.60555127546...
sage: abs(f.gauss_sum_numerical())
3.60555127546...
sage: sqrt(13.0)
3.60555127546399
TESTS:
The field of algebraic numbers is supported (:trac:`19056`)::
sage: G = DirichletGroup(7, QQbar)
sage: G[1].gauss_sum_numerical()
-2.44013335834554 + 1.02261879187179*I
"""
G = self.parent()
K = G.base_ring()
if is_ComplexField(K):
phi = lambda t : t
CC = K
elif is_AlgebraicField(K):
from sage.rings.complex_mpfr import ComplexField
CC = ComplexField(prec)
phi = CC.coerce_map_from(K)
elif number_field.is_CyclotomicField(K) or is_RationalField(K):
phi = K.complex_embedding(prec)
CC = phi.codomain()
else:
raise NotImplementedError("Gauss sums only currently implemented when the base ring is a cyclotomic field, QQ, QQbar, or a complex field")
zeta = CC.zeta(G.modulus()) ** a
g = phi(self(0))
z = CC.one()
for c in self.values()[1:]:
z *= zeta
g += phi(c)*z
return g
def jacobi_sum(self, char, check=True):
r"""
Return the Jacobi sum associated to these Dirichlet characters
(i.e., J(self,char)).
This is defined as
.. MATH::
J(\chi, \psi) = \sum_{a \in \ZZ / N\ZZ} \chi(a) \psi(1-a)
where `\chi` and `\psi` are both characters modulo `N`.
EXAMPLES::
sage: D = DirichletGroup(13)
sage: e = D.0
sage: f = D[-2]
sage: e.jacobi_sum(f)
3*zeta12^2 + 2*zeta12 - 3
sage: f.jacobi_sum(e)
3*zeta12^2 + 2*zeta12 - 3
sage: p = 7
sage: DP = DirichletGroup(p)
sage: f = DP.0
sage: e.jacobi_sum(f)
Traceback (most recent call last):
...
NotImplementedError: Characters must be from the same Dirichlet Group.
sage: all_jacobi_sums = [(DP[i].values_on_gens(),DP[j].values_on_gens(),DP[i].jacobi_sum(DP[j]))
....: for i in range(p-1) for j in range(i, p-1)]
sage: for s in all_jacobi_sums:
....: print(s)
((1,), (1,), 5)
((1,), (zeta6,), -1)
((1,), (zeta6 - 1,), -1)
((1,), (-1,), -1)
((1,), (-zeta6,), -1)
((1,), (-zeta6 + 1,), -1)
((zeta6,), (zeta6,), -zeta6 + 3)
((zeta6,), (zeta6 - 1,), 2*zeta6 + 1)
((zeta6,), (-1,), -2*zeta6 - 1)
((zeta6,), (-zeta6,), zeta6 - 3)
((zeta6,), (-zeta6 + 1,), 1)
((zeta6 - 1,), (zeta6 - 1,), -3*zeta6 + 2)
((zeta6 - 1,), (-1,), 2*zeta6 + 1)
((zeta6 - 1,), (-zeta6,), -1)
((zeta6 - 1,), (-zeta6 + 1,), -zeta6 - 2)
((-1,), (-1,), 1)
((-1,), (-zeta6,), -2*zeta6 + 3)
((-1,), (-zeta6 + 1,), 2*zeta6 - 3)
((-zeta6,), (-zeta6,), 3*zeta6 - 1)
((-zeta6,), (-zeta6 + 1,), -2*zeta6 + 3)
((-zeta6 + 1,), (-zeta6 + 1,), zeta6 + 2)
Let's check that trivial sums are being calculated correctly::
sage: N = 13
sage: D = DirichletGroup(N)
sage: g = D(1)
sage: g.jacobi_sum(g)
11
sage: sum([g(x)*g(1-x) for x in IntegerModRing(N)])
11
And sums where exactly one character is nontrivial (see :trac:`6393`)::
sage: G = DirichletGroup(5); X=G.list(); Y=X[0]; Z=X[1]
sage: Y.jacobi_sum(Z)
-1
sage: Z.jacobi_sum(Y)
-1
Now let's take a look at a non-prime modulus::
sage: N = 9
sage: D = DirichletGroup(N)
sage: g = D(1)
sage: g.jacobi_sum(g)
3
We consider a sum with values in a finite field::
sage: g = DirichletGroup(17, GF(9,'a')).0
sage: g.jacobi_sum(g**2)
2*a
TESTS:
This shows that :trac:`6393` has been fixed::
sage: G = DirichletGroup(5); X = G.list(); Y = X[0]; Z = X[1]
sage: # Y is trivial and Z is quartic
sage: sum([Y(x)*Z(1-x) for x in IntegerModRing(5)])
-1
sage: # The value -1 above is the correct value of the Jacobi sum J(Y, Z).
sage: Y.jacobi_sum(Z); Z.jacobi_sum(Y)
-1
-1
"""
if check:
if self.parent() != char.parent():
raise NotImplementedError("Characters must be from the same Dirichlet Group.")
return sum([self(x) * char(1-x) for x in rings.IntegerModRing(self.modulus())])
def kloosterman_sum(self, a=1, b=0):
r"""
Return the "twisted" Kloosterman sum associated to this Dirichlet character.
This includes Gauss sums, classical Kloosterman sums, Sali sums, etc.
The Kloosterman sum associated to `\chi` and the integers a,b is
.. MATH::
K(a,b,\chi) = \sum_{r \in (\ZZ/m\ZZ)^\times} \chi(r)\,\zeta^{ar+br^{-1}},
where `m` is the modulus of `\chi` and `\zeta` is a primitive
`m` th root of unity. This reduces to the Gauss sum if `b=0`.
This method performs an exact calculation and returns an element of a
suitable cyclotomic field; see also :meth:`.kloosterman_sum_numerical`,
which gives an inexact answer (but is generally much quicker).
CACHING: Computed Kloosterman sums are *not* cached with this
character.
EXAMPLES::
sage: G = DirichletGroup(3)
sage: e = G([-1])
sage: e.kloosterman_sum(3,5)
-2*zeta6 + 1
sage: G = DirichletGroup(20)
sage: e = G([1 for u in G.unit_gens()])
sage: e.kloosterman_sum(7,17)
-2*zeta20^6 + 2*zeta20^4 + 4
TESTS::
sage: G = DirichletGroup(20, UniversalCyclotomicField())
sage: e = G([1 for u in G.unit_gens()])
sage: e.kloosterman_sum(7,17)
-2*E(5) - 4*E(5)^2 - 4*E(5)^3 - 2*E(5)^4
sage: G = DirichletGroup(12, QQbar)
sage: e = G.gens()[0]
sage: e.kloosterman_sum(5,11)
Traceback (most recent call last):
...
NotImplementedError: Kloosterman sums not implemented over this ring
"""
G = self.parent()
zo = G.zeta_order()
m = G.modulus()
g = 0
L = rings.CyclotomicField(m.lcm(zo))
zeta = L.gen(0)
try:
self(1) * zeta**(a+b)
except TypeError:
raise NotImplementedError('Kloosterman sums not implemented '
'over this ring')
n = zeta.multiplicative_order()
zeta = zeta**(n // m)
for c in m.coprime_integers(m):
e = rings.Mod(c, m)
g += self(c) * zeta**int(a*e + b*e**(-1))
return g
def kloosterman_sum_numerical(self, prec=53, a=1, b=0):
r"""
Return the Kloosterman sum associated to this Dirichlet character as
an approximate complex number with prec bits of precision.
See also :meth:`.kloosterman_sum`, which calculates the sum
exactly (which is generally slower).
INPUT:
- ``prec`` -- integer (default: 53), *bits* of precision
- ``a`` -- integer, as for :meth:`.kloosterman_sum`
- ``b`` -- integer, as for :meth:`.kloosterman_sum`.
EXAMPLES::
sage: G = DirichletGroup(3)
sage: e = G.0
The real component of the numerical value of e is near zero::
sage: v=e.kloosterman_sum_numerical()
sage: v.real() < 1.0e15
True
sage: v.imag()
1.73205080756888
sage: G = DirichletGroup(20)
sage: e = G.1
sage: e.kloosterman_sum_numerical(53,3,11)
3.80422606518061 - 3.80422606518061*I
"""
G = self.parent()
K = G.base_ring()
if not (number_field.is_CyclotomicField(K) or is_RationalField(K)):
raise NotImplementedError("Kloosterman sums only currently implemented when the base ring is a cyclotomic field or QQ.")
phi = K.complex_embedding(prec)
CC = phi.codomain()
g = 0
m = G.modulus()
zeta = CC.zeta(m)
for c in m.coprime_integers(m):
e = rings.Mod(c, m)
z = zeta ** int(a*e + b*(e**(-1)))
g += phi(self(c))*z
return g
def kernel(self):
r"""
Return the kernel of this character.
OUTPUT: Currently the kernel is returned as a list. This may
change.
EXAMPLES::
sage: G.<a,b> = DirichletGroup(20)
sage: a.kernel()
[1, 9, 13, 17]
sage: b.kernel()
[1, 11]
"""
one = self.base_ring().one()
return [x for x in range(self.modulus()) if self(x) == one]
def maximize_base_ring(self):
r"""
Let
.. MATH::
\varepsilon : (\ZZ/N\ZZ)^* \to \QQ(\zeta_n)
be a Dirichlet character. This function returns an equal Dirichlet
character
.. MATH::
\chi : (\ZZ/N\ZZ)^* \to \QQ(\zeta_m)
where `m` is the least common multiple of `n` and
the exponent of `(\ZZ/N\ZZ)^*`.
EXAMPLES::
sage: G.<a,b> = DirichletGroup(20,QQ)
sage: b.maximize_base_ring()
Dirichlet character modulo 20 of conductor 5 mapping 11 |--> 1, 17 |--> -1
sage: b.maximize_base_ring().base_ring()
Cyclotomic Field of order 4 and degree 2
sage: DirichletGroup(20).base_ring()
Cyclotomic Field of order 4 and degree 2
"""
g = rings.IntegerModRing(self.modulus()).unit_group_exponent()
if g == 1:
g = 2
z = self.base_ring().zeta()
n = z.multiplicative_order()
m = lcm(g,n)
if n == m:
return self
K = rings.CyclotomicField(m)
return self.change_ring(K)
def minimize_base_ring(self):
r"""
Return a Dirichlet character that equals this one, but over as
small a subfield (or subring) of the base ring as possible.
.. note::
This function is currently only implemented when the base
ring is a number field. It's the identity function in
characteristic p.
EXAMPLES::
sage: G = DirichletGroup(13)
sage: e = DirichletGroup(13).0
sage: e.base_ring()
Cyclotomic Field of order 12 and degree 4
sage: e.minimize_base_ring().base_ring()
Cyclotomic Field of order 12 and degree 4
sage: (e^2).minimize_base_ring().base_ring()
Cyclotomic Field of order 6 and degree 2
sage: (e^3).minimize_base_ring().base_ring()
Cyclotomic Field of order 4 and degree 2
sage: (e^12).minimize_base_ring().base_ring()
Rational Field
TESTS:
Check that :trac:`18479` is fixed::
sage: f = Newforms(Gamma1(25), names='a')[1]
sage: eps = f.character()
sage: eps.minimize_base_ring() == eps
True
A related bug (see :trac:`18086`)::
sage: K.<a,b>=NumberField([x^2 + 1, x^2 - 3])
sage: chi = DirichletGroup(7, K).0
sage: chi.minimize_base_ring()
Dirichlet character modulo 7 of conductor 7 mapping 3 |--> -1/2*b*a + 1/2
"""
R = self.base_ring()
if R.is_prime_field():
return self
p = R.characteristic()
if p:
K = rings.IntegerModRing(p)
elif self.order() <= 2:
K = rings.QQ
elif (isinstance(R, number_field.NumberField_generic)
and euler_phi(self.order()) < R.absolute_degree()):
K = rings.CyclotomicField(self.order())
else:
return self
try:
return self.change_ring(K)
except (TypeError, ValueError, ArithmeticError):
return self
def modulus(self):
"""
The modulus of this character.
EXAMPLES::
sage: e = DirichletGroup(100, QQ).0
sage: e.modulus()
100
sage: e.conductor()
4
"""
return self.parent().modulus()
def level(self):
"""
Synonym for modulus.
EXAMPLES::
sage: e = DirichletGroup(100, QQ).0
sage: e.level()
100
"""
return self.modulus()
def primitive_character(self):
"""
Returns the primitive character associated to self.
EXAMPLES::
sage: e = DirichletGroup(100).0; e
Dirichlet character modulo 100 of conductor 4 mapping 51 |--> -1, 77 |--> 1
sage: e.conductor()
4
sage: f = e.primitive_character(); f
Dirichlet character modulo 4 of conductor 4 mapping 3 |--> -1
sage: f.modulus()
4
"""
return self.restrict(self.conductor())
def restrict(self, M):
"""
Returns the restriction of this character to a Dirichlet character
modulo the divisor M of the modulus, which must also be a multiple
of the conductor of this character.
EXAMPLES::
sage: e = DirichletGroup(100).0
sage: e.modulus()
100
sage: e.conductor()
4
sage: e.restrict(20)
Dirichlet character modulo 20 of conductor 4 mapping 11 |--> -1, 17 |--> 1
sage: e.restrict(4)
Dirichlet character modulo 4 of conductor 4 mapping 3 |--> -1
sage: e.restrict(50)
Traceback (most recent call last):
...
ValueError: conductor(=4) must divide M(=50)
"""
M = int(M)
if self.modulus()%M != 0:
raise ValueError("M(=%s) must divide the modulus(=%s)"%(M,self.modulus()))
if M%self.conductor() != 0:
raise ValueError("conductor(=%s) must divide M(=%s)"%(self.conductor(),M))
H = DirichletGroup(M, self.base_ring())
return H(self)
def __setstate__(self, state):
r"""
Restore a pickled element from ``state``.
TESTS::
sage: e = DirichletGroup(16)([-1, 1])
sage: loads(dumps(e)) == e
True
"""
# values_on_gens() used an explicit cache __values_on_gens in the past
# we need to set the cache of values_on_gens() from that if we encounter it in a pickle
values_on_gens_key = '_DirichletCharacter__values_on_gens'
values_on_gens = None
state_dict = state[1]
if values_on_gens_key in state_dict:
values_on_gens = state_dict[values_on_gens_key]
del state_dict[values_on_gens_key]
# element() used an explicit cache __element in the past
# we need to set the cache of element() from that if we encounter it in a pickle
element_key = '_DirichletCharacter__element'
element = None
if element_key in state_dict:
element = state_dict[element_key]
del state_dict[element_key]
super(DirichletCharacter, self).__setstate__(state)
if values_on_gens is not None:
self.values_on_gens.set_cache(values_on_gens)
if element is not None:
self.element.set_cache(element)
class DirichletGroupFactory(UniqueFactory):
r"""
Construct a group of Dirichlet characters modulo `N`.
INPUT:
- ``N`` -- positive integer
- ``base_ring`` -- commutative ring; the value ring for the
characters in this group (default: the cyclotomic field
`\QQ(\zeta_n)`, where `n` is the exponent of `(\ZZ/N\ZZ)^*`)
- ``zeta`` -- (optional) root of unity in ``base_ring``
- ``zeta_order`` -- (optional) positive integer; this must be the
order of ``zeta`` if both are specified
- ``names`` -- ignored (needed so ``G.<...> = DirichletGroup(...)``
notation works)
- ``integral`` -- boolean (default: ``False``); whether to replace
the default cyclotomic field by its rings of integers as the
base ring. This is ignored if ``base_ring`` is not ``None``.
OUTPUT:
The group of Dirichlet characters modulo `N` with values in a
subgroup `V` of the multiplicative group `R^*` of ``base_ring``.
This is the group of homomorphisms `(\ZZ/N\ZZ)^* \to V` with
pointwise multiplication. The group `V` is determined as follows:
- If both ``zeta`` and ``zeta_order`` are omitted, then `V` is
taken to be `R^*`, or equivalently its `n`-torsion subgroup,
where `n` is the exponent of `(\ZZ/N\ZZ)^*`. Many operations,
such as finding a set of generators for the group, are only
implemented if `V` is cyclic and a generator for `V` can be
found.
- If ``zeta`` is specified, then `V` is taken to be the cyclic
subgroup of `R^*` generated by ``zeta``. If ``zeta_order`` is
also given, it must be the multiplicative order of ``zeta``;
this is useful if the base ring is not exact or if the order of
``zeta`` is very large.
- If ``zeta`` is not specified but ``zeta_order`` is, then `V` is
taken to be the group of roots of unity of order dividing
``zeta_order`` in `R`. In this case, `R` must be a domain (so
`V` is cyclic), and `V` must have order ``zeta_order``.
Furthermore, a generator ``zeta`` of `V` is computed, and an
error is raised if such ``zeta`` cannot be found.
EXAMPLES:
The default base ring is a cyclotomic field of order the exponent
of `(\ZZ/N\ZZ)^*`::
sage: DirichletGroup(20)
Group of Dirichlet characters modulo 20 with values in Cyclotomic Field of order 4 and degree 2
We create the group of Dirichlet character mod 20 with values in
the rational numbers::
sage: G = DirichletGroup(20, QQ); G
Group of Dirichlet characters modulo 20 with values in Rational Field
sage: G.order()
4
sage: G.base_ring()
Rational Field
The elements of G print as lists giving the values of the character
on the generators of `(Z/NZ)^*`::
sage: list(G)
[Dirichlet character modulo 20 of conductor 1 mapping 11 |--> 1, 17 |--> 1, Dirichlet character modulo 20 of conductor 4 mapping 11 |--> -1, 17 |--> 1, Dirichlet character modulo 20 of conductor 5 mapping 11 |--> 1, 17 |--> -1, Dirichlet character modulo 20 of conductor 20 mapping 11 |--> -1, 17 |--> -1]
Next we construct the group of Dirichlet character mod 20, but with
values in `\QQ(\zeta_n)`::
sage: G = DirichletGroup(20)
sage: G.1
Dirichlet character modulo 20 of conductor 5 mapping 11 |--> 1, 17 |--> zeta4
We next compute several invariants of ``G``::
sage: G.gens()
(Dirichlet character modulo 20 of conductor 4 mapping 11 |--> -1, 17 |--> 1, Dirichlet character modulo 20 of conductor 5 mapping 11 |--> 1, 17 |--> zeta4)
sage: G.unit_gens()
(11, 17)
sage: G.zeta()
zeta4
sage: G.zeta_order()
4
In this example we create a Dirichlet group with values in a
number field::
sage: R.<x> = PolynomialRing(QQ)
sage: K.<a> = NumberField(x^4 + 1)
sage: DirichletGroup(5, K)
Group of Dirichlet characters modulo 5 with values in Number Field in a with defining polynomial x^4 + 1
An example where we give ``zeta``, but not its order::
sage: G = DirichletGroup(5, K, a); G
Group of Dirichlet characters modulo 5 with values in the group of order 8 generated by a in Number Field in a with defining polynomial x^4 + 1
sage: G.list()
[Dirichlet character modulo 5 of conductor 1 mapping 2 |--> 1, Dirichlet character modulo 5 of conductor 5 mapping 2 |--> a^2, Dirichlet character modulo 5 of conductor 5 mapping 2 |--> -1, Dirichlet character modulo 5 of conductor 5 mapping 2 |--> -a^2]
We can also restrict the order of the characters, either with or
without specifying a root of unity::
sage: DirichletGroup(5, K, zeta=-1, zeta_order=2)
Group of Dirichlet characters modulo 5 with values in the group of order 2 generated by -1 in Number Field in a with defining polynomial x^4 + 1
sage: DirichletGroup(5, K, zeta_order=2)
Group of Dirichlet characters modulo 5 with values in the group of order 2 generated by -1 in Number Field in a with defining polynomial x^4 + 1
::
sage: G.<e> = DirichletGroup(13)
sage: loads(G.dumps()) == G
True
::
sage: G = DirichletGroup(19, GF(5))
sage: loads(G.dumps()) == G
True
We compute a Dirichlet group over a large prime field::
sage: p = next_prime(10^40)
sage: g = DirichletGroup(19, GF(p)); g
Group of Dirichlet characters modulo 19 with values in Finite Field of size 10000000000000000000000000000000000000121
Note that the root of unity has small order, i.e., it is not the
largest order root of unity in the field::
sage: g.zeta_order()
2
::
sage: r4 = CyclotomicField(4).ring_of_integers()
sage: G = DirichletGroup(60, r4)
sage: G.gens()
(Dirichlet character modulo 60 of conductor 4 mapping 31 |--> -1, 41 |--> 1, 37 |--> 1, Dirichlet character modulo 60 of conductor 3 mapping 31 |--> 1, 41 |--> -1, 37 |--> 1, Dirichlet character modulo 60 of conductor 5 mapping 31 |--> 1, 41 |--> 1, 37 |--> zeta4)
sage: val = G.gens()[2].values_on_gens()[2] ; val
zeta4
sage: parent(val)
Gaussian Integers in Cyclotomic Field of order 4 and degree 2
sage: r4.residue_field(r4.ideal(29).factor()[0][0])(val)
17
sage: r4.residue_field(r4.ideal(29).factor()[0][0])(val) * GF(29)(3)
22
sage: r4.residue_field(r4.ideal(29).factor()[0][0])(G.gens()[2].values_on_gens()[2]) * 3
22
sage: parent(r4.residue_field(r4.ideal(29).factor()[0][0])(G.gens()[2].values_on_gens()[2]) * 3)
Residue field of Fractional ideal (-2*zeta4 + 5)
::
sage: DirichletGroup(60, integral=True)
Group of Dirichlet characters modulo 60 with values in Gaussian Integers in Cyclotomic Field of order 4 and degree 2
sage: parent(DirichletGroup(60, integral=True).gens()[2].values_on_gens()[2])
Gaussian Integers in Cyclotomic Field of order 4 and degree 2
If the order of ``zeta`` cannot be determined automatically, we
can specify it using ``zeta_order``::
sage: DirichletGroup(7, CC, zeta=exp(2*pi*I/6))
Traceback (most recent call last):
...
NotImplementedError: order of element not known
sage: DirichletGroup(7, CC, zeta=exp(2*pi*I/6), zeta_order=6)
Group of Dirichlet characters modulo 7 with values in the group of order 6 generated by 0.500000000000000 + 0.866025403784439*I in Complex Field with 53 bits of precision
If the base ring is not a domain (in which case the group of roots
of unity is not necessarily cyclic), some operations still work,
such as creation of elements::
sage: G = DirichletGroup(5, Zmod(15)); G
Group of Dirichlet characters modulo 5 with values in Ring of integers modulo 15
sage: chi = G([13]); chi
Dirichlet character modulo 5 of conductor 5 mapping 2 |--> 13
sage: chi^2
Dirichlet character modulo 5 of conductor 5 mapping 2 |--> 4
sage: chi.multiplicative_order()
4
Other operations only work if ``zeta`` is specified::
sage: G.gens()
Traceback (most recent call last):
...
NotImplementedError: factorization of polynomials over rings with composite characteristic is not implemented
sage: G = DirichletGroup(5, Zmod(15), zeta=2); G
Group of Dirichlet characters modulo 5 with values in the group of order 4 generated by 2 in Ring of integers modulo 15
sage: G.gens()
(Dirichlet character modulo 5 of conductor 5 mapping 2 |--> 2,)
TESTS:
Dirichlet groups are cached, creating two groups with the same parameters
yields the same object::
sage: DirichletGroup(60) is DirichletGroup(60)
True
"""
def create_key(self, N, base_ring=None, zeta=None, zeta_order=None,
names=None, integral=False):
"""
Create a key that uniquely determines a Dirichlet group.
TESTS::
sage: DirichletGroup.create_key(60)
(Cyclotomic Field of order 4 and degree 2, 60, None, None)
An example to illustrate that ``base_ring`` is a part of the key::
sage: k = DirichletGroup.create_key(2, base_ring=QQ); k
(Rational Field, 2, None, None)
sage: l = DirichletGroup.create_key(2, base_ring=CC); l
(Complex Field with 53 bits of precision, 2, None, None)
sage: k == l
False
sage: G = DirichletGroup.create_object(None, k); G
Group of Dirichlet characters modulo 2 with values in Rational Field
sage: H = DirichletGroup.create_object(None, l); H
Group of Dirichlet characters modulo 2 with values in Complex Field with 53 bits of precision
sage: G == H
False
If ``base_ring`` was not be a part of the key, the keys would compare
equal and the caching would be broken::
sage: k = k[1:]; k
(2, None, None)
sage: l = l[1:]; l
(2, None, None)
sage: k == l
True
sage: DirichletGroup(2, base_ring=QQ) is DirichletGroup(2, base_ring=CC)
False
If the base ring is not an integral domain, an error will be
raised if only ``zeta_order`` is specified::
sage: DirichletGroup(17, Integers(15))
Group of Dirichlet characters modulo 17 with values in Ring of integers modulo 15
sage: DirichletGroup(17, Integers(15), zeta_order=4)
Traceback (most recent call last):
...
ValueError: base ring (= Ring of integers modulo 15) must be an integral domain if only zeta_order is specified
sage: G = DirichletGroup(17, Integers(15), zeta=7); G
Group of Dirichlet characters modulo 17 with values in the group of order 4 generated by 7 in Ring of integers modulo 15
sage: G.order()
4
sage: DirichletGroup(-33)
Traceback (most recent call last):
...
ValueError: modulus should be positive
"""
modulus = rings.Integer(N)
if modulus <= 0:
raise ValueError('modulus should be positive')
if base_ring is None:
if not (zeta is None and zeta_order is None):
raise ValueError("zeta and zeta_order must be None if base_ring not specified")
e = rings.IntegerModRing(modulus).unit_group_exponent()
base_ring = rings.CyclotomicField(e)
if integral:
base_ring = base_ring.ring_of_integers()
if not is_Ring(base_ring):
raise TypeError("base_ring (= %s) must be a ring" % base_ring)
# If either zeta or zeta_order is given, compute the other.
if zeta is not None:
zeta = base_ring(zeta)
if zeta_order is None:
zeta_order = zeta.multiplicative_order()
elif zeta_order is not None:
if not base_ring.is_integral_domain():
raise ValueError("base ring (= %s) must be an integral domain if only zeta_order is specified"
% base_ring)
zeta_order = rings.Integer(zeta_order)
zeta = base_ring.zeta(zeta_order)
return (base_ring, modulus, zeta, zeta_order)
def create_object(self, version, key, **extra_args):
"""
Create the object from the key (extra arguments are ignored). This is
only called if the object was not found in the cache.
TESTS::
sage: K = CyclotomicField(4)
sage: DirichletGroup.create_object(None, (K, 60, K.gen(), 4))
Group of Dirichlet characters modulo 60 with values in the group of order 4 generated by zeta4 in Cyclotomic Field of order 4 and degree 2
"""
base_ring, modulus, zeta, zeta_order = key
return DirichletGroup_class(base_ring, modulus, zeta, zeta_order)
DirichletGroup = DirichletGroupFactory("DirichletGroup")
def is_DirichletGroup(x):
"""
Returns True if x is a Dirichlet group.
EXAMPLES::
sage: from sage.modular.dirichlet import is_DirichletGroup
sage: is_DirichletGroup(DirichletGroup(11))
True
sage: is_DirichletGroup(11)
False
sage: is_DirichletGroup(DirichletGroup(11).0)
False
"""
return isinstance(x, DirichletGroup_class)
def base_extend(self, R):
"""
Return the base extension of ``self`` to ``R``.
INPUT:
- ``R`` -- either a ring admitting a *coercion* map from the
base ring of ``self``, or a ring homomorphism with the base
ring of ``self`` as its domain
EXAMPLES::
sage: G = DirichletGroup(7,QQ); G
Group of Dirichlet characters modulo 7 with values in Rational Field
sage: H = G.base_extend(CyclotomicField(6)); H
Group of Dirichlet characters modulo 7 with values in Cyclotomic Field of order 6 and degree 2
Note that the root of unity can change::
sage: H.zeta()
zeta6
This method (in contrast to :meth:`change_ring`) requires a
coercion map to exist::
sage: G.base_extend(ZZ)
Traceback (most recent call last):
...
TypeError: no coercion map from Rational Field to Integer Ring is defined
Base-extended Dirichlet groups do not silently get roots of
unity with smaller order than expected (:trac:`6018`)::
sage: G = DirichletGroup(10, QQ).base_extend(CyclotomicField(4))
sage: H = DirichletGroup(10, CyclotomicField(4))
sage: G is H
True
sage: G3 = DirichletGroup(31, CyclotomicField(3))
sage: G5 = DirichletGroup(31, CyclotomicField(5))
sage: K30 = CyclotomicField(30)
sage: G3.gen(0).base_extend(K30) * G5.gen(0).base_extend(K30)
Dirichlet character modulo 31 of conductor 31 mapping 3 |--> -zeta30^7 + zeta30^5 + zeta30^4 + zeta30^3 - zeta30 - 1
When a root of unity is specified, base extension still works
if the new base ring is not an integral domain::
sage: f = DirichletGroup(17, ZZ, zeta=-1).0
sage: g = f.base_extend(Integers(15))
sage: g(3)
14
sage: g.parent().zeta()
14
"""
if not (isinstance(R, Map)
or R.has_coerce_map_from(self.base_ring())):
raise TypeError("no coercion map from %s to %s is defined"
% (self.base_ring(), R))
return self.change_ring(R)
def _element_constructor_(self, x):
"""
Construct a Dirichlet character from `x`.
EXAMPLES::
sage: G = DirichletGroup(13)
sage: K = G.base_ring()
sage: G(1)
Dirichlet character modulo 13 of conductor 1 mapping 2 |--> 1
sage: G([-1])
Dirichlet character modulo 13 of conductor 13 mapping 2 |--> -1
sage: G([K.0])
Dirichlet character modulo 13 of conductor 13 mapping 2 |--> zeta12
sage: G(0)
Traceback (most recent call last):
...
TypeError: cannot convert 0 to an element of Group of Dirichlet characters modulo 13 with values in Cyclotomic Field of order 12 and degree 4
sage: G = DirichletGroup(6)
sage: G(DirichletGroup(3).0)
Dirichlet character modulo 6 of conductor 3 mapping 5 |--> -1
sage: G(DirichletGroup(15).0)
Dirichlet character modulo 6 of conductor 3 mapping 5 |--> -1
sage: G(DirichletGroup(15).1)
Traceback (most recent call last):
...
TypeError: conductor must divide modulus
sage: H = DirichletGroup(16, QQ); H(DirichletGroup(16).1)
Traceback (most recent call last):
...
TypeError: Unable to coerce zeta4 to a rational
"""
R = self.base_ring()
try:
if x == R.one():
x = [R.one()] * len(self.unit_gens())
except (TypeError, ValueError, ArithmeticError):
pass
if isinstance(x, list): # list of values on each unit generator
return self.element_class(self, x)
elif not isinstance(x, DirichletCharacter):
raise TypeError("cannot convert %s to an element of %s" % (x, self))
elif not x.conductor().divides(self.modulus()):
raise TypeError("conductor must divide modulus")
a = []
for u in self.unit_gens():
v = u.lift()
# have to do this, since e.g., unit gens mod 11 are not units mod 22.
while x.modulus().gcd(v) != 1:
v += self.modulus()
a.append(R(x(v)))
return self.element_class(self, a)
def _coerce_map_from_(self, X):
"""
Decide whether there is a coercion map from `X`.
There is conversion between Dirichlet groups of different
moduli, but no coercion. This implies that Dirichlet
characters of different moduli do not compare as equal.
TESTS::
sage: trivial_character(6) == trivial_character(3) # indirect doctest
False
sage: trivial_character(3) == trivial_character(9)
False
sage: trivial_character(3) == DirichletGroup(3, QQ).0^2
True
"""
return (isinstance(X, DirichletGroup_class) and
self.modulus() == X.modulus() and
self.base_ring().has_coerce_map_from(X.base_ring()) and
(self._zeta is None or
(X._zeta is not None and
self.base_ring()(X._zeta) in self._zeta_powers)))
def __len__(self):
"""
Return the number of elements of this Dirichlet group. This is the
same as self.order().
EXAMPLES::
sage: len(DirichletGroup(20))
8
sage: len(DirichletGroup(20, QQ))
4
sage: len(DirichletGroup(20, GF(5)))
8
sage: len(DirichletGroup(20, GF(2)))
1
sage: len(DirichletGroup(20, GF(3)))
4
"""
return self.order()
def _repr_(self):
"""
Return a print representation of this group, which can be renamed.
EXAMPLES::
sage: G = DirichletGroup(11)
sage: repr(G) # indirect doctest
'Group of Dirichlet characters modulo 11 with values in Cyclotomic Field of order 10 and degree 4'
sage: G.rename('Dir(11)')
sage: G
Dir(11)
"""
s = "Group of Dirichlet characters modulo %s with values in " % self.modulus()
if self._zeta is not None:
s += "the group of order %s generated by %s in " % (self._zeta_order, self._zeta)
s += str(self.base_ring())
return s
def exponent(self):
"""
Return the exponent of this group.
EXAMPLES::
sage: DirichletGroup(20).exponent()
4
sage: DirichletGroup(20,GF(3)).exponent()
2
sage: DirichletGroup(20,GF(2)).exponent()
1
sage: DirichletGroup(37).exponent()
36
"""
return self.zeta_order()
def galois_orbits(self, v=None, reps_only=False, sort=True, check=True):
"""
Return a list of the Galois orbits of Dirichlet characters in self,
or in v if v is not None.
INPUT:
- ``v`` - (optional) list of elements of self
- ``reps_only`` - (optional: default False) if True
only returns representatives for the orbits.
- ``sort`` - (optional: default True) whether to sort
the list of orbits and the orbits themselves (slightly faster if
False).
- ``check`` - (optional, default: True) whether or not
to explicitly coerce each element of v into self.
The Galois group is the absolute Galois group of the prime subfield
of Frac(R). If R is not a domain, an error will be raised.
EXAMPLES::
sage: DirichletGroup(20).galois_orbits()
[
[Dirichlet character modulo 20 of conductor 20 mapping 11 |--> -1, 17 |--> -1],
...,
[Dirichlet character modulo 20 of conductor 1 mapping 11 |--> 1, 17 |--> 1]
]
sage: DirichletGroup(17, Integers(6), zeta=Integers(6)(5)).galois_orbits()
Traceback (most recent call last):
...
TypeError: Galois orbits only defined if base ring is an integral domain
sage: DirichletGroup(17, Integers(9), zeta=Integers(9)(2)).galois_orbits()
Traceback (most recent call last):
...
TypeError: Galois orbits only defined if base ring is an integral domain
"""
if v is None:
v = self.list()
else:
if check:
v = [self(x) for x in v]
G = []
seen_so_far = set([])
for x in v:
z = x.element()
e = tuple(z) # change when there are immutable vectors (and below)
if e in seen_so_far:
continue
orbit = x.galois_orbit(sort=sort)
if reps_only:
G.append(x)
else:
G.append(orbit)
for z in orbit:
seen_so_far.add(tuple(z.element()))
G = Sequence(G, cr=True)
if sort:
G.sort()
return G
def gen(self, n=0):
"""
Return the n-th generator of self.
EXAMPLES::
sage: G = DirichletGroup(20)
sage: G.gen(0)
Dirichlet character modulo 20 of conductor 4 mapping 11 |--> -1, 17 |--> 1
sage: G.gen(1)
Dirichlet character modulo 20 of conductor 5 mapping 11 |--> 1, 17 |--> zeta4
sage: G.gen(2)
Traceback (most recent call last):
...
IndexError: n(=2) must be between 0 and 1
::
sage: G.gen(-1)
Traceback (most recent call last):
...
IndexError: n(=-1) must be between 0 and 1
"""
n = int(n)
g = self.gens()
if n<0 or n>=len(g):
raise IndexError("n(=%s) must be between 0 and %s"%(n,len(g)-1))
return g[n]
def integers_mod(self):
r"""
Returns the group of integers `\ZZ/N\ZZ`
where `N` is the modulus of self.
EXAMPLES::
sage: G = DirichletGroup(20)
sage: G.integers_mod()
Ring of integers modulo 20
"""
return self._integers
__iter__ = multiplicative_iterator
def list(self):
"""
Return a list of the Dirichlet characters in this group.
EXAMPLES::
sage: DirichletGroup(5).list()
[Dirichlet character modulo 5 of conductor 1 mapping 2 |--> 1,
Dirichlet character modulo 5 of conductor 5 mapping 2 |--> zeta4,
Dirichlet character modulo 5 of conductor 5 mapping 2 |--> -1,
Dirichlet character modulo 5 of conductor 5 mapping 2 |--> -zeta4]
"""
return self._list_from_iterator()
def modulus(self):
"""
Returns the modulus of self.
EXAMPLES::
sage: G = DirichletGroup(20)
sage: G.modulus()
20
"""
return self._modulus
def ngens(self):
"""
Returns the number of generators of self.
EXAMPLES::
sage: G = DirichletGroup(20)
sage: G.ngens()
2
"""
return len(self.gens())
def random_element(self):
"""
Return a random element of self.
The element is computed by multiplying a random power of each
generator together, where the power is between 0 and the order of
the generator minus 1, inclusive.
EXAMPLES::
sage: DirichletGroup(37).random_element()
Dirichlet character modulo 37 of conductor 37 mapping 2 |--> zeta36^4
sage: DirichletGroup(20).random_element()
Dirichlet character modulo 20 of conductor 4 mapping 11 |--> -1, 17 |--> 1
sage: DirichletGroup(60).random_element()
Dirichlet character modulo 60 of conductor 3 mapping 31 |--> 1, 41 |--> -1, 37 |--> 1
"""
e = self(1)
for i in range(self.ngens()):
g = self.gen(i)
n = random.randrange(g.order())
e *= g**n
return e

    def unit_gens(self):
        r"""
        Returns the minimal generators for the units of
        `(\ZZ/N\ZZ)^*`, where `N` is the
        modulus of self.

        EXAMPLES::

            sage: DirichletGroup(37).unit_gens()
            (2,)
            sage: DirichletGroup(20).unit_gens()
            (11, 17)
            sage: DirichletGroup(60).unit_gens()
            (31, 41, 37)
            sage: DirichletGroup(20,QQ).unit_gens()
            (11, 17)
        """
        return self._integers.unit_gens()


# --- src/biotite/file.py (danijoo/biotite, BSD-3-Clause) ---

# This source code is part of the Biotite package and is distributed
# under the 3-Clause BSD License. Please see 'LICENSE.rst' for further
# information.
__name__ = "biotite"
__author__ = "Patrick Kunzmann"
__all__ = ["File", "TextFile", "InvalidFileError"]
import abc
import io
import warnings
from .copyable import Copyable
import copy


# NOTE: the two defs below are clearly methods of the TextFile class named in
# __all__ (they operate on self.lines); the class statement itself did not
# survive in this copy, so only the method bodies are kept, re-indented.
def __copy_fill__(self, clone):
    super().__copy_fill__(clone)
    clone.lines = copy.copy(self.lines)


def __str__(self):
    return "\n".join(self.lines)


class InvalidFileError(Exception):
    """
    Indicates that the file is not suitable for the requested action,
    either because the file does not contain the required data or
    because the file is malformed.
    """
    pass


def wrap_string(text, width):
    """
    A much simpler and hence much more efficient version of
    `textwrap.wrap()`.

    This function simply wraps the given `text` after `width`
    characters, ignoring sentences, whitespaces, etc.
    """
    lines = []
    for i in range(0, len(text), width):
        lines.append(text[i : i+width])
    return lines
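

# Illustrative check (hypothetical helper, not part of the original module):
# wrap_string() cuts purely by character count, so a 10-character string
# wrapped at width 4 yields chunks of 4, 4 and 2 characters.
def _wrap_string_example():
    assert wrap_string("ABCDEFGHIJ", 4) == ["ABCD", "EFGH", "IJ"]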


def is_binary(file):
    if isinstance(file, io.BufferedIOBase):
        return True
    # for file wrappers, e.g. 'TemporaryFile'
    elif hasattr(file, "file") and isinstance(file.file, io.BufferedIOBase):
        return True
    else:
        return False


def is_text(file):
    if isinstance(file, io.TextIOBase):
        return True
    # for file wrappers, e.g. 'TemporaryFile'
    elif hasattr(file, "file") and isinstance(file.file, io.TextIOBase):
        return True
    else:
        return False
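

# Usage sketch (hypothetical helper): io.BytesIO and io.StringIO subclass
# io.BufferedIOBase and io.TextIOBase respectively, so they exercise the two
# detection branches above.
def _stream_kind_example():
    assert is_binary(io.BytesIO(b"\x00"))
    assert is_text(io.StringIO("abc"))
    assert not is_text(io.BytesIO(b"\x00"))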


# --- src/cms/views/push_notifications/push_notification_sender.py (mckinly/cms-django, Apache-2.0) ---

"""
Module for sending Push Notifications
"""
import logging
import requests
from django.conf import settings
from ...models import PushNotificationTranslation
from ...models import Region
from ...constants import push_notifications as pnt_const
logger = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods
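# The sender class itself did not survive in this copy. Below is a minimal
# sketch of such a class, assuming the legacy FCM HTTP endpoint and a
# settings.FCM_KEY credential (both assumptions, not taken from the original):
class PushNotificationSender:
    """
    Sketch of a sender pushing one translated message via Firebase Cloud
    Messaging.
    """

    fcm_url = "https://fcm.googleapis.com/fcm/send"  # assumed endpoint

    def __init__(self, push_notification):
        self.push_notification = push_notification

    def send_pn(self, translation):
        # Hypothetical payload layout; the real field names are not
        # recoverable from this copy.
        payload = {
            "to": "/topics/news",
            "notification": {
                "title": translation.title,
                "body": translation.text,
            },
        }
        headers = {"Authorization": "key={}".format(settings.FCM_KEY)}
        response = requests.post(self.fcm_url, json=payload, headers=headers)
        logger.debug("FCM response status: %s", response.status_code)
        return response.status_code == 200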


# --- tests/functional/index/create/test_03.py (reevespaul/firebird-qa, MIT) ---

#coding:utf-8
#
# id: functional.index.create.03
# title: CREATE ASC INDEX
#                description:  CREATE ASC INDEX
#
# Dependencies:
# CREATE DATABASE
# CREATE TABLE
# SHOW INDEX
# tracker_id:
# min_versions: []
# versions: 1.0
# qmid: functional.index.create.create_index_03
import pytest
from firebird.qa import db_factory, isql_act, Action
# version: 1.0
# resources: None
substitutions_1 = []
init_script_1 = """CREATE TABLE t( a INTEGER);
commit;"""
db_1 = db_factory(sql_dialect=3, init=init_script_1)
test_script_1 = """CREATE ASC INDEX test ON t(a);
SHOW INDEX test;"""
act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1)
expected_stdout_1 = """TEST INDEX ON T(A)"""
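
# The executable stanza is absent from this copy; the following completion
# mirrors the standard firebird-qa pattern implied by the act_1 and
# expected_stdout_1 definitions above (a reconstruction, not original text).
@pytest.mark.version('>=1.0')
def test_1(act_1: Action):
    act_1.expected_stdout = expected_stdout_1
    act_1.execute()
    assert act_1.clean_stdout == act_1.clean_expected_stdout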


# --- app/logic/httpcommon/Page.py (imvu/bluesteel, MIT) ---

""" Page object file """


# --- models/AI-Model-Zoo/VAI-1.3-Model-Zoo-Code/PyTorch/pt_personreid-res18_market1501_176_80_1.1G_1.3/code/core/data_manager.py (guochunhe/Vitis-AI, Apache-2.0) ---

# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, absolute_import
import glob
import re
from os import path as osp
from .market1501 import Market1501
__factory = {
'market1501': Market1501
}
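
# Accessor sketch: the registry above implies a by-name factory; the helper
# below is a plausible reconstruction (the original function names are not
# visible in this copy).
def init_dataset(name, *args, **kwargs):
    if name not in __factory:
        raise KeyError("Unknown dataset: {}".format(name))
    return __factory[name](*args, **kwargs)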


# --- PyBank/.ipynb_checkpoints/Pymain-checkpoint.py (yash5OG/PythonChallengeW3-Y5, MIT) ---

{
"cells": [
{
"cell_type": "code",
"execution_count": 64,
"metadata": {},
"outputs": [],
"source": [
"# Import libraries\n",
"import os, csv"
]
},
{
"cell_type": "code",
"execution_count": 65,
"metadata": {},
"outputs": [],
"source": [
"#variables for the script\n",
"months = [] #list of months\n",
"pl =[] #list of monthly PL\n",
"pl_changes = [] #list of P&L Changes\n",
"n_months = 0 #count of months\n",
"pl_total = 0 #total of P&L\n",
"plc = 0 #variable to track PL changes\n",
"avg_pl_change = 0 #average of changes in PL\n",
"maxpl = 0 #maximum increase in profits\n",
"minpl = 0 #maximum decrease in losses\n",
"max_i = 0 #index for max pl\n",
"min_i = 0 #index for min pl\n",
"\n",
"#read the resource file\n",
"bankcsv = os.path.join(\".\", \"Resources\", \"budget_data.csv\") #set path\n",
"\n",
"\n",
"#read file\n",
"with open(bankcsv, 'r') as csv_file:\n",
" csv_reader = csv.reader(csv_file,delimiter=\",\")\n",
" header = next(csv_reader)\n",
" \n",
" #for loop to update the counters and lists\n",
" for row in csv_reader:\n",
" n_months += 1\n",
" pl_total += int(row[1])\n",
" pl.append(row[1])\n",
" months.append(row[0])"
]
},
{
"cell_type": "code",
"execution_count": 66,
"metadata": {},
"outputs": [],
"source": [
"# loop to track the PL change values\n",
"pl_changes = [] \n",
"plc = int(pl[0])\n",
"for i in range(1, len(pl)):\n",
" pl_changes.append(int(pl[i]) - plc)\n",
" plc = int(pl[i])\n",
" i += 1\n",
"#print(pl_changes)"
]
},
{
"cell_type": "code",
"execution_count": 67,
"metadata": {},
"outputs": [],
"source": [
"#calculate the average PL Changes, max and min\n",
"avg_pl_change = sum(pl_changes) / len(pl_changes)\n",
"maxpl = max(pl_changes)\n",
"minpl = min(pl_changes)\n",
"#print(avg_pl_change, maxpl, minpl)\n",
"#print(pl_changes.index(maxpl))\n",
"#print(len(pl_changes))"
]
},
{
"cell_type": "code",
"execution_count": 68,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Financial Analysis\n",
"---------------------------------------------------------------------\n",
"Total Months: 86\n",
"Total: $38382578\n",
"Average Change: $-2315.12\n",
"Greatest Increase in Profits: Feb-2012 ($1926159)\n",
"Greatest Decrease in Profits: Sep-2013 ($-2196167)\n"
]
}
],
"source": [
"#find dates for max and min PL changes\n",
"max_i = pl_changes.index(maxpl) +1 #adding +1 since the changes are calculated one row above\n",
"min_i = pl_changes.index(minpl) +1\n",
"\n",
"maxmonth = months[max_i]\n",
"minmonth = months[min_i]\n",
"\n",
"#print output to the terminal\n",
"\n",
"print(\"Financial Analysis\")\n",
"print(\"-\"*69)\n",
"print(f\"Total Months: {n_months}\")\n",
"print(f\"Total: ${round(pl_total,2)}\")\n",
"print(f\"Average Change: ${round(avg_pl_change,2)}\")\n",
"print(f\"Greatest Increase in Profits: {maxmonth} (${maxpl})\")\n",
"print(f\"Greatest Decrease in Profits: {minmonth} (${minpl})\")\n"
]
},
{
"cell_type": "code",
"execution_count": 69,
"metadata": {},
"outputs": [],
"source": [
"# write summary to txt file\n",
"output = os.path.join(\".\",\"Analysis\", \"summary.txt\")\n",
"\n",
"# use \"\\n\" to create a new line\n",
"with open(output, 'w') as output:\n",
" output.write(\"Financial Analysis\\n\")\n",
" output.write(\"-\"*69 + \"\\n\")\n",
" output.write(f\"Total Months: {n_months}\\n\")\n",
" output.write(f\"Total: ${round(pl_total,2)}\\n\")\n",
" output.write(f\"Average Change: ${round(avg_pl_change,2)}\\n\")\n",
" output.write(f\"Greatest Increase in Profits: {maxmonth} (${maxpl})\\n\")\n",
" output.write(f\"Greatest Decrease in Profits: {minmonth} (${minpl})\\n\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.5"
}
},
"nbformat": 4,
"nbformat_minor": 4
}


# --- xlib/api/win32/oleaut32/oleaut32.py (jkennedyvz/DeepFaceLive, MIT) ---

from ctypes import POINTER, Structure
from ..wintypes import VARIANT, dll_import


# --- azure-devops/azext_devops/vstsCompressed/service_hooks/v4_0/models/__init__.py (vijayraavi/azure-devops-cli-extension, MIT) ---

# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .models import Consumer
from .models import ConsumerAction
from .models import Event
from .models import EventTypeDescriptor
from .models import ExternalConfigurationDescriptor
from .models import FormattedEventMessage
from .models import IdentityRef
from .models import InputDescriptor
from .models import InputFilter
from .models import InputFilterCondition
from .models import InputValidation
from .models import InputValue
from .models import InputValues
from .models import InputValuesError
from .models import InputValuesQuery
from .models import Notification
from .models import NotificationDetails
from .models import NotificationResultsSummaryDetail
from .models import NotificationsQuery
from .models import NotificationSummary
from .models import Publisher
from .models import PublisherEvent
from .models import PublishersQuery
from .models import ReferenceLinks
from .models import ResourceContainer
from .models import SessionToken
from .models import Subscription
from .models import SubscriptionsQuery
from .models import VersionedResource
__all__ = [
'Consumer',
'ConsumerAction',
'Event',
'EventTypeDescriptor',
'ExternalConfigurationDescriptor',
'FormattedEventMessage',
'IdentityRef',
'InputDescriptor',
'InputFilter',
'InputFilterCondition',
'InputValidation',
'InputValue',
'InputValues',
'InputValuesError',
'InputValuesQuery',
'Notification',
'NotificationDetails',
'NotificationResultsSummaryDetail',
'NotificationsQuery',
'NotificationSummary',
'Publisher',
'PublisherEvent',
'PublishersQuery',
'ReferenceLinks',
'ResourceContainer',
'SessionToken',
'Subscription',
'SubscriptionsQuery',
'VersionedResource',
]
| 33.042857 | 94 | 0.685257 |
8a643abfeb244244f7979e846782a5b379e9f35e | 935 | py | Python | pizdyuk/pzd_logging.py | DeathAdder1999/Pizdyuk | 3fd7c71508c79b36e3cc801d78cd1a87eee5aa0b | [
"Apache-2.0"
] | 1 | 2021-05-06T20:23:08.000Z | 2021-05-06T20:23:08.000Z | pizdyuk/pzd_logging.py | aufdnb/Pizdyuk | 75096ffa54df831eb05360d7b39f49000d466f80 | [
"Apache-2.0"
] | null | null | null | pizdyuk/pzd_logging.py | aufdnb/Pizdyuk | 75096ffa54df831eb05360d7b39f49000d466f80 | [
"Apache-2.0"
] | null | null | null | import datetime as date
from pzd_utils import datetime_to_str
| 25.27027 | 66 | 0.632086 |
8a64487109643353c0e84bbee6dfb1cf09044927 | 834 | py | Python | beta_reconstruction/crystal_relations.py | LightForm-group/beta-reconstruction | 67584f75ee08690226595c5f9dc75dfd164a11a0 | [
"MIT"
] | null | null | null | beta_reconstruction/crystal_relations.py | LightForm-group/beta-reconstruction | 67584f75ee08690226595c5f9dc75dfd164a11a0 | [
"MIT"
] | 1 | 2020-01-07T12:41:26.000Z | 2020-01-07T12:50:40.000Z | beta_reconstruction/crystal_relations.py | LightForm-group/beta-reconstruction | 67584f75ee08690226595c5f9dc75dfd164a11a0 | [
"MIT"
] | null | null | null | import numpy as np
from defdap.quat import Quat
hex_syms = Quat.symEqv("hexagonal")
# subset of hexagonal symmetries that give unique orientations when the
# Burgers transformation is applied
unq_hex_syms = [
hex_syms[0],
hex_syms[5],
hex_syms[4],
hex_syms[2],
hex_syms[10],
hex_syms[11]
]
cubic_syms = Quat.symEqv("cubic")
# subset of cubic symmetries that give unique orientations when the
# Burgers transformation is applied
unq_cub_syms = [
cubic_syms[0],
cubic_syms[7],
cubic_syms[9],
cubic_syms[1],
cubic_syms[22],
cubic_syms[16],
cubic_syms[12],
cubic_syms[15],
cubic_syms[4],
cubic_syms[8],
cubic_syms[21],
cubic_syms[20]
]
# HCP -> BCC
burg_eulers = np.array([135, 90, 354.74]) * np.pi / 180
burg_trans = Quat.fromEulerAngles(*burg_eulers).conjugate
| 22.540541 | 71 | 0.689448 |
8a64819227bba93979e4413095e01b50e7c00dec | 13 | py | Python | a2.py | Changhong-Jiang/test | b907b984cbd9703711f52c9f497cf36b5b4e8752 | [
"MIT"
] | null | null | null | a2.py | Changhong-Jiang/test | b907b984cbd9703711f52c9f497cf36b5b4e8752 | [
"MIT"
] | 1 | 2020-02-28T08:15:58.000Z | 2020-02-28T08:16:41.000Z | a2.py | Changhong-Jiang/test | b907b984cbd9703711f52c9f497cf36b5b4e8752 | [
"MIT"
] | null | null | null | print('222')
| 6.5 | 12 | 0.615385 |
8a65b0ad04c9a2a75abc3c11ac9fc679788fe298 | 4,318 | py | Python | app/api/v1/views/auth_views.py | emdeechege/Questionaire-API | 1dd05dc25f96ea8ecdce82fe28449ea684991251 | [
"MIT"
] | null | null | null | app/api/v1/views/auth_views.py | emdeechege/Questionaire-API | 1dd05dc25f96ea8ecdce82fe28449ea684991251 | [
"MIT"
] | 20 | 2019-01-08T19:16:45.000Z | 2019-08-22T10:21:43.000Z | app/api/v1/views/auth_views.py | emdeechege/Questioner-API | 1dd05dc25f96ea8ecdce82fe28449ea684991251 | [
"MIT"
] | 1 | 2019-01-11T10:13:42.000Z | 2019-01-11T10:13:42.000Z | from flask import jsonify, Blueprint, request, json, make_response
from werkzeug.security import generate_password_hash, check_password_hash
from datetime import datetime
from ..utils.validators import Validation
from ..models.auth_models import Users
v1_auth_blueprint = Blueprint('auth', __name__, url_prefix='/api/v1')
USER = Users()
VALIDATOR = Validation()
| 28.596026 | 90 | 0.568782 |
8a66a4e65b6c15a92cb15d2436631fabac501551 | 4,314 | py | Python | pint/testsuite/test_definitions.py | s-avni/pint | 4e33d44437991bf7c5e30977643f42ebd6ed40da | [
"BSD-3-Clause"
] | null | null | null | pint/testsuite/test_definitions.py | s-avni/pint | 4e33d44437991bf7c5e30977643f42ebd6ed40da | [
"BSD-3-Clause"
] | null | null | null | pint/testsuite/test_definitions.py | s-avni/pint | 4e33d44437991bf7c5e30977643f42ebd6ed40da | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import division, unicode_literals, print_function, absolute_import
from pint.util import (UnitsContainer)
from pint.converters import (ScaleConverter, OffsetConverter)
from pint.definitions import (Definition, PrefixDefinition, UnitDefinition,
DimensionDefinition, AliasDefinition)
from pint.testsuite import BaseTestCase
| 44.474227 | 89 | 0.660176 |
8a678b6dfe1f80688ee851169cd059181b03b309 | 5,922 | py | Python | electrum/dnssec.py | Jesusown/electrum | 0df05dd914c823acae1828cad3b20bdeb13150e9 | [
"MIT"
] | 5,905 | 2015-01-02T17:05:36.000Z | 2022-03-29T07:28:29.000Z | electrum/dnssec.py | Jesusown/electrum | 0df05dd914c823acae1828cad3b20bdeb13150e9 | [
"MIT"
] | 6,097 | 2015-01-01T21:20:25.000Z | 2022-03-31T23:55:01.000Z | electrum/dnssec.py | Jesusown/electrum | 0df05dd914c823acae1828cad3b20bdeb13150e9 | [
"MIT"
] | 2,202 | 2015-01-02T18:31:25.000Z | 2022-03-28T15:35:03.000Z | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Check DNSSEC trust chain.
# Todo: verify expiration dates
#
# Based on
# http://backreference.org/2010/11/17/dnssec-verification-with-dig/
# https://github.com/rthalley/dnspython/blob/master/tests/test_dnssec.py
import dns
import dns.name
import dns.query
import dns.dnssec
import dns.message
import dns.resolver
import dns.rdatatype
import dns.rdtypes.ANY.NS
import dns.rdtypes.ANY.CNAME
import dns.rdtypes.ANY.DLV
import dns.rdtypes.ANY.DNSKEY
import dns.rdtypes.ANY.DS
import dns.rdtypes.ANY.NSEC
import dns.rdtypes.ANY.NSEC3
import dns.rdtypes.ANY.NSEC3PARAM
import dns.rdtypes.ANY.RRSIG
import dns.rdtypes.ANY.SOA
import dns.rdtypes.ANY.TXT
import dns.rdtypes.IN.A
import dns.rdtypes.IN.AAAA
from .logging import get_logger
_logger = get_logger(__name__)
# hard-coded trust anchors (root KSKs)
trust_anchors = [
# KSK-2017:
dns.rrset.from_text('.', 1 , 'IN', 'DNSKEY', '257 3 8 AwEAAaz/tAm8yTn4Mfeh5eyI96WSVexTBAvkMgJzkKTOiW1vkIbzxeF3+/4RgWOq7HrxRixHlFlExOLAJr5emLvN7SWXgnLh4+B5xQlNVz8Og8kvArMtNROxVQuCaSnIDdD5LKyWbRd2n9WGe2R8PzgCmr3EgVLrjyBxWezF0jLHwVN8efS3rCj/EWgvIWgb9tarpVUDK/b58Da+sqqls3eNbuv7pr+eoZG+SrDK6nWeL3c6H5Apxz7LjVc1uTIdsIXxuOLYA4/ilBmSVIzuDWfdRUfhHdY6+cn8HFRm+2hM8AnXGXws9555KrUB5qihylGa8subX2Nn6UwNR1AkUTV74bU='),
# KSK-2010:
dns.rrset.from_text('.', 15202, 'IN', 'DNSKEY', '257 3 8 AwEAAagAIKlVZrpC6Ia7gEzahOR+9W29euxhJhVVLOyQbSEW0O8gcCjF FVQUTf6v58fLjwBd0YI0EzrAcQqBGCzh/RStIoO8g0NfnfL2MTJRkxoX bfDaUeVPQuYEhg37NZWAJQ9VnMVDxP/VHL496M/QZxkjf5/Efucp2gaD X6RS6CXpoY68LsvPVjR0ZSwzz1apAzvN9dlzEheX7ICJBBtuA6G3LQpz W5hOA2hzCTMjJPJ8LbqF6dsV6DoBQzgul0sGIcGOYl7OyQdXfZ57relS Qageu+ipAdTTJ25AsRTAoub8ONGcLmqrAmRLKBP1dfwhYB4N7knNnulq QxA+Uk1ihz0='),
]
| 39.218543 | 418 | 0.700777 |
8a681bd50a01e317584f76158f59adbe05396fb6 | 61,870 | py | Python | specs/d3d11.py | ds-hwang/apitrace | b74347ebae0d033a013c4de3efb0e9165e9cea8f | [
"MIT"
] | 1 | 2017-06-07T15:28:36.000Z | 2017-06-07T15:28:36.000Z | specs/d3d11.py | jciehl/apitrace | 0e01acc36de14e9ca7c0ced258767ffb99ac96ea | [
"MIT"
] | null | null | null | specs/d3d11.py | jciehl/apitrace | 0e01acc36de14e9ca7c0ced258767ffb99ac96ea | [
"MIT"
] | 1 | 2021-05-21T18:27:29.000Z | 2021-05-21T18:27:29.000Z | ##########################################################################
#
# Copyright 2012 Jose Fonseca
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
from dxgi import *
from d3dcommon import *
from d3d11sdklayers import *
HRESULT = MAKE_HRESULT([
"D3D11_ERROR_FILE_NOT_FOUND",
"D3D11_ERROR_TOO_MANY_UNIQUE_STATE_OBJECTS",
"D3D11_ERROR_TOO_MANY_UNIQUE_VIEW_OBJECTS",
"D3D11_ERROR_DEFERRED_CONTEXT_MAP_WITHOUT_INITIAL_DISCARD",
"D3DERR_INVALIDCALL",
"D3DERR_WASSTILLDRAWING",
])
# ID3D11DeviceChild is referenced throughout but its declaration was dropped
# in this copy; restored from the obvious usage below:
ID3D11DeviceChild = Interface("ID3D11DeviceChild", IUnknown)
ID3D11DepthStencilState = Interface("ID3D11DepthStencilState", ID3D11DeviceChild)
ID3D11BlendState = Interface("ID3D11BlendState", ID3D11DeviceChild)
ID3D11RasterizerState = Interface("ID3D11RasterizerState", ID3D11DeviceChild)
ID3D11Resource = Interface("ID3D11Resource", ID3D11DeviceChild)
ID3D11Buffer = Interface("ID3D11Buffer", ID3D11Resource)
ID3D11Texture1D = Interface("ID3D11Texture1D", ID3D11Resource)
ID3D11Texture2D = Interface("ID3D11Texture2D", ID3D11Resource)
ID3D11Texture3D = Interface("ID3D11Texture3D", ID3D11Resource)
ID3D11View = Interface("ID3D11View", ID3D11DeviceChild)
ID3D11ShaderResourceView = Interface("ID3D11ShaderResourceView", ID3D11View)
ID3D11RenderTargetView = Interface("ID3D11RenderTargetView", ID3D11View)
ID3D11DepthStencilView = Interface("ID3D11DepthStencilView", ID3D11View)
ID3D11UnorderedAccessView = Interface("ID3D11UnorderedAccessView", ID3D11View)
ID3D11VertexShader = Interface("ID3D11VertexShader", ID3D11DeviceChild)
ID3D11HullShader = Interface("ID3D11HullShader", ID3D11DeviceChild)
ID3D11DomainShader = Interface("ID3D11DomainShader", ID3D11DeviceChild)
ID3D11GeometryShader = Interface("ID3D11GeometryShader", ID3D11DeviceChild)
ID3D11PixelShader = Interface("ID3D11PixelShader", ID3D11DeviceChild)
ID3D11ComputeShader = Interface("ID3D11ComputeShader", ID3D11DeviceChild)
ID3D11InputLayout = Interface("ID3D11InputLayout", ID3D11DeviceChild)
ID3D11SamplerState = Interface("ID3D11SamplerState", ID3D11DeviceChild)
ID3D11Asynchronous = Interface("ID3D11Asynchronous", ID3D11DeviceChild)
ID3D11Query = Interface("ID3D11Query", ID3D11Asynchronous)
ID3D11Predicate = Interface("ID3D11Predicate", ID3D11Query)
ID3D11Counter = Interface("ID3D11Counter", ID3D11Asynchronous)
ID3D11ClassInstance = Interface("ID3D11ClassInstance", ID3D11DeviceChild)
ID3D11ClassLinkage = Interface("ID3D11ClassLinkage", ID3D11DeviceChild)
ID3D11CommandList = Interface("ID3D11CommandList", ID3D11DeviceChild)
ID3D11Device = Interface("ID3D11Device", IUnknown)
# ID3D11DeviceContext is used by the method tables below; its declaration was
# missing from this copy and is restored here:
ID3D11DeviceContext = Interface("ID3D11DeviceContext", ID3D11DeviceChild)
D3D11_INPUT_CLASSIFICATION = Enum("D3D11_INPUT_CLASSIFICATION", [
"D3D11_INPUT_PER_VERTEX_DATA",
"D3D11_INPUT_PER_INSTANCE_DATA",
])
D3D11_INPUT_ELEMENT_ALIGNED_BYTE_OFFSET = FakeEnum(UINT, [
"D3D11_APPEND_ALIGNED_ELEMENT",
])
D3D11_INPUT_ELEMENT_DESC = Struct("D3D11_INPUT_ELEMENT_DESC", [
(LPCSTR, "SemanticName"),
(UINT, "SemanticIndex"),
(DXGI_FORMAT, "Format"),
(UINT, "InputSlot"),
(D3D11_INPUT_ELEMENT_ALIGNED_BYTE_OFFSET, "AlignedByteOffset"),
(D3D11_INPUT_CLASSIFICATION, "InputSlotClass"),
(UINT, "InstanceDataStepRate"),
])
D3D11_FILL_MODE = Enum("D3D11_FILL_MODE", [
"D3D11_FILL_WIREFRAME",
"D3D11_FILL_SOLID",
])
D3D11_PRIMITIVE_TOPOLOGY = Enum("D3D11_PRIMITIVE_TOPOLOGY", [
"D3D11_PRIMITIVE_TOPOLOGY_UNDEFINED",
"D3D11_PRIMITIVE_TOPOLOGY_POINTLIST",
"D3D11_PRIMITIVE_TOPOLOGY_LINELIST",
"D3D11_PRIMITIVE_TOPOLOGY_LINESTRIP",
"D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST",
"D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP",
"D3D11_PRIMITIVE_TOPOLOGY_LINELIST_ADJ",
"D3D11_PRIMITIVE_TOPOLOGY_LINESTRIP_ADJ",
"D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST_ADJ",
"D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP_ADJ",
"D3D11_PRIMITIVE_TOPOLOGY_1_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_2_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_3_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_4_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_5_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_6_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_7_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_8_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_9_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_10_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_11_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_12_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_13_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_14_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_15_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_16_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_17_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_18_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_19_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_20_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_21_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_22_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_23_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_24_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_25_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_26_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_27_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_28_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_29_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_30_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_31_CONTROL_POINT_PATCHLIST",
"D3D11_PRIMITIVE_TOPOLOGY_32_CONTROL_POINT_PATCHLIST",
])
D3D11_PRIMITIVE = Enum("D3D11_PRIMITIVE", [
"D3D11_PRIMITIVE_UNDEFINED",
"D3D11_PRIMITIVE_POINT",
"D3D11_PRIMITIVE_LINE",
"D3D11_PRIMITIVE_TRIANGLE",
"D3D11_PRIMITIVE_LINE_ADJ",
"D3D11_PRIMITIVE_TRIANGLE_ADJ",
"D3D11_PRIMITIVE_1_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_2_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_3_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_4_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_5_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_6_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_7_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_8_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_9_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_10_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_11_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_12_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_13_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_14_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_15_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_16_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_17_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_18_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_19_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_20_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_21_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_22_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_23_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_24_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_25_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_26_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_27_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_28_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_29_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_30_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_31_CONTROL_POINT_PATCH",
"D3D11_PRIMITIVE_32_CONTROL_POINT_PATCH",
])
D3D11_CULL_MODE = Enum("D3D11_CULL_MODE", [
"D3D11_CULL_NONE",
"D3D11_CULL_FRONT",
"D3D11_CULL_BACK",
])
D3D11_SO_DECLARATION_ENTRY = Struct("D3D11_SO_DECLARATION_ENTRY", [
(UINT, "Stream"),
(LPCSTR, "SemanticName"),
(UINT, "SemanticIndex"),
(BYTE, "StartComponent"),
(BYTE, "ComponentCount"),
(BYTE, "OutputSlot"),
])
D3D11_VIEWPORT = Struct("D3D11_VIEWPORT", [
(FLOAT, "TopLeftX"),
(FLOAT, "TopLeftY"),
(FLOAT, "Width"),
(FLOAT, "Height"),
(FLOAT, "MinDepth"),
(FLOAT, "MaxDepth"),
])
D3D11_RESOURCE_DIMENSION = Enum("D3D11_RESOURCE_DIMENSION", [
"D3D11_RESOURCE_DIMENSION_UNKNOWN",
"D3D11_RESOURCE_DIMENSION_BUFFER",
"D3D11_RESOURCE_DIMENSION_TEXTURE1D",
"D3D11_RESOURCE_DIMENSION_TEXTURE2D",
"D3D11_RESOURCE_DIMENSION_TEXTURE3D",
])
D3D11_SRV_DIMENSION = Enum("D3D11_SRV_DIMENSION", [
"D3D11_SRV_DIMENSION_UNKNOWN",
"D3D11_SRV_DIMENSION_BUFFER",
"D3D11_SRV_DIMENSION_TEXTURE1D",
"D3D11_SRV_DIMENSION_TEXTURE1DARRAY",
"D3D11_SRV_DIMENSION_TEXTURE2D",
"D3D11_SRV_DIMENSION_TEXTURE2DARRAY",
"D3D11_SRV_DIMENSION_TEXTURE2DMS",
"D3D11_SRV_DIMENSION_TEXTURE2DMSARRAY",
"D3D11_SRV_DIMENSION_TEXTURE3D",
"D3D11_SRV_DIMENSION_TEXTURECUBE",
"D3D11_SRV_DIMENSION_TEXTURECUBEARRAY",
"D3D11_SRV_DIMENSION_BUFFEREX",
])
D3D11_DSV_DIMENSION = Enum("D3D11_DSV_DIMENSION", [
"D3D11_DSV_DIMENSION_UNKNOWN",
"D3D11_DSV_DIMENSION_TEXTURE1D",
"D3D11_DSV_DIMENSION_TEXTURE1DARRAY",
"D3D11_DSV_DIMENSION_TEXTURE2D",
"D3D11_DSV_DIMENSION_TEXTURE2DARRAY",
"D3D11_DSV_DIMENSION_TEXTURE2DMS",
"D3D11_DSV_DIMENSION_TEXTURE2DMSARRAY",
])
D3D11_RTV_DIMENSION = Enum("D3D11_RTV_DIMENSION", [
"D3D11_RTV_DIMENSION_UNKNOWN",
"D3D11_RTV_DIMENSION_BUFFER",
"D3D11_RTV_DIMENSION_TEXTURE1D",
"D3D11_RTV_DIMENSION_TEXTURE1DARRAY",
"D3D11_RTV_DIMENSION_TEXTURE2D",
"D3D11_RTV_DIMENSION_TEXTURE2DARRAY",
"D3D11_RTV_DIMENSION_TEXTURE2DMS",
"D3D11_RTV_DIMENSION_TEXTURE2DMSARRAY",
"D3D11_RTV_DIMENSION_TEXTURE3D",
])
D3D11_UAV_DIMENSION = Enum("D3D11_UAV_DIMENSION", [
"D3D11_UAV_DIMENSION_UNKNOWN",
"D3D11_UAV_DIMENSION_BUFFER",
"D3D11_UAV_DIMENSION_TEXTURE1D",
"D3D11_UAV_DIMENSION_TEXTURE1DARRAY",
"D3D11_UAV_DIMENSION_TEXTURE2D",
"D3D11_UAV_DIMENSION_TEXTURE2DARRAY",
"D3D11_UAV_DIMENSION_TEXTURE3D",
])
D3D11_USAGE = Enum("D3D11_USAGE", [
"D3D11_USAGE_DEFAULT",
"D3D11_USAGE_IMMUTABLE",
"D3D11_USAGE_DYNAMIC",
"D3D11_USAGE_STAGING",
])
D3D11_BIND_FLAG = Flags(UINT, [
"D3D11_BIND_VERTEX_BUFFER",
"D3D11_BIND_INDEX_BUFFER",
"D3D11_BIND_CONSTANT_BUFFER",
"D3D11_BIND_SHADER_RESOURCE",
"D3D11_BIND_STREAM_OUTPUT",
"D3D11_BIND_RENDER_TARGET",
"D3D11_BIND_DEPTH_STENCIL",
"D3D11_BIND_UNORDERED_ACCESS",
])
D3D11_CPU_ACCESS_FLAG = Flags(UINT, [
"D3D11_CPU_ACCESS_WRITE",
"D3D11_CPU_ACCESS_READ",
])
D3D11_RESOURCE_MISC_FLAG = Flags(UINT, [
"D3D11_RESOURCE_MISC_GENERATE_MIPS",
"D3D11_RESOURCE_MISC_SHARED",
"D3D11_RESOURCE_MISC_TEXTURECUBE",
"D3D11_RESOURCE_MISC_DRAWINDIRECT_ARGS",
"D3D11_RESOURCE_MISC_BUFFER_ALLOW_RAW_VIEWS",
"D3D11_RESOURCE_MISC_BUFFER_STRUCTURED",
"D3D11_RESOURCE_MISC_RESOURCE_CLAMP",
"D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX",
"D3D11_RESOURCE_MISC_GDI_COMPATIBLE",
])
D3D11_MAP = Enum("D3D11_MAP", [
"D3D11_MAP_READ",
"D3D11_MAP_WRITE",
"D3D11_MAP_READ_WRITE",
"D3D11_MAP_WRITE_DISCARD",
"D3D11_MAP_WRITE_NO_OVERWRITE",
])
D3D11_MAP_FLAG = Flags(UINT, [
"D3D11_MAP_FLAG_DO_NOT_WAIT",
])
D3D11_RAISE_FLAG = Flags(UINT, [
"D3D11_RAISE_FLAG_DRIVER_INTERNAL_ERROR",
])
D3D11_CLEAR_FLAG = Flags(UINT, [
"D3D11_CLEAR_DEPTH",
"D3D11_CLEAR_STENCIL",
])
D3D11_RECT = Alias("D3D11_RECT", RECT)
D3D11_BOX = Struct("D3D11_BOX", [
(UINT, "left"),
(UINT, "top"),
(UINT, "front"),
(UINT, "right"),
(UINT, "bottom"),
(UINT, "back"),
])
ID3D11DeviceChild.methods += [
StdMethod(Void, "GetDevice", [Out(Pointer(ObjPointer(ID3D11Device)), "ppDevice")]),
StdMethod(HRESULT, "GetPrivateData", [(REFGUID, "guid"), Out(Pointer(UINT), "pDataSize"), Out(OpaquePointer(Void), "pData")]),
StdMethod(HRESULT, "SetPrivateData", [(REFGUID, "guid"), (UINT, "DataSize"), (OpaqueBlob(Const(Void), "DataSize"), "pData")]),
StdMethod(HRESULT, "SetPrivateDataInterface", [(REFGUID, "guid"), (OpaquePointer(Const(IUnknown)), "pData")]),
]
D3D11_COMPARISON_FUNC = Enum("D3D11_COMPARISON_FUNC", [
"D3D11_COMPARISON_NEVER",
"D3D11_COMPARISON_LESS",
"D3D11_COMPARISON_EQUAL",
"D3D11_COMPARISON_LESS_EQUAL",
"D3D11_COMPARISON_GREATER",
"D3D11_COMPARISON_NOT_EQUAL",
"D3D11_COMPARISON_GREATER_EQUAL",
"D3D11_COMPARISON_ALWAYS",
])
D3D11_DEPTH_WRITE_MASK = Enum("D3D11_DEPTH_WRITE_MASK", [
"D3D11_DEPTH_WRITE_MASK_ZERO",
"D3D11_DEPTH_WRITE_MASK_ALL",
])
D3D11_STENCIL_OP = Enum("D3D11_STENCIL_OP", [
"D3D11_STENCIL_OP_KEEP",
"D3D11_STENCIL_OP_ZERO",
"D3D11_STENCIL_OP_REPLACE",
"D3D11_STENCIL_OP_INCR_SAT",
"D3D11_STENCIL_OP_DECR_SAT",
"D3D11_STENCIL_OP_INVERT",
"D3D11_STENCIL_OP_INCR",
"D3D11_STENCIL_OP_DECR",
])
D3D11_DEPTH_STENCILOP_DESC = Struct("D3D11_DEPTH_STENCILOP_DESC", [
(D3D11_STENCIL_OP, "StencilFailOp"),
(D3D11_STENCIL_OP, "StencilDepthFailOp"),
(D3D11_STENCIL_OP, "StencilPassOp"),
(D3D11_COMPARISON_FUNC, "StencilFunc"),
])
D3D11_DEPTH_STENCIL_DESC = Struct("D3D11_DEPTH_STENCIL_DESC", [
(BOOL, "DepthEnable"),
(D3D11_DEPTH_WRITE_MASK, "DepthWriteMask"),
(D3D11_COMPARISON_FUNC, "DepthFunc"),
(BOOL, "StencilEnable"),
(UINT8, "StencilReadMask"),
(UINT8, "StencilWriteMask"),
(D3D11_DEPTH_STENCILOP_DESC, "FrontFace"),
(D3D11_DEPTH_STENCILOP_DESC, "BackFace"),
])
ID3D11DepthStencilState.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_DEPTH_STENCIL_DESC), "pDesc")]),
]
D3D11_BLEND = Enum("D3D11_BLEND", [
"D3D11_BLEND_ZERO",
"D3D11_BLEND_ONE",
"D3D11_BLEND_SRC_COLOR",
"D3D11_BLEND_INV_SRC_COLOR",
"D3D11_BLEND_SRC_ALPHA",
"D3D11_BLEND_INV_SRC_ALPHA",
"D3D11_BLEND_DEST_ALPHA",
"D3D11_BLEND_INV_DEST_ALPHA",
"D3D11_BLEND_DEST_COLOR",
"D3D11_BLEND_INV_DEST_COLOR",
"D3D11_BLEND_SRC_ALPHA_SAT",
"D3D11_BLEND_BLEND_FACTOR",
"D3D11_BLEND_INV_BLEND_FACTOR",
"D3D11_BLEND_SRC1_COLOR",
"D3D11_BLEND_INV_SRC1_COLOR",
"D3D11_BLEND_SRC1_ALPHA",
"D3D11_BLEND_INV_SRC1_ALPHA",
])
D3D11_BLEND_OP = Enum("D3D11_BLEND_OP", [
"D3D11_BLEND_OP_ADD",
"D3D11_BLEND_OP_SUBTRACT",
"D3D11_BLEND_OP_REV_SUBTRACT",
"D3D11_BLEND_OP_MIN",
"D3D11_BLEND_OP_MAX",
])
D3D11_COLOR_WRITE_ENABLE = Enum("D3D11_COLOR_WRITE_ENABLE", [
"D3D11_COLOR_WRITE_ENABLE_ALL",
"D3D11_COLOR_WRITE_ENABLE_RED",
"D3D11_COLOR_WRITE_ENABLE_GREEN",
"D3D11_COLOR_WRITE_ENABLE_BLUE",
"D3D11_COLOR_WRITE_ENABLE_ALPHA",
])
D3D11_RENDER_TARGET_BLEND_DESC = Struct("D3D11_RENDER_TARGET_BLEND_DESC", [
(BOOL, "BlendEnable"),
(D3D11_BLEND, "SrcBlend"),
(D3D11_BLEND, "DestBlend"),
(D3D11_BLEND_OP, "BlendOp"),
(D3D11_BLEND, "SrcBlendAlpha"),
(D3D11_BLEND, "DestBlendAlpha"),
(D3D11_BLEND_OP, "BlendOpAlpha"),
(UINT8, "RenderTargetWriteMask"),
])
D3D11_BLEND_DESC = Struct("D3D11_BLEND_DESC", [
(BOOL, "AlphaToCoverageEnable"),
(BOOL, "IndependentBlendEnable"),
(Array(D3D11_RENDER_TARGET_BLEND_DESC, 8), "RenderTarget"),
])
ID3D11BlendState.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_BLEND_DESC), "pDesc")]),
]
D3D11_RASTERIZER_DESC = Struct("D3D11_RASTERIZER_DESC", [
(D3D11_FILL_MODE, "FillMode"),
(D3D11_CULL_MODE, "CullMode"),
(BOOL, "FrontCounterClockwise"),
(INT, "DepthBias"),
(FLOAT, "DepthBiasClamp"),
(FLOAT, "SlopeScaledDepthBias"),
(BOOL, "DepthClipEnable"),
(BOOL, "ScissorEnable"),
(BOOL, "MultisampleEnable"),
(BOOL, "AntialiasedLineEnable"),
])
ID3D11RasterizerState.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_RASTERIZER_DESC), "pDesc")]),
]
D3D11_SUBRESOURCE_DATA = Struct("D3D11_SUBRESOURCE_DATA", [
(OpaquePointer(Const(Void)), "pSysMem"),
(UINT, "SysMemPitch"),
(UINT, "SysMemSlicePitch"),
])
D3D11_MAPPED_SUBRESOURCE = Struct("D3D11_MAPPED_SUBRESOURCE", [
(OpaquePointer(Void), "pData"),
(UINT, "RowPitch"),
(UINT, "DepthPitch"),
])
ID3D11Resource.methods += [
StdMethod(Void, "GetType", [Out(Pointer(D3D11_RESOURCE_DIMENSION), "pResourceDimension")]),
StdMethod(Void, "SetEvictionPriority", [(UINT, "EvictionPriority")]),
StdMethod(UINT, "GetEvictionPriority", []),
]
D3D11_BUFFER_DESC = Struct("D3D11_BUFFER_DESC", [
(UINT, "ByteWidth"),
(D3D11_USAGE, "Usage"),
(D3D11_BIND_FLAG, "BindFlags"),
(D3D11_CPU_ACCESS_FLAG, "CPUAccessFlags"),
(D3D11_RESOURCE_MISC_FLAG, "MiscFlags"),
(UINT, "StructureByteStride"),
])
ID3D11Buffer.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_BUFFER_DESC), "pDesc")]),
]
D3D11_TEXTURE1D_DESC = Struct("D3D11_TEXTURE1D_DESC", [
(UINT, "Width"),
(UINT, "MipLevels"),
(UINT, "ArraySize"),
(DXGI_FORMAT, "Format"),
(D3D11_USAGE, "Usage"),
(D3D11_BIND_FLAG, "BindFlags"),
(D3D11_CPU_ACCESS_FLAG, "CPUAccessFlags"),
(D3D11_RESOURCE_MISC_FLAG, "MiscFlags"),
])
ID3D11Texture1D.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_TEXTURE1D_DESC), "pDesc")]),
]
D3D11_TEXTURE2D_DESC = Struct("D3D11_TEXTURE2D_DESC", [
(UINT, "Width"),
(UINT, "Height"),
(UINT, "MipLevels"),
(UINT, "ArraySize"),
(DXGI_FORMAT, "Format"),
(DXGI_SAMPLE_DESC, "SampleDesc"),
(D3D11_USAGE, "Usage"),
(D3D11_BIND_FLAG, "BindFlags"),
(D3D11_CPU_ACCESS_FLAG, "CPUAccessFlags"),
(D3D11_RESOURCE_MISC_FLAG, "MiscFlags"),
])
ID3D11Texture2D.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_TEXTURE2D_DESC), "pDesc")]),
]
D3D11_TEXTURE3D_DESC = Struct("D3D11_TEXTURE3D_DESC", [
(UINT, "Width"),
(UINT, "Height"),
(UINT, "Depth"),
(UINT, "MipLevels"),
(DXGI_FORMAT, "Format"),
(D3D11_USAGE, "Usage"),
(D3D11_BIND_FLAG, "BindFlags"),
(D3D11_CPU_ACCESS_FLAG, "CPUAccessFlags"),
(D3D11_RESOURCE_MISC_FLAG, "MiscFlags"),
])
ID3D11Texture3D.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_TEXTURE3D_DESC), "pDesc")]),
]
D3D11_TEXTURECUBE_FACE = Enum("D3D11_TEXTURECUBE_FACE", [
"D3D11_TEXTURECUBE_FACE_POSITIVE_X",
"D3D11_TEXTURECUBE_FACE_NEGATIVE_X",
"D3D11_TEXTURECUBE_FACE_POSITIVE_Y",
"D3D11_TEXTURECUBE_FACE_NEGATIVE_Y",
"D3D11_TEXTURECUBE_FACE_POSITIVE_Z",
"D3D11_TEXTURECUBE_FACE_NEGATIVE_Z",
])
ID3D11View.methods += [
StdMethod(Void, "GetResource", [Out(Pointer(ObjPointer(ID3D11Resource)), "ppResource")]),
]
D3D11_BUFFER_SRV = Struct("D3D11_BUFFER_SRV", [
(Union(None, [(UINT, "FirstElement"), (UINT, "ElementOffset")]), None),
(Union(None, [(UINT, "NumElements"), (UINT, "ElementWidth")]), None),
])
D3D11_BUFFEREX_SRV_FLAG = Flags(UINT, [
"D3D11_BUFFEREX_SRV_FLAG_RAW",
])
D3D11_BUFFEREX_SRV = Struct("D3D11_BUFFEREX_SRV", [
(UINT, "FirstElement"),
(UINT, "NumElements"),
(D3D11_BUFFEREX_SRV_FLAG, "Flags"),
])
D3D11_TEX1D_SRV = Struct("D3D11_TEX1D_SRV", [
(UINT, "MostDetailedMip"),
(UINT, "MipLevels"),
])
D3D11_TEX1D_ARRAY_SRV = Struct("D3D11_TEX1D_ARRAY_SRV", [
(UINT, "MostDetailedMip"),
(UINT, "MipLevels"),
(UINT, "FirstArraySlice"),
(UINT, "ArraySize"),
])
D3D11_TEX2D_SRV = Struct("D3D11_TEX2D_SRV", [
(UINT, "MostDetailedMip"),
(UINT, "MipLevels"),
])
D3D11_TEX2D_ARRAY_SRV = Struct("D3D11_TEX2D_ARRAY_SRV", [
(UINT, "MostDetailedMip"),
(UINT, "MipLevels"),
(UINT, "FirstArraySlice"),
(UINT, "ArraySize"),
])
D3D11_TEX3D_SRV = Struct("D3D11_TEX3D_SRV", [
(UINT, "MostDetailedMip"),
(UINT, "MipLevels"),
])
D3D11_TEXCUBE_SRV = Struct("D3D11_TEXCUBE_SRV", [
(UINT, "MostDetailedMip"),
(UINT, "MipLevels"),
])
D3D11_TEXCUBE_ARRAY_SRV = Struct("D3D11_TEXCUBE_ARRAY_SRV", [
(UINT, "MostDetailedMip"),
(UINT, "MipLevels"),
(UINT, "First2DArrayFace"),
(UINT, "NumCubes"),
])
D3D11_TEX2DMS_SRV = Struct("D3D11_TEX2DMS_SRV", [
(UINT, "UnusedField_NothingToDefine"),
])
D3D11_TEX2DMS_ARRAY_SRV = Struct("D3D11_TEX2DMS_ARRAY_SRV", [
(UINT, "FirstArraySlice"),
(UINT, "ArraySize"),
])
D3D11_SHADER_RESOURCE_VIEW_DESC = Struct("D3D11_SHADER_RESOURCE_VIEW_DESC", [
(DXGI_FORMAT, "Format"),
(D3D11_SRV_DIMENSION, "ViewDimension"),
(Union(None, [
(D3D11_BUFFER_SRV, "Buffer"),
(D3D11_TEX1D_SRV, "Texture1D"),
(D3D11_TEX1D_ARRAY_SRV, "Texture1DArray"),
(D3D11_TEX2D_SRV, "Texture2D"),
(D3D11_TEX2D_ARRAY_SRV, "Texture2DArray"),
(D3D11_TEX2DMS_SRV, "Texture2DMS"),
(D3D11_TEX2DMS_ARRAY_SRV, "Texture2DMSArray"),
(D3D11_TEX3D_SRV, "Texture3D"),
(D3D11_TEXCUBE_SRV, "TextureCube"),
(D3D11_TEXCUBE_ARRAY_SRV, "TextureCubeArray"),
(D3D11_BUFFEREX_SRV, "BufferEx"),
]), None),
])
ID3D11ShaderResourceView.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_SHADER_RESOURCE_VIEW_DESC), "pDesc")]),
]
D3D11_BUFFER_RTV = Struct("D3D11_BUFFER_RTV", [
(Union(None, [(UINT, "FirstElement"), (UINT, "ElementOffset")]), None),
(Union(None, [(UINT, "NumElements"), (UINT, "ElementWidth")]), None),
])
D3D11_TEX1D_RTV = Struct("D3D11_TEX1D_RTV", [
(UINT, "MipSlice"),
])
D3D11_TEX1D_ARRAY_RTV = Struct("D3D11_TEX1D_ARRAY_RTV", [
(UINT, "MipSlice"),
(UINT, "FirstArraySlice"),
(UINT, "ArraySize"),
])
D3D11_TEX2D_RTV = Struct("D3D11_TEX2D_RTV", [
(UINT, "MipSlice"),
])
D3D11_TEX2DMS_RTV = Struct("D3D11_TEX2DMS_RTV", [
(UINT, "UnusedField_NothingToDefine"),
])
D3D11_TEX2D_ARRAY_RTV = Struct("D3D11_TEX2D_ARRAY_RTV", [
(UINT, "MipSlice"),
(UINT, "FirstArraySlice"),
(UINT, "ArraySize"),
])
D3D11_TEX2DMS_ARRAY_RTV = Struct("D3D11_TEX2DMS_ARRAY_RTV", [
(UINT, "FirstArraySlice"),
(UINT, "ArraySize"),
])
D3D11_TEX3D_RTV = Struct("D3D11_TEX3D_RTV", [
(UINT, "MipSlice"),
(UINT, "FirstWSlice"),
(UINT, "WSize"),
])
D3D11_RENDER_TARGET_VIEW_DESC = Struct("D3D11_RENDER_TARGET_VIEW_DESC", [
(DXGI_FORMAT, "Format"),
(D3D11_RTV_DIMENSION, "ViewDimension"),
(Union(None, [
(D3D11_BUFFER_RTV, "Buffer"),
(D3D11_TEX1D_RTV, "Texture1D"),
(D3D11_TEX1D_ARRAY_RTV, "Texture1DArray"),
(D3D11_TEX2D_RTV, "Texture2D"),
(D3D11_TEX2D_ARRAY_RTV, "Texture2DArray"),
(D3D11_TEX2DMS_RTV, "Texture2DMS"),
(D3D11_TEX2DMS_ARRAY_RTV, "Texture2DMSArray"),
(D3D11_TEX3D_RTV, "Texture3D"),
]), None),
])
ID3D11RenderTargetView.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_RENDER_TARGET_VIEW_DESC), "pDesc")]),
]
D3D11_TEX1D_DSV = Struct("D3D11_TEX1D_DSV", [
(UINT, "MipSlice"),
])
D3D11_TEX1D_ARRAY_DSV = Struct("D3D11_TEX1D_ARRAY_DSV", [
(UINT, "MipSlice"),
(UINT, "FirstArraySlice"),
(UINT, "ArraySize"),
])
D3D11_TEX2D_DSV = Struct("D3D11_TEX2D_DSV", [
(UINT, "MipSlice"),
])
D3D11_TEX2D_ARRAY_DSV = Struct("D3D11_TEX2D_ARRAY_DSV", [
(UINT, "MipSlice"),
(UINT, "FirstArraySlice"),
(UINT, "ArraySize"),
])
D3D11_TEX2DMS_DSV = Struct("D3D11_TEX2DMS_DSV", [
(UINT, "UnusedField_NothingToDefine"),
])
D3D11_TEX2DMS_ARRAY_DSV = Struct("D3D11_TEX2DMS_ARRAY_DSV", [
(UINT, "FirstArraySlice"),
(UINT, "ArraySize"),
])
D3D11_DSV_FLAG = Flags(UINT, [
"D3D11_DSV_READ_ONLY_DEPTH",
"D3D11_DSV_READ_ONLY_STENCIL",
])
D3D11_DEPTH_STENCIL_VIEW_DESC = Struct("D3D11_DEPTH_STENCIL_VIEW_DESC", [
(DXGI_FORMAT, "Format"),
(D3D11_DSV_DIMENSION, "ViewDimension"),
(D3D11_DSV_FLAG, "Flags"),
(Union(None, [
(D3D11_TEX1D_DSV, "Texture1D"),
(D3D11_TEX1D_ARRAY_DSV, "Texture1DArray"),
(D3D11_TEX2D_DSV, "Texture2D"),
(D3D11_TEX2D_ARRAY_DSV, "Texture2DArray"),
(D3D11_TEX2DMS_DSV, "Texture2DMS"),
(D3D11_TEX2DMS_ARRAY_DSV, "Texture2DMSArray"),
]), None),
])
ID3D11DepthStencilView.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_DEPTH_STENCIL_VIEW_DESC), "pDesc")]),
]
D3D11_BUFFER_UAV_FLAG = Flags(UINT, [
"D3D11_BUFFER_UAV_FLAG_RAW",
"D3D11_BUFFER_UAV_FLAG_APPEND",
"D3D11_BUFFER_UAV_FLAG_COUNTER",
])
D3D11_BUFFER_UAV = Struct("D3D11_BUFFER_UAV", [
(UINT, "FirstElement"),
(UINT, "NumElements"),
(D3D11_BUFFER_UAV_FLAG, "Flags"),
])
D3D11_TEX1D_UAV = Struct("D3D11_TEX1D_UAV", [
(UINT, "MipSlice"),
])
D3D11_TEX1D_ARRAY_UAV = Struct("D3D11_TEX1D_ARRAY_UAV", [
(UINT, "MipSlice"),
(UINT, "FirstArraySlice"),
(UINT, "ArraySize"),
])
D3D11_TEX2D_UAV = Struct("D3D11_TEX2D_UAV", [
(UINT, "MipSlice"),
])
D3D11_TEX2D_ARRAY_UAV = Struct("D3D11_TEX2D_ARRAY_UAV", [
(UINT, "MipSlice"),
(UINT, "FirstArraySlice"),
(UINT, "ArraySize"),
])
D3D11_TEX3D_UAV = Struct("D3D11_TEX3D_UAV", [
(UINT, "MipSlice"),
(UINT, "FirstWSlice"),
(UINT, "WSize"),
])
D3D11_UNORDERED_ACCESS_VIEW_DESC = Struct("D3D11_UNORDERED_ACCESS_VIEW_DESC", [
(DXGI_FORMAT, "Format"),
(D3D11_UAV_DIMENSION, "ViewDimension"),
(Union(None, [
(D3D11_BUFFER_UAV, "Buffer"),
(D3D11_TEX1D_UAV, "Texture1D"),
(D3D11_TEX1D_ARRAY_UAV, "Texture1DArray"),
(D3D11_TEX2D_UAV, "Texture2D"),
(D3D11_TEX2D_ARRAY_UAV, "Texture2DArray"),
(D3D11_TEX3D_UAV, "Texture3D"),
]), None),
])
ID3D11UnorderedAccessView.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_UNORDERED_ACCESS_VIEW_DESC), "pDesc")]),
]
D3D11_FILTER = Enum("D3D11_FILTER", [
"D3D11_FILTER_MIN_MAG_MIP_POINT",
"D3D11_FILTER_MIN_MAG_POINT_MIP_LINEAR",
"D3D11_FILTER_MIN_POINT_MAG_LINEAR_MIP_POINT",
"D3D11_FILTER_MIN_POINT_MAG_MIP_LINEAR",
"D3D11_FILTER_MIN_LINEAR_MAG_MIP_POINT",
"D3D11_FILTER_MIN_LINEAR_MAG_POINT_MIP_LINEAR",
"D3D11_FILTER_MIN_MAG_LINEAR_MIP_POINT",
"D3D11_FILTER_MIN_MAG_MIP_LINEAR",
"D3D11_FILTER_ANISOTROPIC",
"D3D11_FILTER_COMPARISON_MIN_MAG_MIP_POINT",
"D3D11_FILTER_COMPARISON_MIN_MAG_POINT_MIP_LINEAR",
"D3D11_FILTER_COMPARISON_MIN_POINT_MAG_LINEAR_MIP_POINT",
"D3D11_FILTER_COMPARISON_MIN_POINT_MAG_MIP_LINEAR",
"D3D11_FILTER_COMPARISON_MIN_LINEAR_MAG_MIP_POINT",
"D3D11_FILTER_COMPARISON_MIN_LINEAR_MAG_POINT_MIP_LINEAR",
"D3D11_FILTER_COMPARISON_MIN_MAG_LINEAR_MIP_POINT",
"D3D11_FILTER_COMPARISON_MIN_MAG_MIP_LINEAR",
"D3D11_FILTER_COMPARISON_ANISOTROPIC",
])
D3D11_FILTER_TYPE = Enum("D3D11_FILTER_TYPE", [
"D3D11_FILTER_TYPE_POINT",
"D3D11_FILTER_TYPE_LINEAR",
])
D3D11_TEXTURE_ADDRESS_MODE = Enum("D3D11_TEXTURE_ADDRESS_MODE", [
"D3D11_TEXTURE_ADDRESS_WRAP",
"D3D11_TEXTURE_ADDRESS_MIRROR",
"D3D11_TEXTURE_ADDRESS_CLAMP",
"D3D11_TEXTURE_ADDRESS_BORDER",
"D3D11_TEXTURE_ADDRESS_MIRROR_ONCE",
])
D3D11_SAMPLER_DESC = Struct("D3D11_SAMPLER_DESC", [
(D3D11_FILTER, "Filter"),
(D3D11_TEXTURE_ADDRESS_MODE, "AddressU"),
(D3D11_TEXTURE_ADDRESS_MODE, "AddressV"),
(D3D11_TEXTURE_ADDRESS_MODE, "AddressW"),
(FLOAT, "MipLODBias"),
(UINT, "MaxAnisotropy"),
(D3D11_COMPARISON_FUNC, "ComparisonFunc"),
(Array(FLOAT, 4), "BorderColor"),
(FLOAT, "MinLOD"),
(FLOAT, "MaxLOD"),
])
ID3D11SamplerState.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_SAMPLER_DESC), "pDesc")]),
]
D3D11_FORMAT_SUPPORT = Flags(UINT, [
"D3D11_FORMAT_SUPPORT_BUFFER",
"D3D11_FORMAT_SUPPORT_IA_VERTEX_BUFFER",
"D3D11_FORMAT_SUPPORT_IA_INDEX_BUFFER",
"D3D11_FORMAT_SUPPORT_SO_BUFFER",
"D3D11_FORMAT_SUPPORT_TEXTURE1D",
"D3D11_FORMAT_SUPPORT_TEXTURE2D",
"D3D11_FORMAT_SUPPORT_TEXTURE3D",
"D3D11_FORMAT_SUPPORT_TEXTURECUBE",
"D3D11_FORMAT_SUPPORT_SHADER_LOAD",
"D3D11_FORMAT_SUPPORT_SHADER_SAMPLE",
"D3D11_FORMAT_SUPPORT_SHADER_SAMPLE_COMPARISON",
"D3D11_FORMAT_SUPPORT_SHADER_SAMPLE_MONO_TEXT",
"D3D11_FORMAT_SUPPORT_MIP",
"D3D11_FORMAT_SUPPORT_MIP_AUTOGEN",
"D3D11_FORMAT_SUPPORT_RENDER_TARGET",
"D3D11_FORMAT_SUPPORT_BLENDABLE",
"D3D11_FORMAT_SUPPORT_DEPTH_STENCIL",
"D3D11_FORMAT_SUPPORT_CPU_LOCKABLE",
"D3D11_FORMAT_SUPPORT_MULTISAMPLE_RESOLVE",
"D3D11_FORMAT_SUPPORT_DISPLAY",
"D3D11_FORMAT_SUPPORT_CAST_WITHIN_BIT_LAYOUT",
"D3D11_FORMAT_SUPPORT_MULTISAMPLE_RENDERTARGET",
"D3D11_FORMAT_SUPPORT_MULTISAMPLE_LOAD",
"D3D11_FORMAT_SUPPORT_SHADER_GATHER",
"D3D11_FORMAT_SUPPORT_BACK_BUFFER_CAST",
"D3D11_FORMAT_SUPPORT_TYPED_UNORDERED_ACCESS_VIEW",
"D3D11_FORMAT_SUPPORT_SHADER_GATHER_COMPARISON",
])
D3D11_FORMAT_SUPPORT2 = Enum("D3D11_FORMAT_SUPPORT2", [
"D3D11_FORMAT_SUPPORT2_UAV_ATOMIC_ADD",
"D3D11_FORMAT_SUPPORT2_UAV_ATOMIC_BITWISE_OPS",
"D3D11_FORMAT_SUPPORT2_UAV_ATOMIC_COMPARE_STORE_OR_COMPARE_EXCHANGE",
"D3D11_FORMAT_SUPPORT2_UAV_ATOMIC_EXCHANGE",
"D3D11_FORMAT_SUPPORT2_UAV_ATOMIC_SIGNED_MIN_OR_MAX",
"D3D11_FORMAT_SUPPORT2_UAV_ATOMIC_UNSIGNED_MIN_OR_MAX",
"D3D11_FORMAT_SUPPORT2_UAV_TYPED_LOAD",
"D3D11_FORMAT_SUPPORT2_UAV_TYPED_STORE",
])
ID3D11Asynchronous.methods += [
StdMethod(UINT, "GetDataSize", []),
]
D3D11_ASYNC_GETDATA_FLAG = Flags(UINT, [
"D3D11_ASYNC_GETDATA_DONOTFLUSH",
])
D3D11_QUERY = Enum("D3D11_QUERY", [
"D3D11_QUERY_EVENT",
"D3D11_QUERY_OCCLUSION",
"D3D11_QUERY_TIMESTAMP",
"D3D11_QUERY_TIMESTAMP_DISJOINT",
"D3D11_QUERY_PIPELINE_STATISTICS",
"D3D11_QUERY_OCCLUSION_PREDICATE",
"D3D11_QUERY_SO_STATISTICS",
"D3D11_QUERY_SO_OVERFLOW_PREDICATE",
"D3D11_QUERY_SO_STATISTICS_STREAM0",
"D3D11_QUERY_SO_OVERFLOW_PREDICATE_STREAM0",
"D3D11_QUERY_SO_STATISTICS_STREAM1",
"D3D11_QUERY_SO_OVERFLOW_PREDICATE_STREAM1",
"D3D11_QUERY_SO_STATISTICS_STREAM2",
"D3D11_QUERY_SO_OVERFLOW_PREDICATE_STREAM2",
"D3D11_QUERY_SO_STATISTICS_STREAM3",
"D3D11_QUERY_SO_OVERFLOW_PREDICATE_STREAM3",
])
D3D11_QUERY_MISC_FLAG = Flags(UINT, [
"D3D11_QUERY_MISC_PREDICATEHINT",
])
D3D11_QUERY_DESC = Struct("D3D11_QUERY_DESC", [
(D3D11_QUERY, "Query"),
(D3D11_QUERY_MISC_FLAG, "MiscFlags"),
])
ID3D11Query.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_QUERY_DESC), "pDesc")]),
]
D3D11_QUERY_DATA_TIMESTAMP_DISJOINT = Struct("D3D11_QUERY_DATA_TIMESTAMP_DISJOINT", [
(UINT64, "Frequency"),
(BOOL, "Disjoint"),
])
D3D11_QUERY_DATA_PIPELINE_STATISTICS = Struct("D3D11_QUERY_DATA_PIPELINE_STATISTICS", [
(UINT64, "IAVertices"),
(UINT64, "IAPrimitives"),
(UINT64, "VSInvocations"),
(UINT64, "GSInvocations"),
(UINT64, "GSPrimitives"),
(UINT64, "CInvocations"),
(UINT64, "CPrimitives"),
(UINT64, "PSInvocations"),
(UINT64, "HSInvocations"),
(UINT64, "DSInvocations"),
(UINT64, "CSInvocations"),
])
D3D11_QUERY_DATA_SO_STATISTICS = Struct("D3D11_QUERY_DATA_SO_STATISTICS", [
(UINT64, "NumPrimitivesWritten"),
(UINT64, "PrimitivesStorageNeeded"),
])
D3D11_COUNTER = Enum("D3D11_COUNTER", [
"D3D11_COUNTER_DEVICE_DEPENDENT_0",
])
D3D11_COUNTER_TYPE = Enum("D3D11_COUNTER_TYPE", [
"D3D11_COUNTER_TYPE_FLOAT32",
"D3D11_COUNTER_TYPE_UINT16",
"D3D11_COUNTER_TYPE_UINT32",
"D3D11_COUNTER_TYPE_UINT64",
])
D3D11_COUNTER_DESC = Struct("D3D11_COUNTER_DESC", [
(D3D11_COUNTER, "Counter"),
(UINT, "MiscFlags"),
])
D3D11_COUNTER_INFO = Struct("D3D11_COUNTER_INFO", [
(D3D11_COUNTER, "LastDeviceDependentCounter"),
(UINT, "NumSimultaneousCounters"),
(UINT8, "NumDetectableParallelUnits"),
])
ID3D11Counter.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_COUNTER_DESC), "pDesc")]),
]
D3D11_STANDARD_MULTISAMPLE_QUALITY_LEVELS = Enum("D3D11_STANDARD_MULTISAMPLE_QUALITY_LEVELS", [
"D3D11_STANDARD_MULTISAMPLE_PATTERN",
"D3D11_CENTER_MULTISAMPLE_PATTERN",
])
D3D11_DEVICE_CONTEXT_TYPE = Enum("D3D11_DEVICE_CONTEXT_TYPE", [
"D3D11_DEVICE_CONTEXT_IMMEDIATE",
"D3D11_DEVICE_CONTEXT_DEFERRED",
])
D3D11_CLASS_INSTANCE_DESC = Struct("D3D11_CLASS_INSTANCE_DESC", [
(UINT, "InstanceId"),
(UINT, "InstanceIndex"),
(UINT, "TypeId"),
(UINT, "ConstantBuffer"),
(UINT, "BaseConstantBufferOffset"),
(UINT, "BaseTexture"),
(UINT, "BaseSampler"),
(BOOL, "Created"),
])
ID3D11ClassInstance.methods += [
StdMethod(Void, "GetClassLinkage", [Out(Pointer(ObjPointer(ID3D11ClassLinkage)), "ppLinkage")]),
StdMethod(Void, "GetDesc", [Out(Pointer(D3D11_CLASS_INSTANCE_DESC), "pDesc")]),
StdMethod(Void, "GetInstanceName", [Out(LPSTR, "pInstanceName"), Out(Pointer(SIZE_T), "pBufferLength")]),
StdMethod(Void, "GetTypeName", [Out(LPSTR, "pTypeName"), Out(Pointer(SIZE_T), "pBufferLength")]),
]
ID3D11ClassLinkage.methods += [
StdMethod(HRESULT, "GetClassInstance", [(LPCSTR, "pClassInstanceName"), (UINT, "InstanceIndex"), Out(Pointer(ObjPointer(ID3D11ClassInstance)), "ppInstance")]),
StdMethod(HRESULT, "CreateClassInstance", [(LPCSTR, "pClassTypeName"), (UINT, "ConstantBufferOffset"), (UINT, "ConstantVectorOffset"), (UINT, "TextureOffset"), (UINT, "SamplerOffset"), Out(Pointer(ObjPointer(ID3D11ClassInstance)), "ppInstance")]),
]
ID3D11CommandList.methods += [
StdMethod(UINT, "GetContextFlags", []),
]
D3D11_FEATURE_DATA_THREADING = Struct("D3D11_FEATURE_DATA_THREADING", [
(BOOL, "DriverConcurrentCreates"),
(BOOL, "DriverCommandLists"),
])
D3D11_FEATURE_DATA_DOUBLES = Struct("D3D11_FEATURE_DATA_DOUBLES", [
(BOOL, "DoublePrecisionFloatShaderOps"),
])
D3D11_FEATURE_DATA_FORMAT_SUPPORT = Struct("D3D11_FEATURE_DATA_FORMAT_SUPPORT", [
(DXGI_FORMAT, "InFormat"),
(D3D11_FORMAT_SUPPORT, "OutFormatSupport"),
])
D3D11_FEATURE_DATA_FORMAT_SUPPORT2 = Struct("D3D11_FEATURE_DATA_FORMAT_SUPPORT2", [
(DXGI_FORMAT, "InFormat"),
(D3D11_FORMAT_SUPPORT2, "OutFormatSupport2"),
])
D3D11_FEATURE_DATA_D3D10_X_HARDWARE_OPTIONS = Struct("D3D11_FEATURE_DATA_D3D10_X_HARDWARE_OPTIONS", [
(BOOL, "ComputeShaders_Plus_RawAndStructuredBuffers_Via_Shader_4_x"),
])
D3D11_FEATURE, D3D11_FEATURE_DATA = EnumPolymorphic("D3D11_FEATURE", "Feature", [
("D3D11_FEATURE_THREADING", Pointer(D3D11_FEATURE_DATA_THREADING)),
("D3D11_FEATURE_DOUBLES", Pointer(D3D11_FEATURE_DATA_DOUBLES)),
("D3D11_FEATURE_FORMAT_SUPPORT", Pointer(D3D11_FEATURE_DATA_FORMAT_SUPPORT)),
("D3D11_FEATURE_FORMAT_SUPPORT2", Pointer(D3D11_FEATURE_DATA_FORMAT_SUPPORT2)),
("D3D11_FEATURE_D3D10_X_HARDWARE_OPTIONS", Pointer(D3D11_FEATURE_DATA_D3D10_X_HARDWARE_OPTIONS)),
], Blob(Void, "FeatureSupportDataSize"), False)
ID3D11DeviceContext.methods += [
StdMethod(Void, "VSSetConstantBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(Const(ObjPointer(ID3D11Buffer)), "NumBuffers"), "ppConstantBuffers")]),
StdMethod(Void, "PSSetShaderResources", [(UINT, "StartSlot"), (UINT, "NumViews"), (Array(Const(ObjPointer(ID3D11ShaderResourceView)), "NumViews"), "ppShaderResourceViews")]),
StdMethod(Void, "PSSetShader", [(ObjPointer(ID3D11PixelShader), "pPixelShader"), (Array(Const(ObjPointer(ID3D11ClassInstance)), "NumClassInstances"), "ppClassInstances"), (UINT, "NumClassInstances")]),
StdMethod(Void, "PSSetSamplers", [(UINT, "StartSlot"), (UINT, "NumSamplers"), (Array(Const(ObjPointer(ID3D11SamplerState)), "NumSamplers"), "ppSamplers")]),
StdMethod(Void, "VSSetShader", [(ObjPointer(ID3D11VertexShader), "pVertexShader"), (Array(Const(ObjPointer(ID3D11ClassInstance)), "NumClassInstances"), "ppClassInstances"), (UINT, "NumClassInstances")]),
StdMethod(Void, "DrawIndexed", [(UINT, "IndexCount"), (UINT, "StartIndexLocation"), (INT, "BaseVertexLocation")]),
StdMethod(Void, "Draw", [(UINT, "VertexCount"), (UINT, "StartVertexLocation")]),
StdMethod(HRESULT, "Map", [(ObjPointer(ID3D11Resource), "pResource"), (UINT, "Subresource"), (D3D11_MAP, "MapType"), (D3D11_MAP_FLAG, "MapFlags"), Out(Pointer(D3D11_MAPPED_SUBRESOURCE), "pMappedResource")]),
StdMethod(Void, "Unmap", [(ObjPointer(ID3D11Resource), "pResource"), (UINT, "Subresource")]),
StdMethod(Void, "PSSetConstantBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(Const(ObjPointer(ID3D11Buffer)), "NumBuffers"), "ppConstantBuffers")]),
StdMethod(Void, "IASetInputLayout", [(ObjPointer(ID3D11InputLayout), "pInputLayout")]),
StdMethod(Void, "IASetVertexBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(Const(ObjPointer(ID3D11Buffer)), "NumBuffers"), "ppVertexBuffers"), (Pointer(Const(UINT)), "pStrides"), (Pointer(Const(UINT)), "pOffsets")]),
StdMethod(Void, "IASetIndexBuffer", [(ObjPointer(ID3D11Buffer), "pIndexBuffer"), (DXGI_FORMAT, "Format"), (UINT, "Offset")]),
StdMethod(Void, "DrawIndexedInstanced", [(UINT, "IndexCountPerInstance"), (UINT, "InstanceCount"), (UINT, "StartIndexLocation"), (INT, "BaseVertexLocation"), (UINT, "StartInstanceLocation")]),
StdMethod(Void, "DrawInstanced", [(UINT, "VertexCountPerInstance"), (UINT, "InstanceCount"), (UINT, "StartVertexLocation"), (UINT, "StartInstanceLocation")]),
StdMethod(Void, "GSSetConstantBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(Const(ObjPointer(ID3D11Buffer)), "NumBuffers"), "ppConstantBuffers")]),
StdMethod(Void, "GSSetShader", [(ObjPointer(ID3D11GeometryShader), "pShader"), (Array(Const(ObjPointer(ID3D11ClassInstance)), "NumClassInstances"), "ppClassInstances"), (UINT, "NumClassInstances")]),
StdMethod(Void, "IASetPrimitiveTopology", [(D3D11_PRIMITIVE_TOPOLOGY, "Topology")]),
StdMethod(Void, "VSSetShaderResources", [(UINT, "StartSlot"), (UINT, "NumViews"), (Array(Const(ObjPointer(ID3D11ShaderResourceView)), "NumViews"), "ppShaderResourceViews")]),
StdMethod(Void, "VSSetSamplers", [(UINT, "StartSlot"), (UINT, "NumSamplers"), (Array(Const(ObjPointer(ID3D11SamplerState)), "NumSamplers"), "ppSamplers")]),
StdMethod(Void, "Begin", [(ObjPointer(ID3D11Asynchronous), "pAsync")]),
StdMethod(Void, "End", [(ObjPointer(ID3D11Asynchronous), "pAsync")]),
StdMethod(HRESULT, "GetData", [(ObjPointer(ID3D11Asynchronous), "pAsync"), Out(OpaqueBlob(Void, "DataSize"), "pData"), (UINT, "DataSize"), (D3D11_ASYNC_GETDATA_FLAG, "GetDataFlags")]),
StdMethod(Void, "SetPredication", [(ObjPointer(ID3D11Predicate), "pPredicate"), (BOOL, "PredicateValue")]),
StdMethod(Void, "GSSetShaderResources", [(UINT, "StartSlot"), (UINT, "NumViews"), (Array(Const(ObjPointer(ID3D11ShaderResourceView)), "NumViews"), "ppShaderResourceViews")]),
StdMethod(Void, "GSSetSamplers", [(UINT, "StartSlot"), (UINT, "NumSamplers"), (Array(Const(ObjPointer(ID3D11SamplerState)), "NumSamplers"), "ppSamplers")]),
StdMethod(Void, "OMSetRenderTargets", [(UINT, "NumViews"), (Array(Const(ObjPointer(ID3D11RenderTargetView)), "NumViews"), "ppRenderTargetViews"), (ObjPointer(ID3D11DepthStencilView), "pDepthStencilView")]),
StdMethod(Void, "OMSetRenderTargetsAndUnorderedAccessViews", [(UINT, "NumRTVs"), (Array(Const(ObjPointer(ID3D11RenderTargetView)), "NumRTVs"), "ppRenderTargetViews"), (ObjPointer(ID3D11DepthStencilView), "pDepthStencilView"), (UINT, "UAVStartSlot"), (UINT, "NumUAVs"), (Array(Const(ObjPointer(ID3D11UnorderedAccessView)), "NumUAVs"), "ppUnorderedAccessViews"), (Pointer(Const(UINT)), "pUAVInitialCounts")]),
StdMethod(Void, "OMSetBlendState", [(ObjPointer(ID3D11BlendState), "pBlendState"), (Array(Const(FLOAT), 4), "BlendFactor"), (UINT, "SampleMask")]),
StdMethod(Void, "OMSetDepthStencilState", [(ObjPointer(ID3D11DepthStencilState), "pDepthStencilState"), (UINT, "StencilRef")]),
StdMethod(Void, "SOSetTargets", [(UINT, "NumBuffers"), (Array(Const(ObjPointer(ID3D11Buffer)), "NumBuffers"), "ppSOTargets"), (Pointer(Const(UINT)), "pOffsets")]),
StdMethod(Void, "DrawAuto", []),
StdMethod(Void, "DrawIndexedInstancedIndirect", [(ObjPointer(ID3D11Buffer), "pBufferForArgs"), (UINT, "AlignedByteOffsetForArgs")]),
StdMethod(Void, "DrawInstancedIndirect", [(ObjPointer(ID3D11Buffer), "pBufferForArgs"), (UINT, "AlignedByteOffsetForArgs")]),
StdMethod(Void, "Dispatch", [(UINT, "ThreadGroupCountX"), (UINT, "ThreadGroupCountY"), (UINT, "ThreadGroupCountZ")]),
StdMethod(Void, "DispatchIndirect", [(ObjPointer(ID3D11Buffer), "pBufferForArgs"), (UINT, "AlignedByteOffsetForArgs")]),
StdMethod(Void, "RSSetState", [(ObjPointer(ID3D11RasterizerState), "pRasterizerState")]),
StdMethod(Void, "RSSetViewports", [(UINT, "NumViewports"), (Array(Const(D3D11_VIEWPORT), "NumViewports"), "pViewports")]),
StdMethod(Void, "RSSetScissorRects", [(UINT, "NumRects"), (Array(Const(D3D11_RECT), "NumRects"), "pRects")]),
StdMethod(Void, "CopySubresourceRegion", [(ObjPointer(ID3D11Resource), "pDstResource"), (UINT, "DstSubresource"), (UINT, "DstX"), (UINT, "DstY"), (UINT, "DstZ"), (ObjPointer(ID3D11Resource), "pSrcResource"), (UINT, "SrcSubresource"), (Pointer(Const(D3D11_BOX)), "pSrcBox")]),
StdMethod(Void, "CopyResource", [(ObjPointer(ID3D11Resource), "pDstResource"), (ObjPointer(ID3D11Resource), "pSrcResource")]),
StdMethod(Void, "UpdateSubresource", [(ObjPointer(ID3D11Resource), "pDstResource"), (UINT, "DstSubresource"), (Pointer(Const(D3D11_BOX)), "pDstBox"), (OpaquePointer(Const(Void)), "pSrcData"), (UINT, "SrcRowPitch"), (UINT, "SrcDepthPitch")]),
StdMethod(Void, "CopyStructureCount", [(ObjPointer(ID3D11Buffer), "pDstBuffer"), (UINT, "DstAlignedByteOffset"), (ObjPointer(ID3D11UnorderedAccessView), "pSrcView")]),
StdMethod(Void, "ClearRenderTargetView", [(ObjPointer(ID3D11RenderTargetView), "pRenderTargetView"), (Array(Const(FLOAT), 4), "ColorRGBA")]),
StdMethod(Void, "ClearUnorderedAccessViewUint", [(ObjPointer(ID3D11UnorderedAccessView), "pUnorderedAccessView"), (Array(Const(UINT), 4), "Values")]),
StdMethod(Void, "ClearUnorderedAccessViewFloat", [(ObjPointer(ID3D11UnorderedAccessView), "pUnorderedAccessView"), (Array(Const(FLOAT), 4), "Values")]),
StdMethod(Void, "ClearDepthStencilView", [(ObjPointer(ID3D11DepthStencilView), "pDepthStencilView"), (D3D11_CLEAR_FLAG, "ClearFlags"), (FLOAT, "Depth"), (UINT8, "Stencil")]),
StdMethod(Void, "GenerateMips", [(ObjPointer(ID3D11ShaderResourceView), "pShaderResourceView")]),
StdMethod(Void, "SetResourceMinLOD", [(ObjPointer(ID3D11Resource), "pResource"), (FLOAT, "MinLOD")]),
StdMethod(FLOAT, "GetResourceMinLOD", [(ObjPointer(ID3D11Resource), "pResource")]),
StdMethod(Void, "ResolveSubresource", [(ObjPointer(ID3D11Resource), "pDstResource"), (UINT, "DstSubresource"), (ObjPointer(ID3D11Resource), "pSrcResource"), (UINT, "SrcSubresource"), (DXGI_FORMAT, "Format")]),
StdMethod(Void, "ExecuteCommandList", [(ObjPointer(ID3D11CommandList), "pCommandList"), (BOOL, "RestoreContextState")]),
StdMethod(Void, "HSSetShaderResources", [(UINT, "StartSlot"), (UINT, "NumViews"), (Array(Const(ObjPointer(ID3D11ShaderResourceView)), "NumViews"), "ppShaderResourceViews")]),
StdMethod(Void, "HSSetShader", [(ObjPointer(ID3D11HullShader), "pHullShader"), (Array(Const(ObjPointer(ID3D11ClassInstance)), "NumClassInstances"), "ppClassInstances"), (UINT, "NumClassInstances")]),
StdMethod(Void, "HSSetSamplers", [(UINT, "StartSlot"), (UINT, "NumSamplers"), (Array(Const(ObjPointer(ID3D11SamplerState)), "NumSamplers"), "ppSamplers")]),
StdMethod(Void, "HSSetConstantBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(Const(ObjPointer(ID3D11Buffer)), "NumBuffers"), "ppConstantBuffers")]),
StdMethod(Void, "DSSetShaderResources", [(UINT, "StartSlot"), (UINT, "NumViews"), (Array(Const(ObjPointer(ID3D11ShaderResourceView)), "NumViews"), "ppShaderResourceViews")]),
StdMethod(Void, "DSSetShader", [(ObjPointer(ID3D11DomainShader), "pDomainShader"), (Array(Const(ObjPointer(ID3D11ClassInstance)), "NumClassInstances"), "ppClassInstances"), (UINT, "NumClassInstances")]),
StdMethod(Void, "DSSetSamplers", [(UINT, "StartSlot"), (UINT, "NumSamplers"), (Array(Const(ObjPointer(ID3D11SamplerState)), "NumSamplers"), "ppSamplers")]),
StdMethod(Void, "DSSetConstantBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(Const(ObjPointer(ID3D11Buffer)), "NumBuffers"), "ppConstantBuffers")]),
StdMethod(Void, "CSSetShaderResources", [(UINT, "StartSlot"), (UINT, "NumViews"), (Array(Const(ObjPointer(ID3D11ShaderResourceView)), "NumViews"), "ppShaderResourceViews")]),
StdMethod(Void, "CSSetUnorderedAccessViews", [(UINT, "StartSlot"), (UINT, "NumUAVs"), (Array(Const(ObjPointer(ID3D11UnorderedAccessView)), "NumUAVs"), "ppUnorderedAccessViews"), (Pointer(Const(UINT)), "pUAVInitialCounts")]),
StdMethod(Void, "CSSetShader", [(ObjPointer(ID3D11ComputeShader), "pComputeShader"), (Array(Const(ObjPointer(ID3D11ClassInstance)), "NumClassInstances"), "ppClassInstances"), (UINT, "NumClassInstances")]),
StdMethod(Void, "CSSetSamplers", [(UINT, "StartSlot"), (UINT, "NumSamplers"), (Array(Const(ObjPointer(ID3D11SamplerState)), "NumSamplers"), "ppSamplers")]),
StdMethod(Void, "CSSetConstantBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(Const(ObjPointer(ID3D11Buffer)), "NumBuffers"), "ppConstantBuffers")]),
StdMethod(Void, "VSGetConstantBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(ObjPointer(ID3D11Buffer), "NumBuffers"), "ppConstantBuffers")]),
StdMethod(Void, "PSGetShaderResources", [(UINT, "StartSlot"), (UINT, "NumViews"), (Array(ObjPointer(ID3D11ShaderResourceView), "NumViews"), "ppShaderResourceViews")]),
StdMethod(Void, "PSGetShader", [Out(Pointer(ObjPointer(ID3D11PixelShader)), "ppPixelShader"), Out(Array(ObjPointer(ID3D11ClassInstance), "*pNumClassInstances"), "ppClassInstances"), Out(Pointer(UINT), "pNumClassInstances")]),
StdMethod(Void, "PSGetSamplers", [(UINT, "StartSlot"), (UINT, "NumSamplers"), (Array(ObjPointer(ID3D11SamplerState), "NumSamplers"), "ppSamplers")]),
StdMethod(Void, "VSGetShader", [Out(Pointer(ObjPointer(ID3D11VertexShader)), "ppVertexShader"), Out(Array(ObjPointer(ID3D11ClassInstance), "*pNumClassInstances"), "ppClassInstances"), Out(Pointer(UINT), "pNumClassInstances")]),
StdMethod(Void, "PSGetConstantBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(ObjPointer(ID3D11Buffer), "NumBuffers"), "ppConstantBuffers")]),
StdMethod(Void, "IAGetInputLayout", [Out(Pointer(ObjPointer(ID3D11InputLayout)), "ppInputLayout")]),
StdMethod(Void, "IAGetVertexBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(ObjPointer(ID3D11Buffer), "NumBuffers"), "ppVertexBuffers"), Out(Pointer(UINT), "pStrides"), Out(Pointer(UINT), "pOffsets")]),
StdMethod(Void, "IAGetIndexBuffer", [Out(Pointer(ObjPointer(ID3D11Buffer)), "pIndexBuffer"), Out(Pointer(DXGI_FORMAT), "Format"), Out(Pointer(UINT), "Offset")]),
StdMethod(Void, "GSGetConstantBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(ObjPointer(ID3D11Buffer), "NumBuffers"), "ppConstantBuffers")]),
StdMethod(Void, "GSGetShader", [Out(Pointer(ObjPointer(ID3D11GeometryShader)), "ppGeometryShader"), Out(Array(ObjPointer(ID3D11ClassInstance), "*pNumClassInstances"), "ppClassInstances"), Out(Pointer(UINT), "pNumClassInstances")]),
StdMethod(Void, "IAGetPrimitiveTopology", [Out(Pointer(D3D11_PRIMITIVE_TOPOLOGY), "pTopology")]),
StdMethod(Void, "VSGetShaderResources", [(UINT, "StartSlot"), (UINT, "NumViews"), (Array(ObjPointer(ID3D11ShaderResourceView), "NumViews"), "ppShaderResourceViews")]),
StdMethod(Void, "VSGetSamplers", [(UINT, "StartSlot"), (UINT, "NumSamplers"), (Array(ObjPointer(ID3D11SamplerState), "NumSamplers"), "ppSamplers")]),
StdMethod(Void, "GetPredication", [Out(Pointer(ObjPointer(ID3D11Predicate)), "ppPredicate"), Out(Pointer(BOOL), "pPredicateValue")]),
StdMethod(Void, "GSGetShaderResources", [(UINT, "StartSlot"), (UINT, "NumViews"), (Array(ObjPointer(ID3D11ShaderResourceView), "NumViews"), "ppShaderResourceViews")]),
StdMethod(Void, "GSGetSamplers", [(UINT, "StartSlot"), (UINT, "NumSamplers"), (Array(ObjPointer(ID3D11SamplerState), "NumSamplers"), "ppSamplers")]),
StdMethod(Void, "OMGetRenderTargets", [(UINT, "NumViews"), (Array(ObjPointer(ID3D11RenderTargetView), "NumViews"), "ppRenderTargetViews"), Out(Pointer(ObjPointer(ID3D11DepthStencilView)), "ppDepthStencilView")]),
StdMethod(Void, "OMGetRenderTargetsAndUnorderedAccessViews", [(UINT, "NumRTVs"), (Array(ObjPointer(ID3D11RenderTargetView), "NumRTVs"), "ppRenderTargetViews"), Out(Pointer(ObjPointer(ID3D11DepthStencilView)), "ppDepthStencilView"), (UINT, "UAVStartSlot"), (UINT, "NumUAVs"), (Array(ObjPointer(ID3D11UnorderedAccessView), "NumUAVs"), "ppUnorderedAccessViews")]),
StdMethod(Void, "OMGetBlendState", [Out(Pointer(ObjPointer(ID3D11BlendState)), "ppBlendState"), Out(Array(FLOAT, 4), "BlendFactor"), Out(Pointer(UINT), "pSampleMask")]),
StdMethod(Void, "OMGetDepthStencilState", [Out(Pointer(ObjPointer(ID3D11DepthStencilState)), "ppDepthStencilState"), Out(Pointer(UINT), "pStencilRef")]),
StdMethod(Void, "SOGetTargets", [(UINT, "NumBuffers"), (Array(ObjPointer(ID3D11Buffer), "NumBuffers"), "ppSOTargets")]),
StdMethod(Void, "RSGetState", [Out(Pointer(ObjPointer(ID3D11RasterizerState)), "ppRasterizerState")]),
StdMethod(Void, "RSGetViewports", [Out(Pointer(UINT), "pNumViewports"), Out(Array(D3D11_VIEWPORT, "*pNumViewports"), "pViewports")]),
StdMethod(Void, "RSGetScissorRects", [Out(Pointer(UINT), "pNumRects"), Out(Array(D3D11_RECT, "*pNumRects"), "pRects")]),
StdMethod(Void, "HSGetShaderResources", [(UINT, "StartSlot"), (UINT, "NumViews"), (Array(ObjPointer(ID3D11ShaderResourceView), "NumViews"), "ppShaderResourceViews")]),
StdMethod(Void, "HSGetShader", [Out(Pointer(ObjPointer(ID3D11HullShader)), "ppHullShader"), Out(Array(ObjPointer(ID3D11ClassInstance), "*pNumClassInstances"), "ppClassInstances"), Out(Pointer(UINT), "pNumClassInstances")]),
StdMethod(Void, "HSGetSamplers", [(UINT, "StartSlot"), (UINT, "NumSamplers"), (Array(ObjPointer(ID3D11SamplerState), "NumSamplers"), "ppSamplers")]),
StdMethod(Void, "HSGetConstantBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(ObjPointer(ID3D11Buffer), "NumBuffers"), "ppConstantBuffers")]),
StdMethod(Void, "DSGetShaderResources", [(UINT, "StartSlot"), (UINT, "NumViews"), (Array(ObjPointer(ID3D11ShaderResourceView), "NumViews"), "ppShaderResourceViews")]),
StdMethod(Void, "DSGetShader", [Out(Pointer(ObjPointer(ID3D11DomainShader)), "ppDomainShader"), Out(Array(ObjPointer(ID3D11ClassInstance), "*pNumClassInstances"), "ppClassInstances"), Out(Pointer(UINT), "pNumClassInstances")]),
StdMethod(Void, "DSGetSamplers", [(UINT, "StartSlot"), (UINT, "NumSamplers"), (Array(ObjPointer(ID3D11SamplerState), "NumSamplers"), "ppSamplers")]),
StdMethod(Void, "DSGetConstantBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(ObjPointer(ID3D11Buffer), "NumBuffers"), "ppConstantBuffers")]),
StdMethod(Void, "CSGetShaderResources", [(UINT, "StartSlot"), (UINT, "NumViews"), (Array(ObjPointer(ID3D11ShaderResourceView), "NumViews"), "ppShaderResourceViews")]),
StdMethod(Void, "CSGetUnorderedAccessViews", [(UINT, "StartSlot"), (UINT, "NumUAVs"), (Array(ObjPointer(ID3D11UnorderedAccessView), "NumUAVs"), "ppUnorderedAccessViews")]),
StdMethod(Void, "CSGetShader", [Out(Pointer(ObjPointer(ID3D11ComputeShader)), "ppComputeShader"), Out(Array(ObjPointer(ID3D11ClassInstance), "*pNumClassInstances"), "ppClassInstances"), Out(Pointer(UINT), "pNumClassInstances")]),
StdMethod(Void, "CSGetSamplers", [(UINT, "StartSlot"), (UINT, "NumSamplers"), (Array(ObjPointer(ID3D11SamplerState), "NumSamplers"), "ppSamplers")]),
StdMethod(Void, "CSGetConstantBuffers", [(UINT, "StartSlot"), (UINT, "NumBuffers"), (Array(ObjPointer(ID3D11Buffer), "NumBuffers"), "ppConstantBuffers")]),
StdMethod(Void, "ClearState", []),
StdMethod(Void, "Flush", []),
StdMethod(D3D11_DEVICE_CONTEXT_TYPE, "GetType", []),
StdMethod(UINT, "GetContextFlags", []),
StdMethod(HRESULT, "FinishCommandList", [(BOOL, "RestoreDeferredContextState"), Out(Pointer(ObjPointer(ID3D11CommandList)), "ppCommandList")]),
]
D3D11_CREATE_DEVICE_FLAG = Flags(UINT, [
"D3D11_CREATE_DEVICE_SINGLETHREADED",
"D3D11_CREATE_DEVICE_DEBUG",
"D3D11_CREATE_DEVICE_SWITCH_TO_REF",
"D3D11_CREATE_DEVICE_PREVENT_INTERNAL_THREADING_OPTIMIZATIONS",
"D3D11_CREATE_DEVICE_BGRA_SUPPORT",
])
ID3D11Device.methods += [
StdMethod(HRESULT, "CreateBuffer", [(Pointer(Const(D3D11_BUFFER_DESC)), "pDesc"), (Pointer(Const(D3D11_SUBRESOURCE_DATA)), "pInitialData"), Out(Pointer(ObjPointer(ID3D11Buffer)), "ppBuffer")]),
StdMethod(HRESULT, "CreateTexture1D", [(Pointer(Const(D3D11_TEXTURE1D_DESC)), "pDesc"), (Pointer(Const(D3D11_SUBRESOURCE_DATA)), "pInitialData"), Out(Pointer(ObjPointer(ID3D11Texture1D)), "ppTexture1D")]),
StdMethod(HRESULT, "CreateTexture2D", [(Pointer(Const(D3D11_TEXTURE2D_DESC)), "pDesc"), (Pointer(Const(D3D11_SUBRESOURCE_DATA)), "pInitialData"), Out(Pointer(ObjPointer(ID3D11Texture2D)), "ppTexture2D")]),
StdMethod(HRESULT, "CreateTexture3D", [(Pointer(Const(D3D11_TEXTURE3D_DESC)), "pDesc"), (Pointer(Const(D3D11_SUBRESOURCE_DATA)), "pInitialData"), Out(Pointer(ObjPointer(ID3D11Texture3D)), "ppTexture3D")]),
StdMethod(HRESULT, "CreateShaderResourceView", [(ObjPointer(ID3D11Resource), "pResource"), (Pointer(Const(D3D11_SHADER_RESOURCE_VIEW_DESC)), "pDesc"), Out(Pointer(ObjPointer(ID3D11ShaderResourceView)), "ppSRView")]),
StdMethod(HRESULT, "CreateUnorderedAccessView", [(ObjPointer(ID3D11Resource), "pResource"), (Pointer(Const(D3D11_UNORDERED_ACCESS_VIEW_DESC)), "pDesc"), Out(Pointer(ObjPointer(ID3D11UnorderedAccessView)), "ppUAView")]),
StdMethod(HRESULT, "CreateRenderTargetView", [(ObjPointer(ID3D11Resource), "pResource"), (Pointer(Const(D3D11_RENDER_TARGET_VIEW_DESC)), "pDesc"), Out(Pointer(ObjPointer(ID3D11RenderTargetView)), "ppRTView")]),
StdMethod(HRESULT, "CreateDepthStencilView", [(ObjPointer(ID3D11Resource), "pResource"), (Pointer(Const(D3D11_DEPTH_STENCIL_VIEW_DESC)), "pDesc"), Out(Pointer(ObjPointer(ID3D11DepthStencilView)), "ppDepthStencilView")]),
StdMethod(HRESULT, "CreateInputLayout", [(Array(Const(D3D11_INPUT_ELEMENT_DESC), "NumElements"), "pInputElementDescs"), (UINT, "NumElements"), (Blob(Const(Void), "BytecodeLength"), "pShaderBytecodeWithInputSignature"), (SIZE_T, "BytecodeLength"), Out(Pointer(ObjPointer(ID3D11InputLayout)), "ppInputLayout")]),
StdMethod(HRESULT, "CreateVertexShader", [(Blob(Const(Void), "BytecodeLength"), "pShaderBytecode"), (SIZE_T, "BytecodeLength"), (ObjPointer(ID3D11ClassLinkage), "pClassLinkage"), Out(Pointer(ObjPointer(ID3D11VertexShader)), "ppVertexShader")]),
StdMethod(HRESULT, "CreateGeometryShader", [(Blob(Const(Void), "BytecodeLength"), "pShaderBytecode"), (SIZE_T, "BytecodeLength"), (ObjPointer(ID3D11ClassLinkage), "pClassLinkage"), Out(Pointer(ObjPointer(ID3D11GeometryShader)), "ppGeometryShader")]),
StdMethod(HRESULT, "CreateGeometryShaderWithStreamOutput", [(Blob(Const(Void), "BytecodeLength"), "pShaderBytecode"), (SIZE_T, "BytecodeLength"), (Array(Const(D3D11_SO_DECLARATION_ENTRY), "NumEntries"), "pSODeclaration"), (UINT, "NumEntries"), (Array(Const(UINT), "NumStrides"), "pBufferStrides"), (UINT, "NumStrides"), (UINT, "RasterizedStream"), (ObjPointer(ID3D11ClassLinkage), "pClassLinkage"), Out(Pointer(ObjPointer(ID3D11GeometryShader)), "ppGeometryShader")]),
StdMethod(HRESULT, "CreatePixelShader", [(Blob(Const(Void), "BytecodeLength"), "pShaderBytecode"), (SIZE_T, "BytecodeLength"), (ObjPointer(ID3D11ClassLinkage), "pClassLinkage"), Out(Pointer(ObjPointer(ID3D11PixelShader)), "ppPixelShader")]),
StdMethod(HRESULT, "CreateHullShader", [(Blob(Const(Void), "BytecodeLength"), "pShaderBytecode"), (SIZE_T, "BytecodeLength"), (ObjPointer(ID3D11ClassLinkage), "pClassLinkage"), Out(Pointer(ObjPointer(ID3D11HullShader)), "ppHullShader")]),
StdMethod(HRESULT, "CreateDomainShader", [(Blob(Const(Void), "BytecodeLength"), "pShaderBytecode"), (SIZE_T, "BytecodeLength"), (ObjPointer(ID3D11ClassLinkage), "pClassLinkage"), Out(Pointer(ObjPointer(ID3D11DomainShader)), "ppDomainShader")]),
StdMethod(HRESULT, "CreateComputeShader", [(Blob(Const(Void), "BytecodeLength"), "pShaderBytecode"), (SIZE_T, "BytecodeLength"), (ObjPointer(ID3D11ClassLinkage), "pClassLinkage"), Out(Pointer(ObjPointer(ID3D11ComputeShader)), "ppComputeShader")]),
StdMethod(HRESULT, "CreateClassLinkage", [Out(Pointer(ObjPointer(ID3D11ClassLinkage)), "ppLinkage")]),
StdMethod(HRESULT, "CreateBlendState", [(Pointer(Const(D3D11_BLEND_DESC)), "pBlendStateDesc"), Out(Pointer(ObjPointer(ID3D11BlendState)), "ppBlendState")]),
StdMethod(HRESULT, "CreateDepthStencilState", [(Pointer(Const(D3D11_DEPTH_STENCIL_DESC)), "pDepthStencilDesc"), Out(Pointer(ObjPointer(ID3D11DepthStencilState)), "ppDepthStencilState")]),
StdMethod(HRESULT, "CreateRasterizerState", [(Pointer(Const(D3D11_RASTERIZER_DESC)), "pRasterizerDesc"), Out(Pointer(ObjPointer(ID3D11RasterizerState)), "ppRasterizerState")]),
StdMethod(HRESULT, "CreateSamplerState", [(Pointer(Const(D3D11_SAMPLER_DESC)), "pSamplerDesc"), Out(Pointer(ObjPointer(ID3D11SamplerState)), "ppSamplerState")]),
StdMethod(HRESULT, "CreateQuery", [(Pointer(Const(D3D11_QUERY_DESC)), "pQueryDesc"), Out(Pointer(ObjPointer(ID3D11Query)), "ppQuery")]),
StdMethod(HRESULT, "CreatePredicate", [(Pointer(Const(D3D11_QUERY_DESC)), "pPredicateDesc"), Out(Pointer(ObjPointer(ID3D11Predicate)), "ppPredicate")]),
StdMethod(HRESULT, "CreateCounter", [(Pointer(Const(D3D11_COUNTER_DESC)), "pCounterDesc"), Out(Pointer(ObjPointer(ID3D11Counter)), "ppCounter")]),
StdMethod(HRESULT, "CreateDeferredContext", [(UINT, "ContextFlags"), Out(Pointer(ObjPointer(ID3D11DeviceContext)), "ppDeferredContext")]),
StdMethod(HRESULT, "OpenSharedResource", [(HANDLE, "hResource"), (REFIID, "ReturnedInterface"), Out(Pointer(ObjPointer(Void)), "ppResource")]),
StdMethod(HRESULT, "CheckFormatSupport", [(DXGI_FORMAT, "Format"), Out(Pointer(D3D11_FORMAT_SUPPORT), "pFormatSupport")]),
StdMethod(HRESULT, "CheckMultisampleQualityLevels", [(DXGI_FORMAT, "Format"), (UINT, "SampleCount"), Out(Pointer(UINT), "pNumQualityLevels")]),
StdMethod(Void, "CheckCounterInfo", [Out(Pointer(D3D11_COUNTER_INFO), "pCounterInfo")]),
StdMethod(HRESULT, "CheckCounter", [(Pointer(Const(D3D11_COUNTER_DESC)), "pDesc"), Out(Pointer(D3D11_COUNTER_TYPE), "pType"), Out(Pointer(UINT), "pActiveCounters"), Out(LPSTR, "szName"), Out(Pointer(UINT), "pNameLength"), Out(LPSTR, "szUnits"), Out(Pointer(UINT), "pUnitsLength"), Out(LPSTR, "szDescription"), Out(Pointer(UINT), "pDescriptionLength")]),
StdMethod(HRESULT, "CheckFeatureSupport", [(D3D11_FEATURE, "Feature"), Out(D3D11_FEATURE_DATA, "pFeatureSupportData"), (UINT, "FeatureSupportDataSize")]),
StdMethod(HRESULT, "GetPrivateData", [(REFGUID, "guid"), Out(Pointer(UINT), "pDataSize"), Out(OpaquePointer(Void), "pData")]),
StdMethod(HRESULT, "SetPrivateData", [(REFGUID, "guid"), (UINT, "DataSize"), (OpaqueBlob(Const(Void), "DataSize"), "pData")]),
StdMethod(HRESULT, "SetPrivateDataInterface", [(REFGUID, "guid"), (OpaquePointer(Const(IUnknown)), "pData")]),
StdMethod(D3D_FEATURE_LEVEL, "GetFeatureLevel", []),
StdMethod(D3D11_CREATE_DEVICE_FLAG, "GetCreationFlags", []),
StdMethod(HRESULT, "GetDeviceRemovedReason", []),
StdMethod(Void, "GetImmediateContext", [Out(Pointer(ObjPointer(ID3D11DeviceContext)), "ppImmediateContext")]),
StdMethod(HRESULT, "SetExceptionMode", [(D3D11_RAISE_FLAG, "RaiseFlags")]),
StdMethod(UINT, "GetExceptionMode", []),
]
d3d11 = API("d3d11")
d3d11.addFunctions([
StdFunction(HRESULT, "D3D11CreateDevice", [(ObjPointer(IDXGIAdapter), "pAdapter"), (D3D_DRIVER_TYPE, "DriverType"), (HMODULE, "Software"), (D3D11_CREATE_DEVICE_FLAG, "Flags"), (Array(Const(D3D_FEATURE_LEVEL), "FeatureLevels"), "pFeatureLevels"), (UINT, "FeatureLevels"), (UINT, "SDKVersion"), Out(Pointer(ObjPointer(ID3D11Device)), "ppDevice"), Out(Pointer(D3D_FEATURE_LEVEL), "pFeatureLevel"), Out(Pointer(ObjPointer(ID3D11DeviceContext)), "ppImmediateContext")]),
StdFunction(HRESULT, "D3D11CreateDeviceAndSwapChain", [(ObjPointer(IDXGIAdapter), "pAdapter"), (D3D_DRIVER_TYPE, "DriverType"), (HMODULE, "Software"), (D3D11_CREATE_DEVICE_FLAG, "Flags"), (Array(Const(D3D_FEATURE_LEVEL), "FeatureLevels"), "pFeatureLevels"), (UINT, "FeatureLevels"), (UINT, "SDKVersion"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC)), "pSwapChainDesc"), Out(Pointer(ObjPointer(IDXGISwapChain)), "ppSwapChain"), Out(Pointer(ObjPointer(ID3D11Device)), "ppDevice"), Out(Pointer(D3D_FEATURE_LEVEL), "pFeatureLevel"), Out(Pointer(ObjPointer(ID3D11DeviceContext)), "ppImmediateContext")]),
# XXX: Undocumented functions, called by d3d11sdklayers.dll when D3D11_CREATE_DEVICE_DEBUG is set
StdFunction(HRESULT, "D3D11CoreRegisterLayers", [LPCVOID, DWORD], internal=True),
StdFunction(SIZE_T, "D3D11CoreGetLayeredDeviceSize", [LPCVOID, DWORD], internal=True),
StdFunction(HRESULT, "D3D11CoreCreateLayeredDevice", [LPCVOID, DWORD, LPCVOID, (REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppvObj")], internal=True),
StdFunction(HRESULT, "D3D11CoreCreateDevice", [DWORD, DWORD, DWORD, DWORD, DWORD, DWORD, DWORD, DWORD, DWORD], internal=True),
])
d3d11.addInterfaces([
IDXGIAdapter1,
IDXGIDevice1,
IDXGIResource,
ID3D11Debug,
ID3D11InfoQueue,
ID3D11SwitchToRef,
])
| 50.016168 | 596 | 0.739227 |
8a682a5b6be55bf5cb429b4f53cde390f56c0458 | 1,244 | py | Python | day08.py | Pil0u/adventofcode2020 | 97a6c291fc1653bcb1ea7abd7f38e71e2c0458f8 | ["MIT"] | null | null | null | day08.py | Pil0u/adventofcode2020 | 97a6c291fc1653bcb1ea7abd7f38e71e2c0458f8 | ["MIT"] | null | null | null | day08.py | Pil0u/adventofcode2020 | 97a6c291fc1653bcb1ea7abd7f38e71e2c0458f8 | ["MIT"] | null | null | null | from copy import deepcopy
| 19.4375 | 46 | 0.498392 |
8a685db25a2acacd77798f8f41ad85739a6b001d | 3,825 | py | Python | train_fcn.py | onlyNata/segModel | 7a823e096b3ed7f554a331c5fba39e24c9e0d8bf | ["MIT"] | 3 | 2018-07-02T06:15:36.000Z | 2019-06-10T06:26:18.000Z | train_fcn.py | onlyNata/segModel | 7a823e096b3ed7f554a331c5fba39e24c9e0d8bf | ["MIT"] | null | null | null | train_fcn.py | onlyNata/segModel | 7a823e096b3ed7f554a331c5fba39e24c9e0d8bf | ["MIT"] | 1 | 2018-10-19T08:07:59.000Z | 2018-10-19T08:07:59.000Z | # -*- coding: utf-8 -*-
"""
Created on Tue Jun 26 16:34:21 2018
@author: LiHongWang
"""
import os
import tensorflow as tf
from model import fcn_vgg
from model import fcn_mobile
from model import fcn_resnet_v2
from data import input_data
slim = tf.contrib.slim
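# NOTE: the training entry point main() is elided in this snippet; judging by
# the imports above, it builds one of the FCN variants (VGG, MobileNet or
# ResNet-v2 backbone) and trains it on data from input_data via tf.contrib.slim.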
if __name__=='__main__':
main() | 34.151786 | 99 | 0.539608 |
8a69c6a560d7f1d6a12a9bb69281971b56733693 | 1,637 | py | Python | setup.py | xbabka01/filetype.py | faba42b86988bd21a50d5b20919ecff0c6a84957 | ["MIT"] | null | null | null | setup.py | xbabka01/filetype.py | faba42b86988bd21a50d5b20919ecff0c6a84957 | ["MIT"] | null | null | null | setup.py | xbabka01/filetype.py | faba42b86988bd21a50d5b20919ecff0c6a84957 | ["MIT"] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import codecs
from setuptools import find_packages, setup
setup(
name='filetype',
version='1.0.7',
description='Infer file type and MIME type of any file/buffer. '
'No external dependencies.',
long_description=codecs.open('README.rst', 'r',
encoding='utf-8', errors='ignore').read(),
keywords='file libmagic magic infer numbers magicnumbers discovery mime '
'type kind',
url='https://github.com/h2non/filetype.py',
download_url='https://github.com/h2non/filetype.py/tarball/master',
author='Tomas Aparicio',
author_email='[email protected]',
license='MIT',
license_files=['LICENSE'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: System',
'Topic :: System :: Filesystems',
'Topic :: Utilities'],
platforms=['any'],
packages=find_packages(exclude=['dist', 'build', 'docs', 'tests',
'examples']),
package_data={'filetype': ['LICENSE', '*.md']},
zip_safe=True)
| 38.069767 | 77 | 0.588882 |
8a69d4b012c5607f3bca22996d7b21d1f2aed261 | 2,049 | py | Python | demos/netmiko_textfsm.py | ryanaa08/NPA | 45173efa60713858bb8b1d884fe12c50fe69920c | ["BSD-Source-Code"] | 4 | 2019-01-15T16:15:26.000Z | 2021-12-05T16:03:15.000Z | demos/netmiko_textfsm.py | krishnakadiyala/NPAcourse | 74f097107839d990b44adcee69d4f949696a332c | ["BSD-Source-Code"] | null | null | null | demos/netmiko_textfsm.py | krishnakadiyala/NPAcourse | 74f097107839d990b44adcee69d4f949696a332c | ["BSD-Source-Code"] | 2 | 2019-07-04T16:38:19.000Z | 2020-01-31T15:38:27.000Z | # make sure templates are present and netmiko knows about them
# git clone https://github.com/networktocode/ntc-templates
# export NET_TEXTFSM=/home/ntc/ntc-templates/templates/
# see https://github.com/networktocode/ntc-templates/tree/master/templates
# for list of templates
from netmiko import ConnectHandler
import json
user = 'ntc'
pwd = 'ntc123'
d_type = 'cisco_ios'
csr1 = ConnectHandler(ip='csr1', username=user, password=pwd, device_type=d_type)
sh_ip_int_br = csr1.send_command("show ip int brief", use_textfsm=True)
# [{'status': 'up', 'intf': 'GigabitEthernet1', 'ipaddr': '10.0.0.51', 'proto': 'up'}, {'status': 'up', 'intf': 'GigabitEthernet2', 'ipaddr': 'unassigned', 'proto': 'up'}, {'status': 'up', 'intf': 'GigabitEthernet3', 'ipaddr': 'unassigned', 'proto': 'up'}, {'status': 'up', 'intf': 'GigabitEthernet4', 'ipaddr': '5.12.1.1', 'proto': 'up'}, {'status': 'up', 'intf': 'Loopback100', 'ipaddr': '10.200.1.20', 'proto': 'up'}]
# is type list
print (type(sh_ip_int_br))
# list of dicts
print (type(sh_ip_int_br[0]))
for each_dict in sh_ip_int_br:
    print("\n")
    for key in each_dict.keys():
        print(key)
for each_dict in sh_ip_int_br:
    print("\n")
    for key, value in each_dict.items():
        print(key + " is " + value)
sh_ver_ios = csr1.send_command("show version", use_textfsm=True)
# [{'running_image': 'packages.conf', 'hostname': 'csr1', 'uptime': '6 hours, 59 minutes', 'config_register': '0x2102', 'hardware': ['CSR1000V'], 'version': '16.6.2', 'serial': ['9KIBQAQ3OPE'], 'rommon': 'IOS-XE'}]
# print the json nicely
print (json.dumps(sh_ver_ios, indent=4))
print(sh_ver_ios)
# list
print(type(sh_ver_ios))
# each item is a dict
print(type(sh_ver_ios[0]))
# list of dicts with some nested lists within the dicts
for each_dict in sh_ver_ios:
    print("\n")
    for key, value in each_dict.items():
        if type(value) is list:
            print(key + " is ")
            for list_entry in value:
                print(list_entry)
        if type(value) is str:
            print(key + " is " + value)
| 35.947368 | 420 | 0.660322 |
8a69e368874ca389ea7a44e379f62b44b8a60c98 | 4,411 | py | Python | iap/validate_jwt.py | spitfire55/python-docs-samples | b8fe0d1c5c9f7f5d27965fa3367117af7b1f0aed | ["Apache-2.0"] | 4 | 2018-12-23T18:17:14.000Z | 2020-01-05T19:13:58.000Z | iap/validate_jwt.py | spitfire55/python-docs-samples | b8fe0d1c5c9f7f5d27965fa3367117af7b1f0aed | ["Apache-2.0"] | 16 | 2019-06-15T00:02:56.000Z | 2021-03-25T23:22:38.000Z | iap/validate_jwt.py | spitfire55/python-docs-samples | b8fe0d1c5c9f7f5d27965fa3367117af7b1f0aed | ["Apache-2.0"] | 4 | 2018-06-03T14:43:25.000Z | 2019-11-24T04:05:18.000Z | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Sample showing how to validate the Identity-Aware Proxy (IAP) JWT.
This code should be used by applications in Google Compute Engine-based
environments (such as Google App Engine flexible environment, Google
Compute Engine, or Google Container Engine) to provide an extra layer
of assurance that a request was authorized by IAP.
For applications running in the App Engine standard environment, use
App Engine's Users API instead.
"""
# [START iap_validate_jwt]
import jwt
import requests
def validate_iap_jwt_from_app_engine(iap_jwt, cloud_project_number,
cloud_project_id):
"""Validate a JWT passed to your App Engine app by Identity-Aware Proxy.
Args:
iap_jwt: The contents of the X-Goog-IAP-JWT-Assertion header.
cloud_project_number: The project *number* for your Google Cloud project.
This is returned by 'gcloud projects describe $PROJECT_ID', or
in the Project Info card in Cloud Console.
cloud_project_id: The project *ID* for your Google Cloud project.
Returns:
(user_id, user_email, error_str).
"""
expected_audience = '/projects/{}/apps/{}'.format(
cloud_project_number, cloud_project_id)
return _validate_iap_jwt(iap_jwt, expected_audience)
def validate_iap_jwt_from_compute_engine(iap_jwt, cloud_project_number,
backend_service_id):
"""Validate an IAP JWT for your (Compute|Container) Engine service.
Args:
iap_jwt: The contents of the X-Goog-IAP-JWT-Assertion header.
cloud_project_number: The project *number* for your Google Cloud project.
This is returned by 'gcloud projects describe $PROJECT_ID', or
in the Project Info card in Cloud Console.
backend_service_id: The ID of the backend service used to access the
application. See
https://cloud.google.com/iap/docs/signed-headers-howto
for details on how to get this value.
Returns:
(user_id, user_email, error_str).
"""
expected_audience = '/projects/{}/global/backendServices/{}'.format(
cloud_project_number, backend_service_id)
return _validate_iap_jwt(iap_jwt, expected_audience)
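# NOTE: _validate_iap_jwt itself is elided in this snippet. A minimal sketch of
# what it must do, inferred from the calls above and the key cache below (the
# exact error handling in the original sample may differ):
def _validate_iap_jwt(iap_jwt, expected_audience):
    try:
        key_id = jwt.get_unverified_header(iap_jwt).get('kid')
        if not key_id:
            return (None, None, '**ERROR: no key ID**')
        key = get_iap_key(key_id)
        decoded_jwt = jwt.decode(
            iap_jwt, key, algorithms=['ES256'], audience=expected_audience)
        return (decoded_jwt['sub'], decoded_jwt['email'], '')
    except (jwt.exceptions.InvalidTokenError,
            requests.exceptions.RequestException) as e:
        return (None, None, '**ERROR: JWT validation error {}**'.format(e))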
def get_iap_key(key_id):
"""Retrieves a public key from the list published by Identity-Aware Proxy,
re-fetching the key file if necessary.
"""
key_cache = get_iap_key.key_cache
key = key_cache.get(key_id)
if not key:
# Re-fetch the key file.
resp = requests.get(
'https://www.gstatic.com/iap/verify/public_key')
if resp.status_code != 200:
raise Exception(
'Unable to fetch IAP keys: {} / {} / {}'.format(
resp.status_code, resp.headers, resp.text))
key_cache = resp.json()
get_iap_key.key_cache = key_cache
key = key_cache.get(key_id)
if not key:
raise Exception('Key {!r} not found'.format(key_id))
return key
# Used to cache the Identity-Aware Proxy public keys. This code only
# refetches the file when a JWT is signed with a key not present in
# this cache.
get_iap_key.key_cache = {}
# [END iap_validate_jwt]
| 38.692982 | 79 | 0.682838 |
8a6b4f25018fb455967003872eafa0810ca93675 | 1,995 | py | Python | examples/calc.py | manatlan/htag | cf085077adf04bec8a2b059497efedb210c59936 | ["MIT"] | 1 | 2022-03-12T09:42:13.000Z | 2022-03-12T09:42:13.000Z | examples/calc.py | manatlan/thag | 0c57a103a8dbdbf9e1f09c759f1c35c1c3eff359 | ["MIT"] | null | null | null | examples/calc.py | manatlan/thag | 0c57a103a8dbdbf9e1f09c759f1c35c1c3eff359 | ["MIT"] | null | null | null | import os,sys; sys.path.insert(0,os.path.dirname(os.path.dirname(__file__)))
from htag import Tag
"""
This example shows you how to make a "Calc App"
(with physical buttons + keyboard events)
There is no work for rendering the layout ;-)
Can't be simpler !
"""
if __name__=="__main__":
# import logging
# logging.basicConfig(format='[%(levelname)-5s] %(name)s: %(message)s',level=logging.DEBUG)
# logging.getLogger("htag.tag").setLevel( logging.INFO )
# and execute it in a pywebview instance
from htag.runners import *
# here is another runner, in a simple browser (thru ajax calls)
BrowserHTTP( Calc ).run()
# PyWebWiew( Calc ).run()
| 28.913043 | 96 | 0.543358 |
8a6c2e5a6d6baef647e0e3b1e7b605691b398cfe | 188 | py | Python | res/example1.py | tghira16/Giraphics | 74265c4c0220c677e0fa3e5e65fd0b7087401106 | ["MIT"] | 1 | 2021-03-24T10:09:57.000Z | 2021-03-24T10:09:57.000Z | res/example1.py | tghira16/Giraphics | 74265c4c0220c677e0fa3e5e65fd0b7087401106 | ["MIT"] | null | null | null | res/example1.py | tghira16/Giraphics | 74265c4c0220c677e0fa3e5e65fd0b7087401106 | ["MIT"] | null | null | null | from giraphics.graphing.graph import Graph
g = Graph(800,600,8,6, 'example1.svg')
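# NOTE: "func" used below is elided in this snippet; any single-argument
# callable works, e.g. (assumption):
def func(x):
    return x ** 2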
g.bg()
g.grid()
g.axes()
g.graph(func)
g.save()
g.display() | 12.533333 | 42 | 0.632979 |
8a6c4e202130d51c730ab01bd3f2f21e5ec32862 | 758 | py | Python | tools/data.py | seanys/2D-Irregular-Packing-Algorithm | cc10edff2bc2631fcbcb47acf7bb3215e5c5023c | ["MIT"] | 29 | 2020-02-07T06:41:25.000Z | 2022-03-16T18:04:07.000Z | tools/data.py | seanys/2D-Irregular-Packing-Algorithm | cc10edff2bc2631fcbcb47acf7bb3215e5c5023c | ["MIT"] | 6 | 2020-04-27T01:36:27.000Z | 2022-01-31T11:59:05.000Z | tools/data.py | seanys/2D-Irregular-Packing-Algorithm | cc10edff2bc2631fcbcb47acf7bb3215e5c5023c | ["MIT"] | 12 | 2020-05-05T05:34:06.000Z | 2022-03-26T07:32:46.000Z | from tools.geofunc import GeoFunc
import pandas as pd
import json
def getData(index):
    # datasets flagged in the original notes: han, jakobs1, jakobs2
    # also flagged: shapes, shirts, swim, trousers
    name=["ga","albano","blaz1","blaz2","dighe1","dighe2","fu","han","jakobs1","jakobs2","mao","marques","shapes","shirts","swim","trousers"]
    print("dataset:",name[index])
    scale=[100,0.5,100,100,20,20,20,10,20,20,0.5,20,50]
    # NOTE: scale has 13 entries while name has 16, so index >= 13 (shirts,
    # swim, trousers) would raise an IndexError on the next line.
    print("scale:",scale[index])
    df = pd.read_csv("data/"+name[index]+".csv")
    polygons=[]
    for i in range(0,df.shape[0]):
        for j in range(0,df['num'][i]):
            poly=json.loads(df['polygon'][i])
            GeoFunc.normData(poly,scale[index])
            polygons.append(poly)
    return polygons
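# example usage (assuming the script runs from the repository root so that
# data/<name>.csv resolves):
#   polygons = getData(0)   # loads data/ga.csv and scales each polygon by 100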
| 36.095238 | 141 | 0.60686 |
8a6c803544f7e0d285bc37ff4aefd197349a5940 | 456 | py | Python | src/trw/reporting/__init__.py | civodlu/trw | b9a1cf045f61d6df9c65c014ef63b4048972dcdc | ["MIT"] | 3 | 2019-07-04T01:20:41.000Z | 2020-01-27T02:36:12.000Z | src/trw/reporting/__init__.py | civodlu/trw | b9a1cf045f61d6df9c65c014ef63b4048972dcdc | ["MIT"] | null | null | null | src/trw/reporting/__init__.py | civodlu/trw | b9a1cf045f61d6df9c65c014ef63b4048972dcdc | ["MIT"] | 2 | 2020-10-19T13:46:06.000Z | 2021-12-27T02:18:10.000Z | #from trw.utils import collect_hierarchical_module_name, collect_hierarchical_parameter_name, get_batch_n, to_value, \
# safe_lookup, len_batch
from .export import as_image_ui8, as_rgb_image, export_image, export_sample, export_as_image
from .table_sqlite import TableStream, SQLITE_TYPE_PATTERN, get_table_number_of_rows
from .reporting_bokeh import report, create_default_reporting_options
from .reporting_bokeh_samples import PanelDataSamplesTabular
| 65.142857 | 118 | 0.875 |
8a6d51f8a422fff8bc79749ffb6d71189dc006bc | 2,509 | py | Python | vframe_cli/commands/templates/image-mp.py | julescarbon/vframe | 0798841fa9eb7e1252e4cdf71d68d991c26acab8 | ["MIT"] | 1 | 2021-05-15T11:06:39.000Z | 2021-05-15T11:06:39.000Z | vframe_cli/commands/templates/image-mp.py | julescarbon/vframe | 0798841fa9eb7e1252e4cdf71d68d991c26acab8 | ["MIT"] | null | null | null | vframe_cli/commands/templates/image-mp.py | julescarbon/vframe | 0798841fa9eb7e1252e4cdf71d68d991c26acab8 | ["MIT"] | null | null | null | #############################################################################
#
# VFRAME
# MIT License
# Copyright (c) 2020 Adam Harvey and VFRAME
# https://vframe.io
#
#############################################################################
import click | 28.83908 | 94 | 0.610602 |
8a6d637336ee5d703603ebc196b3672612c215ab | 1,976 | py | Python | src/learndash/api_resources/user.py | MarkMacDon/learndash-python | a3fbfc45567a524b80c732d735f2ae101119f2e4 | [
"MIT"
] | null | null | null | src/learndash/api_resources/user.py | MarkMacDon/learndash-python | a3fbfc45567a524b80c732d735f2ae101119f2e4 | [
"MIT"
] | 1 | 2021-05-06T19:01:24.000Z | 2021-05-06T19:01:24.000Z | src/learndash/api_resources/user.py | MarkMacDon/learndash-python | a3fbfc45567a524b80c732d735f2ae101119f2e4 | [
"MIT"
] | 2 | 2021-05-05T22:45:04.000Z | 2021-07-24T08:47:02.000Z | import learndash
from learndash.api_resources.abstract import ListableAPIResource
from learndash.api_resources.abstract import RetrievableAPIResource
from learndash.api_resources.abstract import UpdateableAPIResource
from learndash.api_resources.abstract import NestedAPIResource
from learndash.api_resources.typing import UserDict
from learndash.api_resources.typing import UserCourseProgressDict
from learndash.api_resources.typing import UserCourseDict
from learndash.api_resources.typing import UserGroupDict
from learndash.api_resources.typing import UserQuizProgressDict
# class UserCourseProgressSteps(ListableAPIResource, NestedAPIResource):
| 35.927273 | 113 | 0.802632 |
8a6dd286ad198b0a16465871a4cd84854d419ad0 | 1,824 | py | Python | lib/galaxy/tool_util/deps/container_resolvers/__init__.py | sneumann/galaxy | f6011bab5b8adbabae4986a45849bb9158ffc8bb | [
"CC-BY-3.0"
] | 1 | 2019-07-27T19:30:55.000Z | 2019-07-27T19:30:55.000Z | lib/galaxy/tool_util/deps/container_resolvers/__init__.py | userssss/galaxy | 9662164ad68b39adf5a5606a7aa8e388f6a79f1e | [
"CC-BY-3.0"
] | 4 | 2021-02-08T20:28:34.000Z | 2022-03-02T02:52:55.000Z | lib/galaxy/tool_util/deps/container_resolvers/__init__.py | userssss/galaxy | 9662164ad68b39adf5a5606a7aa8e388f6a79f1e | [
"CC-BY-3.0"
] | 1 | 2018-05-30T07:38:54.000Z | 2018-05-30T07:38:54.000Z | """The module defines the abstract interface for resolving container images for tool execution."""
from abc import (
ABCMeta,
abstractmethod,
abstractproperty,
)
import six
from galaxy.util.dictifiable import Dictifiable
| 33.777778 | 98 | 0.707237 |
8a6e9d6c995b4c34ef5a6722c4973c2c7fb333f1 | 1,065 | py | Python | projects/eyetracking/gen_adhd_sin.py | nirdslab/streaminghub | a0d9f5f8be0ee6f090bd2b48b9f596695497c2bf | [
"MIT"
] | null | null | null | projects/eyetracking/gen_adhd_sin.py | nirdslab/streaminghub | a0d9f5f8be0ee6f090bd2b48b9f596695497c2bf | [
"MIT"
] | null | null | null | projects/eyetracking/gen_adhd_sin.py | nirdslab/streaminghub | a0d9f5f8be0ee6f090bd2b48b9f596695497c2bf | [
"MIT"
] | 1 | 2020-01-22T15:35:29.000Z | 2020-01-22T15:35:29.000Z | #!/usr/bin/env python3
import glob
import os
import pandas as pd
import dfs
SRC_DIR = f"{dfs.get_data_dir()}/adhd_sin_orig"
OUT_DIR = f"{dfs.get_data_dir()}/adhd_sin"
if __name__ == '__main__':
files = glob.glob(f"{SRC_DIR}/*.csv")
file_names = list(map(os.path.basename, files))
for file_name in file_names:
df: pd.DataFrame = pd.read_csv(f'{SRC_DIR}/{file_name}').set_index('EyeTrackerTimestamp').sort_index()[
['GazePointX (ADCSpx)', 'GazePointY (ADCSpx)', 'PupilLeft', 'PupilRight']].reset_index()
df.columns = ['t', 'x', 'y', 'dl', 'dr']
# fill blanks (order=interpolate(inter)->bfill+ffill(edges))->zerofill
df = df.apply(lambda x: x.interpolate().fillna(method="bfill").fillna(method="ffill")).fillna(0)
df['x'] = df['x'] / 1920
df['y'] = df['y'] / 1080
df['d'] = (df['dl'] + df['dr']) / 2
# start with t=0, and set unit to ms
df['t'] = (df['t'] - df['t'].min()) / 1000
df = df[['t', 'x', 'y', 'd']].round(6).set_index('t')
df.to_csv(f'{OUT_DIR}/{file_name}')
print(f'Processed: {file_name}')
| 35.5 | 107 | 0.613146 |
8a6f626dba5ce35c66724326d654b9ba19117497 | 4,322 | py | Python | dataProcessing.py | TauferLab/PENGUIN | af789163078310f2504b8a0163df4395ccf119f1 | [
"Apache-2.0"
] | null | null | null | dataProcessing.py | TauferLab/PENGUIN | af789163078310f2504b8a0163df4395ccf119f1 | [
"Apache-2.0"
] | null | null | null | dataProcessing.py | TauferLab/PENGUIN | af789163078310f2504b8a0163df4395ccf119f1 | [
"Apache-2.0"
] | null | null | null | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import os
import matplotlib.pyplot as plt
import CurveFit
import shutil
#find all DIRECTORIES containing non-hidden files ending in FILENAME
#get all non-hidden data files in DIRECTORY with extension EXT
#checking if loss ever doesn't decrease for numEpochs epochs in a row.
#dirpath is where the accuracy and loss files are stored. want to move the files into the same format expected by grabNNData.
#a function to read in information (e.g. accuracy, loss) stored at FILENAME
#slice data could be used to test values of E other than E=0.5, which we use by default
| 38.936937 | 204 | 0.657103 |
8a6fea40902a5d1ec59a6cdd9117e96fcdef70a1 | 572 | py | Python | algo_probs/newcoder/classic/nc52.py | Jackthebighead/recruiment-2022 | a81007908e3c2f65a6be3ff2d62dfb92d0753b0d | [
"MIT"
] | null | null | null | algo_probs/newcoder/classic/nc52.py | Jackthebighead/recruiment-2022 | a81007908e3c2f65a6be3ff2d62dfb92d0753b0d | [
"MIT"
] | null | null | null | algo_probs/newcoder/classic/nc52.py | Jackthebighead/recruiment-2022 | a81007908e3c2f65a6be3ff2d62dfb92d0753b0d | [
"MIT"
] | null | null | null | # '(',')','{','}','['']',"()""()[]{}""(]""([)]"
# @param s string
# @return bool
#
| 30.105263 | 114 | 0.47028 |
8a73038a9d54b6fdd609f321f9fbc694a2017b7b | 2,385 | py | Python | piecrust/processing/util.py | airbornemint/PieCrust2 | bd8e44a1a3ba646a9ebfbb4d4f1fa01a1daa3beb | [
"Apache-2.0"
] | null | null | null | piecrust/processing/util.py | airbornemint/PieCrust2 | bd8e44a1a3ba646a9ebfbb4d4f1fa01a1daa3beb | [
"Apache-2.0"
] | null | null | null | piecrust/processing/util.py | airbornemint/PieCrust2 | bd8e44a1a3ba646a9ebfbb4d4f1fa01a1daa3beb | [
"Apache-2.0"
] | null | null | null | import os.path
import time
import logging
import yaml
from piecrust.processing.base import Processor
logger = logging.getLogger(__name__)
| 28.392857 | 71 | 0.554717 |
8a7310d8abb463c70846c800ef296e8c1423ac2b | 186 | py | Python | src/events/cell_pressed.py | ArcosJuan/Get-out-of-my-fucking-maze | ca2cfeaaeecb6c6f583ad647d020f25176170805 | [
"MIT"
] | 2 | 2021-09-09T14:03:40.000Z | 2021-11-03T03:35:55.000Z | src/events/cell_pressed.py | ArcosJuan/Get-out-of-my-fucking-maze | ca2cfeaaeecb6c6f583ad647d020f25176170805 | [
"MIT"
] | null | null | null | src/events/cell_pressed.py | ArcosJuan/Get-out-of-my-fucking-maze | ca2cfeaaeecb6c6f583ad647d020f25176170805 | [
"MIT"
] | null | null | null | from src.events import Event
| 18.6 | 33 | 0.672043 |
8a73f2115b3d49a7048eebbbf6a7d009bf2bcb02 | 864 | py | Python | TopQuarkAnalysis/TopJetCombination/python/TtSemiLepJetCombMaxSumPtWMass_cfi.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | TopQuarkAnalysis/TopJetCombination/python/TtSemiLepJetCombMaxSumPtWMass_cfi.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | TopQuarkAnalysis/TopJetCombination/python/TtSemiLepJetCombMaxSumPtWMass_cfi.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | import FWCore.ParameterSet.Config as cms
#
# module to make the MaxSumPtWMass jet combination
#
findTtSemiLepJetCombMaxSumPtWMass = cms.EDProducer("TtSemiLepJetCombMaxSumPtWMass",
## jet input
jets = cms.InputTag("selectedPatJets"),
## lepton input
leps = cms.InputTag("selectedPatMuons"),
## maximum number of jets to be considered
maxNJets = cms.int32(4),
## nominal WMass parameter (in GeV)
wMass = cms.double(80.4),
## use b-tagging two distinguish between light and b jets
useBTagging = cms.bool(False),
## choose algorithm for b-tagging
bTagAlgorithm = cms.string("trackCountingHighEffBJetTags"),
## minimum b discriminator value required for b jets and
## maximum b discriminator value allowed for non-b jets
minBDiscBJets = cms.double(1.0),
maxBDiscLightJets = cms.double(3.0)
)
| 36 | 83 | 0.706019 |
8a746baf4af656a91220d07018cb78e6eb2e1b1f | 119 | py | Python | xortool/__init__.py | runapp/xortool | 9dac27387e7883775936a31e67598eaba182e053 | [
"MIT"
] | 14 | 2017-06-14T06:10:07.000Z | 2019-02-22T03:21:15.000Z | Cryptography/tools/xortool-master/xortool/__init__.py | rookie-12/My-Gray-Hacker-Resources | e9b10ac7b0e557a9e624a5a6e761f9af4488d777 | [
"MIT"
] | 1 | 2021-04-30T21:19:32.000Z | 2021-04-30T21:19:32.000Z | Cryptography/tools/xortool-master/xortool/__init__.py | rookie-12/My-Gray-Hacker-Resources | e9b10ac7b0e557a9e624a5a6e761f9af4488d777 | [
"MIT"
] | 7 | 2015-10-01T09:47:05.000Z | 2022-01-21T14:25:37.000Z | #!/usr/bin/env python
#-*- coding:utf-8 -*-
__all__ = ["args", "colors", "libcolors", "routine"]
__version__ = "0.96"
| 19.833333 | 52 | 0.605042 |
8a748a255fe78209cc5338aaab9ff134d24befab | 1,134 | py | Python | baopig/ressources/ressources.py | ChreSyr/baopig | 6264ab9a851b1ed0a031292abe7f159a53b3fc5e | [
"MIT"
] | null | null | null | baopig/ressources/ressources.py | ChreSyr/baopig | 6264ab9a851b1ed0a031292abe7f159a53b3fc5e | [
"MIT"
] | null | null | null | baopig/ressources/ressources.py | ChreSyr/baopig | 6264ab9a851b1ed0a031292abe7f159a53b3fc5e | [
"MIT"
] | null | null | null |
from baopig.pybao.objectutilities import Object
from baopig.pybao.issomething import *
# TODO : ButtonRessourcePack.style.create_surface(size)
ressources = _RessourcePack()
| 19.894737 | 68 | 0.666667 |
8a75b4a74e6ecd635d9404db9ea5df06d5114069 | 10,282 | py | Python | bufr_extract_unique_stations.py | glamod/glamod-misc | 4c8743dd3aa436377017c49bec990b11fe1c6f7d | [
"BSD-3-Clause"
] | null | null | null | bufr_extract_unique_stations.py | glamod/glamod-misc | 4c8743dd3aa436377017c49bec990b11fe1c6f7d | [
"BSD-3-Clause"
] | 16 | 2018-10-23T08:06:18.000Z | 2018-10-30T10:20:01.000Z | bufr_extract_unique_stations.py | glamod/glamod-misc | 4c8743dd3aa436377017c49bec990b11fe1c6f7d | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python2.7
"""
Extract unique set of station locations (and names) along with number of obs
RJHD - Exeter - October 2017
"""
# ECMWF import defaults
import traceback
import sys
from eccodes import *
# RJHD imports
import cartopy
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import gc
VERBOSE = 1 # verbose error reporting.
ATTRS = [
'code',
'units',
'scale',
'reference',
'width'
]
INTMDI = 2147483647
#***************************************************
#***************************************************
def scatter_map(outname, data, lons, lats, cmap, bounds, cb_label, title = "", figtext = "", doText = False):
'''
Standard scatter map
:param str outname: output filename root
:param array data: data to plot
:param array lons: longitudes
:param array lats: latitudes
:param obj cmap: colourmap to use
:param array bounds: bounds for discrete colormap
:param str cb_label: colorbar label
'''
norm=mpl.cm.colors.BoundaryNorm(bounds,cmap.N)
fig = plt.figure(figsize =(10,6.5))
plt.clf()
ax = plt.axes([0.05, 0.10, 0.90, 0.90], projection=cartopy.crs.Robinson())
ax.gridlines() #draw_labels=True)
ax.add_feature(cartopy.feature.LAND, zorder = 0, facecolor = "0.9", edgecolor = "k")
ax.coastlines()
ext = ax.get_extent() # save the original extent
scatter = plt.scatter(lons, lats, c = data, cmap = cmap, norm = norm, s=10, \
transform = cartopy.crs.Geodetic(), edgecolor = "r", linewidth = 0.1)
cb=plt.colorbar(scatter, orientation = 'horizontal', pad = 0.05, fraction = 0.05, \
aspect = 30, ticks = bounds[1:-1], label = cb_label, drawedges=True)
# thicken border of colorbar and the dividers
# http://stackoverflow.com/questions/14477696/customizing-colorbar-border-color-on-matplotlib
# cb.set_ticklabels(["{:g}".format(b) for b in bounds[1:-1]])
# cb.outline.set_color('k')
# cb.outline.set_linewidth(2)
cb.dividers.set_color('k')
cb.dividers.set_linewidth(2)
ax.set_extent(ext, ax.projection) # fix the extent change from colormesh
plt.title(title)
if doText: plt.text(0.01, 0.98, "#stations: {}".format(data.shape[0]), transform = ax.transAxes, fontsize = 10)
plt.savefig(outname)
plt.close()
return # scatter_map
#***************************************************
#***************************************************
if __name__ == "__main__":
import argparse
# set up keyword arguments
parser = argparse.ArgumentParser()
parser.add_argument('--ms', dest='ms', action='store', default = "era40_",
help='Run on ERA40 ["era40_"] (default) or ERA-I ["erai_"] data')
parser.add_argument('--year', dest='year', action='store', default = 1980,
help='Which year to process - default 1980')
args = parser.parse_args()
main(ms = args.ms, year = args.year)
sys.exit()
#***************************************************
# END
#***************************************************
| 33.167742 | 184 | 0.540751 |
8a7777964dadf66bcb5c8207f5f26c1301e49cee | 3,977 | py | Python | libsaas/services/twilio/applications.py | MidtownFellowship/libsaas | 541bb731b996b08ede1d91a235cb82895765c38a | [
"MIT"
] | 155 | 2015-01-27T15:17:59.000Z | 2022-02-20T00:14:08.000Z | libsaas/services/twilio/applications.py | MidtownFellowship/libsaas | 541bb731b996b08ede1d91a235cb82895765c38a | [
"MIT"
] | 14 | 2015-01-12T08:22:37.000Z | 2021-06-16T19:49:31.000Z | libsaas/services/twilio/applications.py | MidtownFellowship/libsaas | 541bb731b996b08ede1d91a235cb82895765c38a | [
"MIT"
] | 43 | 2015-01-28T22:41:45.000Z | 2021-09-21T04:44:26.000Z | from libsaas import http, parsers
from libsaas.services import base
from libsaas.services.twilio import resource
| 28.007042 | 78 | 0.652753 |
8a78745915eb3a4aaf90865a024b4d8bafd46ca7 | 5,151 | py | Python | research/gnn/sgcn/postprocess.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | [
"Apache-2.0"
] | 1 | 2021-11-18T08:17:44.000Z | 2021-11-18T08:17:44.000Z | research/gnn/sgcn/postprocess.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | [
"Apache-2.0"
] | null | null | null | research/gnn/sgcn/postprocess.py | leelige/mindspore | 5199e05ba3888963473f2b07da3f7bca5b9ef6dc | [
"Apache-2.0"
] | 2 | 2019-09-01T06:17:04.000Z | 2019-10-04T08:39:45.000Z | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
postprocess.
"""
import os
import argparse
import numpy as np
from src.ms_utils import calculate_auc
from mindspore import context, load_checkpoint
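# NOTE: the softmax helper used in score_model below is elided in this snippet;
# a minimal, numerically stable row-wise version (assumed equivalent):
def softmax(x):
    e = np.exp(x - np.max(x, axis=-1, keepdims=True))
    return e / e.sum(axis=-1, keepdims=True)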
def score_model(preds, test_pos, test_neg, weight, bias):
"""
Score the model on the test set edges in each epoch.
Args:
epoch (LongTensor): Training epochs.
Returns:
auc(Float32): AUC result.
f1(Float32): F1-Score result.
"""
score_positive_edges = np.array(test_pos, dtype=np.int32).T
score_negative_edges = np.array(test_neg, dtype=np.int32).T
test_positive_z = np.concatenate((preds[score_positive_edges[0, :], :],
preds[score_positive_edges[1, :], :]), axis=1)
test_negative_z = np.concatenate((preds[score_negative_edges[0, :], :],
preds[score_negative_edges[1, :], :]), axis=1)
# operands could not be broadcast together with shapes (4288,128) (128,3)
scores = np.dot(np.concatenate((test_positive_z, test_negative_z), axis=0), weight) + bias
probability_scores = np.exp(softmax(scores))
predictions = probability_scores[:, 0]/probability_scores[:, 0:2].sum(1)
# predictions = predictions.asnumpy()
targets = [0]*len(test_pos) + [1]*len(test_neg)
auc, f1 = calculate_auc(targets, predictions)
return auc, f1
def get_acc():
"""get infer Accuracy."""
parser = argparse.ArgumentParser(description='postprocess')
parser.add_argument('--dataset_name', type=str, default='bitcoin-otc', choices=['bitcoin-otc', 'bitcoin-alpha'],
help='dataset name')
parser.add_argument('--result_path', type=str, default='./ascend310_infer/input/', help='result Files')
parser.add_argument('--label_path', type=str, default='', help='y_test npy Files')
parser.add_argument('--mask_path', type=str, default='', help='test_mask npy Files')
parser.add_argument("--checkpoint_file", type=str, default='sgcn_alpha_f1.ckpt', help="Checkpoint file path.")
parser.add_argument("--edge_path", nargs="?",
default="./input/bitcoin_alpha.csv", help="Edge list csv.")
parser.add_argument("--features-path", nargs="?",
default="./input/bitcoin_alpha.csv", help="Edge list csv.")
parser.add_argument("--test-size", type=float,
default=0.2, help="Test dataset size. Default is 0.2.")
parser.add_argument("--seed", type=int, default=42,
help="Random seed for sklearn pre-training. Default is 42.")
parser.add_argument("--spectral-features", default=True, dest="spectral_features", action="store_true")
parser.add_argument("--reduction-iterations", type=int,
default=30, help="Number of SVD iterations. Default is 30.")
parser.add_argument("--reduction-dimensions", type=int,
default=64, help="Number of SVD feature extraction dimensions. Default is 64.")
args_opt = parser.parse_args()
# Runtime
context.set_context(mode=context.GRAPH_MODE, device_target='Ascend', device_id=0)
# Create network
test_pos = np.load(os.path.join(args_opt.result_path, 'pos_test.npy'))
test_neg = np.load(os.path.join(args_opt.result_path, 'neg_test.npy'))
# Load parameters from checkpoint into network
param_dict = load_checkpoint(args_opt.checkpoint_file)
print(type(param_dict))
print(param_dict)
print(type(param_dict['regression_weights']))
print(param_dict['regression_weights'])
# load_param_into_net(net, param_dict)
pred = np.fromfile('./result_Files/repos_0.bin', np.float32)
if args_opt.dataset_name == 'bitcoin-otc':
pred = pred.reshape(5881, 64)
else:
pred = pred.reshape(3783, 64)
auc, f1 = score_model(pred, test_pos, test_neg, param_dict['regression_weights'].asnumpy(),
param_dict['regression_bias'].asnumpy())
print("Test set results:", "auc=", "{:.5f}".format(auc), "f1=", "{:.5f}".format(f1))
if __name__ == '__main__':
get_acc()
| 48.140187 | 117 | 0.644729 |
8a78d7cdf72b62f6c5c9341d633e72ed6d4ea01c | 4,001 | py | Python | pykeops/common/get_options.py | dvolgyes/keops | 58b2c5f7822a7468a6da2ce439939e7dad04d7f3 | [
"MIT"
] | 1 | 2020-09-29T13:21:30.000Z | 2020-09-29T13:21:30.000Z | pykeops/common/get_options.py | dvolgyes/keops | 58b2c5f7822a7468a6da2ce439939e7dad04d7f3 | [
"MIT"
] | null | null | null | pykeops/common/get_options.py | dvolgyes/keops | 58b2c5f7822a7468a6da2ce439939e7dad04d7f3 | [
"MIT"
] | null | null | null | import re
import numpy as np
from collections import OrderedDict
import pykeops
import pykeops.config
############################################################
# define backend
############################################################
def get_tag_backend(backend, variables, str=False):
    """
    Entry point to get the correct backend.
    """
    res = SetBackend()
    if not str:
        return res.define_tag_backend(backend, variables)
    else:
        return res.define_backend(backend, variables)
| 38.84466 | 125 | 0.59935 |
8a78e9f69beda0a9b40161770e8196cc19774191 | 4,306 | py | Python | prepare_features_vc.py | tkm2261/dnn-voice-changer | 63a4ca0b2d8a33a26fc5aaec168180152df1b429 | [
"MIT"
] | 13 | 2018-03-09T07:56:50.000Z | 2022-03-26T12:23:22.000Z | prepare_features_vc.py | tkm2261/dnn-voice-changer | 63a4ca0b2d8a33a26fc5aaec168180152df1b429 | [
"MIT"
] | null | null | null | prepare_features_vc.py | tkm2261/dnn-voice-changer | 63a4ca0b2d8a33a26fc5aaec168180152df1b429 | [
"MIT"
] | 2 | 2018-06-16T03:44:56.000Z | 2021-04-06T17:32:38.000Z | """Prepare acoustic features for one-to-one voice conversion.
usage:
prepare_features_vc.py [options] <DATA_ROOT> <source_speaker> <target_speaker>
options:
--max_files=<N> Max num files to be collected. [default: 100]
--dst_dir=<d> Destination directory [default: data/cmu_arctic_vc].
--overwrite Overwrite files.
-h, --help show this help message and exit
"""
from __future__ import division, print_function, absolute_import
from docopt import docopt
import numpy as np
from nnmnkwii.datasets import FileSourceDataset
from nnmnkwii import preprocessing as P
from nnmnkwii.preprocessing.alignment import DTWAligner
from nnmnkwii.datasets import cmu_arctic, voice_statistics, vcc2016
import pysptk
import pyworld
from scipy.io import wavfile
from tqdm import tqdm
from os.path import basename, splitext, exists, expanduser, join, dirname
import os
import sys
from hparams import vc as hp
from hparams import hparams_debug_string
# vcc2016.WavFileDataSource and voice_statistics.WavFileDataSource can be
# drop-in replacement. See below for details:
# https://r9y9.github.io/nnmnkwii/latest/references/datasets.html#builtin-data-sources
if __name__ == "__main__":
args = docopt(__doc__)
print("Command line args:\n", args)
DATA_ROOT = args["<DATA_ROOT>"]
source_speaker = args["<source_speaker>"]
target_speaker = args["<target_speaker>"]
max_files = int(args["--max_files"])
dst_dir = args["--dst_dir"]
overwrite = args["--overwrite"]
print(hparams_debug_string(hp))
X_dataset = FileSourceDataset(MGCSource(DATA_ROOT, [source_speaker],
max_files=max_files))
Y_dataset = FileSourceDataset(MGCSource(DATA_ROOT, [target_speaker],
max_files=max_files))
skip_feature_extraction = exists(join(dst_dir, "X")) \
and exists(join(dst_dir, "Y"))
if overwrite:
skip_feature_extraction = False
if skip_feature_extraction:
print("Features seems to be prepared, skipping feature extraction.")
sys.exit(0)
# Create dirs
for speaker, name in [(source_speaker, "X"), (target_speaker, "Y")]:
d = join(dst_dir, name)
print("Destination dir for {}: {}".format(speaker, d))
if not exists(d):
os.makedirs(d)
# Convert to arrays
print("Convert datasets to arrays")
X, Y = X_dataset.asarray(verbose=1), Y_dataset.asarray(verbose=1)
# Alignment
print("Perform alignment")
X, Y = DTWAligner().transform((X, Y))
print("Save features to disk")
for idx, (x, y) in tqdm(enumerate(zip(X, Y))):
# paths
src_name = splitext(basename(X_dataset.collected_files[idx][0]))[0]
tgt_name = splitext(basename(Y_dataset.collected_files[idx][0]))[0]
src_path = join(dst_dir, "X", src_name)
tgt_path = join(dst_dir, "Y", tgt_name)
        # Trim zero frames and adjust lengths to be divisible by 2
x = P.trim_zeros_frames(x)
y = P.trim_zeros_frames(y)
x, y = P.adjust_frame_lengths(x, y, pad=True, divisible_by=2)
# Save
np.save(src_path, x)
np.save(tgt_path, y)
| 35.883333 | 86 | 0.656061 |
8a7905cf7b3fc947d0fefe5c680371a050d82807 | 1,855 | py | Python | lib/tests/streamlit/pydeck_test.py | zgtz/streamlit | be797682394955ef2b94a5f7641b6f9d8fd1dbfc | [
"Apache-2.0"
] | 1 | 2022-01-19T10:48:49.000Z | 2022-01-19T10:48:49.000Z | lib/tests/streamlit/pydeck_test.py | zgtz/streamlit | be797682394955ef2b94a5f7641b6f9d8fd1dbfc | [
"Apache-2.0"
] | 52 | 2021-10-04T21:52:48.000Z | 2021-12-29T02:18:44.000Z | lib/tests/streamlit/pydeck_test.py | zgtz/streamlit | be797682394955ef2b94a5f7641b6f9d8fd1dbfc | [
"Apache-2.0"
] | null | null | null | # Copyright 2018-2021 Streamlit Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import pandas as pd
import pydeck as pdk
from tests import testutil
import streamlit as st
import streamlit.elements.deck_gl_json_chart as deck_gl_json_chart
df1 = pd.DataFrame({"lat": [1, 2, 3, 4], "lon": [10, 20, 30, 40]})
| 30.409836 | 75 | 0.618329 |
8a790773c525636d7fecb88a7d84df906ba09ba6 | 40,698 | py | Python | sdks/python/apache_beam/io/gcp/bigquery_tools.py | Doctusoft/beam | 91d59e78ffec3771a1d646c4e320fff571393829 | [
"Apache-2.0"
] | null | null | null | sdks/python/apache_beam/io/gcp/bigquery_tools.py | Doctusoft/beam | 91d59e78ffec3771a1d646c4e320fff571393829 | [
"Apache-2.0"
] | 1 | 2022-02-10T06:56:11.000Z | 2022-02-10T06:56:11.000Z | sdks/python/apache_beam/io/gcp/bigquery_tools.py | Doctusoft/beam | 91d59e78ffec3771a1d646c4e320fff571393829 | [
"Apache-2.0"
] | null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tools used by BigQuery sources and sinks.
Classes, constants and functions in this file are experimental and have no
backwards compatibility guarantees.
These tools include wrappers and clients to interact with BigQuery APIs.
NOTHING IN THIS FILE HAS BACKWARDS COMPATIBILITY GUARANTEES.
"""
from __future__ import absolute_import
import datetime
import decimal
import json
import logging
import re
import sys
import time
import uuid
from builtins import object
from future.utils import iteritems
from apache_beam import coders
from apache_beam.internal.gcp import auth
from apache_beam.internal.gcp.json_value import from_json_value
from apache_beam.internal.gcp.json_value import to_json_value
from apache_beam.internal.http_client import get_new_http
from apache_beam.io.gcp.internal.clients import bigquery
from apache_beam.options import value_provider
from apache_beam.options.pipeline_options import GoogleCloudOptions
from apache_beam.runners.dataflow.native_io import iobase as dataflow_io
from apache_beam.transforms import DoFn
from apache_beam.utils import retry
# Protect against environments where bigquery library is not available.
# pylint: disable=wrong-import-order, wrong-import-position
try:
from apitools.base.py.exceptions import HttpError
except ImportError:
pass
# pylint: enable=wrong-import-order, wrong-import-position
MAX_RETRIES = 3
JSON_COMPLIANCE_ERROR = 'NAN, INF and -INF values are not JSON compliant.'
def get_hashable_destination(destination):
"""Parses a table reference into a (project, dataset, table) tuple.
Args:
destination: Either a TableReference object from the bigquery API.
The object has the following attributes: projectId, datasetId, and
tableId. Or a string representing the destination containing
'PROJECT:DATASET.TABLE'.
Returns:
A string representing the destination containing
'PROJECT:DATASET.TABLE'.
"""
if isinstance(destination, bigquery.TableReference):
return '%s:%s.%s' % (
destination.projectId, destination.datasetId, destination.tableId)
else:
return destination
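# Example (illustrative):
#
#   ref = bigquery.TableReference(projectId='p', datasetId='d', tableId='t')
#   get_hashable_destination(ref)      # -> 'p:d.t'
#   get_hashable_destination('p:d.t')  # -> 'p:d.t' (strings pass through)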
def parse_table_schema_from_json(schema_string):
"""Parse the Table Schema provided as string.
Args:
schema_string: String serialized table schema, should be a valid JSON.
Returns:
A TableSchema of the BigQuery export from either the Query or the Table.
"""
json_schema = json.loads(schema_string)
def _parse_schema_field(field):
"""Parse a single schema field from dictionary.
Args:
field: Dictionary object containing serialized schema.
Returns:
A TableFieldSchema for a single column in BigQuery.
"""
schema = bigquery.TableFieldSchema()
schema.name = field['name']
schema.type = field['type']
if 'mode' in field:
schema.mode = field['mode']
else:
schema.mode = 'NULLABLE'
if 'description' in field:
schema.description = field['description']
if 'fields' in field:
schema.fields = [_parse_schema_field(x) for x in field['fields']]
return schema
fields = [_parse_schema_field(f) for f in json_schema['fields']]
return bigquery.TableSchema(fields=fields)
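# Example (illustrative): one top-level string column (mode defaults to
# NULLABLE) and one repeated record column:
#
#   schema = parse_table_schema_from_json(json.dumps({'fields': [
#       {'name': 'name', 'type': 'STRING'},
#       {'name': 'tags', 'type': 'RECORD', 'mode': 'REPEATED',
#        'fields': [{'name': 'key', 'type': 'STRING'}]}]}))
#   assert schema.fields[0].mode == 'NULLABLE'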
def parse_table_reference(table, dataset=None, project=None):
"""Parses a table reference into a (project, dataset, table) tuple.
Args:
table: The ID of the table. The ID must contain only letters
(a-z, A-Z), numbers (0-9), or underscores (_). If dataset argument is None
then the table argument must contain the entire table reference:
'DATASET.TABLE' or 'PROJECT:DATASET.TABLE'. This argument can be a
bigquery.TableReference instance in which case dataset and project are
ignored and the reference is returned as a result. Additionally, for date
partitioned tables, appending '$YYYYmmdd' to the table name is supported,
e.g. 'DATASET.TABLE$YYYYmmdd'.
dataset: The ID of the dataset containing this table or null if the table
reference is specified entirely by the table argument.
project: The ID of the project containing this table or null if the table
reference is specified entirely by the table (and possibly dataset)
argument.
Returns:
A TableReference object from the bigquery API. The object has the following
attributes: projectId, datasetId, and tableId.
Raises:
ValueError: if the table reference as a string does not match the expected
format.
"""
if isinstance(table, bigquery.TableReference):
return table
elif callable(table):
return table
elif isinstance(table, value_provider.ValueProvider):
return table
table_reference = bigquery.TableReference()
# If dataset argument is not specified, the expectation is that the
# table argument will contain a full table reference instead of just a
# table name.
if dataset is None:
match = re.match(
r'^((?P<project>.+):)?(?P<dataset>\w+)\.(?P<table>[\w\$]+)$', table)
if not match:
raise ValueError(
'Expected a table reference (PROJECT:DATASET.TABLE or '
'DATASET.TABLE) instead of %s.' % table)
table_reference.projectId = match.group('project')
table_reference.datasetId = match.group('dataset')
table_reference.tableId = match.group('table')
else:
table_reference.projectId = project
table_reference.datasetId = dataset
table_reference.tableId = table
return table_reference
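# Examples (illustrative): both calls yield projectId='my-proj',
# datasetId='logs', tableId='events':
#
#   parse_table_reference('my-proj:logs.events')
#   parse_table_reference('events', dataset='logs', project='my-proj')
#
# For date-partitioned tables the suffix stays on the table id:
#
#   parse_table_reference('logs.events$20210101').tableId  # 'events$20210101'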
# -----------------------------------------------------------------------------
# BigQueryWrapper.
def perform_load_job(self,
destination,
files,
job_id,
schema=None,
write_disposition=None,
create_disposition=None):
"""Starts a job to load data into BigQuery.
Returns:
bigquery.JobReference with the information about the job that was started.
"""
return self._insert_load_job(
destination.projectId, job_id, destination, files,
schema=schema,
create_disposition=create_disposition,
write_disposition=write_disposition)
def insert_rows(self, project_id, dataset_id, table_id, rows,
skip_invalid_rows=False):
"""Inserts rows into the specified table.
Args:
project_id: The project id owning the table.
dataset_id: The dataset id owning the table.
table_id: The table id.
rows: A list of plain Python dictionaries. Each dictionary is a row and
each key in it is the name of a field.
skip_invalid_rows: If there are rows with insertion errors, whether they
should be skipped, and all others should be inserted successfully.
Returns:
A tuple (bool, errors). If first element is False then the second element
      will be a bigquery.InsertErrorsValueListEntry instance containing
specific errors.
"""
# Prepare rows for insertion. Of special note is the row ID that we add to
# each row in order to help BigQuery avoid inserting a row multiple times.
    # BigQuery makes a best-effort attempt to de-duplicate rows when unique
    # IDs are provided; duplicates can otherwise appear when failed requests
    # are retried.
# TODO(silviuc): Must add support to writing TableRow's instead of dicts.
final_rows = []
for row in rows:
json_object = bigquery.JsonObject()
for k, v in iteritems(row):
if isinstance(v, decimal.Decimal):
# decimal values are converted into string because JSON does not
# support the precision that decimal supports. BQ is able to handle
# inserts into NUMERIC columns by receiving JSON with string attrs.
v = str(v)
json_object.additionalProperties.append(
bigquery.JsonObject.AdditionalProperty(
key=k, value=to_json_value(v)))
final_rows.append(
bigquery.TableDataInsertAllRequest.RowsValueListEntry(
insertId=str(self.unique_row_id),
json=json_object))
result, errors = self._insert_all_rows(
project_id, dataset_id, table_id, final_rows, skip_invalid_rows)
return result, errors
def _convert_cell_value_to_dict(self, value, field):
if field.type == 'STRING':
# Input: "XYZ" --> Output: "XYZ"
return value
elif field.type == 'BOOLEAN':
# Input: "true" --> Output: True
return value == 'true'
elif field.type == 'INTEGER':
# Input: "123" --> Output: 123
return int(value)
elif field.type == 'FLOAT':
# Input: "1.23" --> Output: 1.23
return float(value)
elif field.type == 'TIMESTAMP':
# The UTC should come from the timezone library but this is a known
# issue in python 2.7 so we'll just hardcode it as we're reading using
# utcfromtimestamp.
# Input: 1478134176.985864 --> Output: "2016-11-03 00:49:36.985864 UTC"
dt = datetime.datetime.utcfromtimestamp(float(value))
return dt.strftime('%Y-%m-%d %H:%M:%S.%f UTC')
elif field.type == 'BYTES':
# Input: "YmJi" --> Output: "YmJi"
return value
elif field.type == 'DATE':
# Input: "2016-11-03" --> Output: "2016-11-03"
return value
elif field.type == 'DATETIME':
# Input: "2016-11-03T00:49:36" --> Output: "2016-11-03T00:49:36"
return value
elif field.type == 'TIME':
# Input: "00:49:36" --> Output: "00:49:36"
return value
elif field.type == 'RECORD':
# Note that a schema field object supports also a RECORD type. However
# when querying, the repeated and/or record fields are flattened
# unless we pass the flatten_results flag as False to the source
return self.convert_row_to_dict(value, field)
elif field.type == 'NUMERIC':
return decimal.Decimal(value)
elif field.type == 'GEOGRAPHY':
return value
else:
raise RuntimeError('Unexpected field type: %s' % field.type)
def convert_row_to_dict(self, row, schema):
"""Converts a TableRow instance using the schema to a Python dict."""
result = {}
for index, field in enumerate(schema.fields):
value = None
if isinstance(schema, bigquery.TableSchema):
cell = row.f[index]
value = from_json_value(cell.v) if cell.v is not None else None
elif isinstance(schema, bigquery.TableFieldSchema):
cell = row['f'][index]
value = cell['v'] if 'v' in cell else None
if field.mode == 'REPEATED':
if value is None:
# Ideally this should never happen as repeated fields default to
# returning an empty list
result[field.name] = []
else:
result[field.name] = [self._convert_cell_value_to_dict(x['v'], field)
for x in value]
elif value is None:
if not field.mode == 'NULLABLE':
raise ValueError('Received \'None\' as the value for the field %s '
'but the field is not NULLABLE.' % field.name)
result[field.name] = None
else:
result[field.name] = self._convert_cell_value_to_dict(value, field)
return result
# -----------------------------------------------------------------------------
# BigQueryReader, BigQueryWriter.
| 39.095101 | 80 | 0.685832 |
8a790aaa3beecccbae1e5fe2d0bb1478dbadd597 | 1,841 | py | Python | VENV/lib/python3.6/site-packages/PyInstaller/hooks/hook-PyQt5.py | workingyifei/display-pattern-generator | b27be84c6221fa93833f283109870737b05bfbf6 | [
"MIT"
] | 3 | 2018-11-27T06:30:23.000Z | 2021-05-30T15:56:32.000Z | VENV/lib/python3.6/site-packages/PyInstaller/hooks/hook-PyQt5.py | workingyifei/display-pattern-generator | b27be84c6221fa93833f283109870737b05bfbf6 | [
"MIT"
] | 1 | 2018-11-15T02:00:31.000Z | 2021-12-06T02:20:32.000Z | VENV/lib/python3.6/site-packages/PyInstaller/hooks/hook-PyQt5.py | workingyifei/display-pattern-generator | b27be84c6221fa93833f283109870737b05bfbf6 | [
"MIT"
] | 1 | 2020-11-06T18:46:35.000Z | 2020-11-06T18:46:35.000Z | #-----------------------------------------------------------------------------
# Copyright (c) 2005-2017, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
from PyInstaller.utils.hooks import (
get_module_attribute, is_module_satisfies, qt_menu_nib_dir, get_module_file_attribute,
collect_data_files)
from PyInstaller.compat import getsitepackages, is_darwin, is_win
# On Windows system PATH has to be extended to point to the PyQt5 directory.
# The PySide directory contains Qt dlls. We need to avoid including different
# version of Qt libraries when there is installed another application (e.g. QtCreator)
if is_win:
from PyInstaller.utils.win32.winutils import extend_system_path
extend_system_path([os.path.join(x, 'PyQt5') for x in getsitepackages()])
extend_system_path([os.path.join(os.path.dirname(get_module_file_attribute('PyQt5')),
'Qt', 'bin')])
# In the new consolidated mode any PyQt depends on _qt
hiddenimports = ['sip', 'PyQt5.Qt']
# Collect just the qt.conf file.
datas = [x for x in collect_data_files('PyQt5', False, os.path.join('Qt', 'bin')) if
x[0].endswith('qt.conf')]
# For Qt<5.4 to work on Mac OS X it is necessary to include `qt_menu.nib`.
# This directory contains some resource files necessary to run PyQt or PySide
# app.
if is_darwin:
# Version of the currently installed Qt 5.x shared library.
qt_version = get_module_attribute('PyQt5.QtCore', 'QT_VERSION_STR')
if is_module_satisfies('Qt < 5.4', qt_version):
datas = [(qt_menu_nib_dir('PyQt5'), '')]
| 42.813953 | 90 | 0.669745 |
8a7922d582e70ee076c3374be8cdb74d33423c9b | 1,038 | py | Python | tests/ast/nodes/test_from_node.py | upgradvisor/vyper | 642884ea938a25793c1b2fac866e8458e63a7b49 | [
"Apache-2.0"
] | 1,471 | 2017-12-25T05:47:57.000Z | 2019-11-19T07:47:53.000Z | tests/ast/nodes/test_from_node.py | upgradvisor/vyper | 642884ea938a25793c1b2fac866e8458e63a7b49 | [
"Apache-2.0"
] | 915 | 2019-11-21T05:48:16.000Z | 2022-03-31T23:51:03.000Z | tests/ast/nodes/test_from_node.py | upgradvisor/vyper | 642884ea938a25793c1b2fac866e8458e63a7b49 | [
"Apache-2.0"
] | 321 | 2017-12-25T16:37:21.000Z | 2019-11-15T17:44:06.000Z | from vyper import ast as vy_ast
| 25.95 | 65 | 0.719653 |
8a79bd5eb2532e1ffdd3b87d6be696b8303afc7f | 2,624 | py | Python | generator/modules/opencv.py | dayta-ai/deepo | fa720e39052e63adfe0f2b9dbd8444a0d69c2540 | [
"MIT"
] | 1 | 2021-11-18T18:34:29.000Z | 2021-11-18T18:34:29.000Z | generator/modules/opencv.py | dayta-ai/deepo | fa720e39052e63adfe0f2b9dbd8444a0d69c2540 | [
"MIT"
] | null | null | null | generator/modules/opencv.py | dayta-ai/deepo | fa720e39052e63adfe0f2b9dbd8444a0d69c2540 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from .__module__ import Module, dependency, source, version
from .tools import Tools
from .boost import Boost
from .python import Python
| 35.945205 | 100 | 0.463796 |
8a7a7334b3428135d28ee8a3da56e39eed250254 | 1,564 | py | Python | day16/solve16.py | jmacarthur/aoc2017 | 2a3096aabf464ef52c05f9437498035cfb5ca1a6 | [
"MIT"
] | null | null | null | day16/solve16.py | jmacarthur/aoc2017 | 2a3096aabf464ef52c05f9437498035cfb5ca1a6 | [
"MIT"
] | null | null | null | day16/solve16.py | jmacarthur/aoc2017 | 2a3096aabf464ef52c05f9437498035cfb5ca1a6 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import sys
import copy
stage_length = 16
stage = map(chr, range(ord('a'),ord('a')+stage_length))
def spin(amount):
"""To save time, this function isn't used except at the end.
Normally, a counter marks the start of the stage and this changes
instead. """
global stage
stage = stage[amount:] + stage[:amount]
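def swap(i, j):
    """Exchange the dancers at absolute stage positions i and j.
    The original definition is not shown in this excerpt; this is a
    minimal sketch consistent with the calls below.
    """
    stage[i], stage[j] = stage[j], stage[i]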
with open(sys.argv[1], 'rt') as f:
program = ",".join(f.readlines()).split(",")
n = 0
pos = 0
arguments_list = [x[1:].strip().split("/") for x in program]
action_list = [x[0] for x in program]
history = []
# Change this to 1 for the solution to part 1.
iterations = 1000000000
while n<iterations:
for s in range(0,len(program)):
arguments = arguments_list[s]
if action_list[s] == 's':
pos += stage_length-int(arguments[0])
elif action_list[s] == 'x':
swap((int(arguments[0])+pos)%stage_length, (int(arguments[1])+pos)%stage_length)
elif action_list[s] == 'p':
pos1 = stage.index(arguments[0])
pos2 = stage.index(arguments[1])
swap(pos1, pos2)
if stage in history:
print("Duplicate found: %r at index %d matches at stage %d"%(stage, history.index(stage), n))
loop_length = n - history.index(stage)
complete_cycles = (iterations - n) / loop_length
n += complete_cycles * loop_length
history.append(copy.copy(stage))
n += 1
spin(pos % stage_length)
print "".join(stage)
| 30.076923 | 101 | 0.621483 |
8a7abfc40ef422e33ab3c8284edc61617b59e3dc | 1,165 | py | Python | skimage/segmentation/tests/test_felzenszwalb.py | jaberg/scikits-image | 2ab3e2dfb341189ef2ff9370c6cf3d33ef6ec88d | [
"BSD-3-Clause"
] | 2 | 2020-02-17T18:54:33.000Z | 2021-09-28T15:18:23.000Z | skimage/segmentation/tests/test_felzenszwalb.py | jaberg/scikits-image | 2ab3e2dfb341189ef2ff9370c6cf3d33ef6ec88d | [
"BSD-3-Clause"
] | 1 | 2020-03-30T12:31:55.000Z | 2020-03-30T12:31:55.000Z | skimage/segmentation/tests/test_felzenszwalb.py | emmanuelle/scikit-image | eccc41907135cf81b99c4be18a480a9bc705485d | [
"BSD-3-Clause"
] | 1 | 2019-12-20T19:19:59.000Z | 2019-12-20T19:19:59.000Z | import numpy as np
from numpy.testing import assert_equal, assert_array_equal
from nose.tools import assert_greater
from skimage.segmentation import felzenszwalb
if __name__ == '__main__':
from numpy import testing
testing.run_module_suite()
| 29.125 | 73 | 0.628326 |
8a7ac7f87e160e8f864dafce2acd68a6454b8a68 | 1,419 | py | Python | tests/middleware/test_csrf_middleware.py | w3x10e8/core | d8f0ca29c2bd5e86d199391fa916ce2f5c9b0f49 | [
"MIT"
] | null | null | null | tests/middleware/test_csrf_middleware.py | w3x10e8/core | d8f0ca29c2bd5e86d199391fa916ce2f5c9b0f49 | [
"MIT"
] | null | null | null | tests/middleware/test_csrf_middleware.py | w3x10e8/core | d8f0ca29c2bd5e86d199391fa916ce2f5c9b0f49 | [
"MIT"
] | null | null | null | from masonite.request import Request
from masonite.view import View
from masonite.auth.Csrf import Csrf
from masonite.app import App
from masonite.middleware import CsrfMiddleware
from masonite.testsuite.TestSuite import generate_wsgi
import pytest
from masonite.exceptions import InvalidCSRFToken
| 36.384615 | 107 | 0.718816 |
8a7bd23662f4d2b0b0c83db0df08df0f16f7923c | 690 | py | Python | phoible/views.py | ltxom/phoible | 7ce6f5e62d885f142dba61937d920e68fa7f9fca | [
"Apache-2.0"
] | 31 | 2015-01-20T01:36:22.000Z | 2022-03-11T16:47:30.000Z | phoible/views.py | ltxom/phoible | 7ce6f5e62d885f142dba61937d920e68fa7f9fca | [
"Apache-2.0"
] | 22 | 2015-03-09T11:11:31.000Z | 2022-03-07T14:08:29.000Z | phoible/views.py | ltxom/phoible | 7ce6f5e62d885f142dba61937d920e68fa7f9fca | [
"Apache-2.0"
] | 12 | 2015-11-16T18:28:43.000Z | 2021-05-20T21:55:49.000Z | from pyramid.view import view_config
import os
| 32.857143 | 72 | 0.708696 |
8a7c5a43d05f7336921551d124cf954c34bc06e5 | 46,013 | py | Python | tests/restapi/test_routes.py | aiace9/aiida-core | 09ac91654648adb684a58d5d2d7b1c11a503dae8 | [
"MIT",
"BSD-3-Clause"
] | null | null | null | tests/restapi/test_routes.py | aiace9/aiida-core | 09ac91654648adb684a58d5d2d7b1c11a503dae8 | [
"MIT",
"BSD-3-Clause"
] | null | null | null | tests/restapi/test_routes.py | aiace9/aiida-core | 09ac91654648adb684a58d5d2d7b1c11a503dae8 | [
"MIT",
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
# pylint: disable=too-many-lines
"""Unittests for REST API."""
import tempfile
from flask_cors.core import ACL_ORIGIN
from aiida import orm
from aiida.backends.testbase import AiidaTestCase
from aiida.common import json
from aiida.common.links import LinkType
from aiida.restapi.run_api import configure_api
| 40.186026 | 117 | 0.583878 |
8a7d500dd98fa04ac32ae6b712ad22a261bd4d52 | 3,644 | py | Python | processmonitor.py | yletallec/processmonitor | 95db3416ec35fcb1325a1ac6c5a26807e4c3a474 | [
"MIT"
] | null | null | null | processmonitor.py | yletallec/processmonitor | 95db3416ec35fcb1325a1ac6c5a26807e4c3a474 | [
"MIT"
] | null | null | null | processmonitor.py | yletallec/processmonitor | 95db3416ec35fcb1325a1ac6c5a26807e4c3a474 | [
"MIT"
] | null | null | null | """Process Monitor
Usage:
processmonitor.py <process_name> <overall_duration> [<sampling_interval>]
processmonitor.py -h|--help
processmonitor.py -v|--version
Options:
<process_name> Process name argument.
<overall_duration> Overall duration of the monitoring in seconds.
<sampling_interval> Sampling interval in seconds (optional, default 5).
-h --help Show this screen.
-v --version Show version.
"""
from docopt import docopt
from utils import string_to_integer
from process import Process
from threading import Event, Thread
from datetime import datetime
import os
import sys
import csv
import time
from enum import IntEnum
from colorama import init  # assumption: init() below is colorama's (its import is elided in this excerpt)
init()
| 31.145299 | 94 | 0.630626 |
8a7d668b99ceea74e75c844a87347ac04ef02b71 | 6,740 | py | Python | Projects/DeepLearningTechniques/MobileNet_v2/tiny_imagenet/data_loader.py | Tim232/Python-Things | 05f0f373a4cf298e70d9668c88a6e3a9d1cd8146 | [
"MIT"
] | 2 | 2020-12-05T07:42:55.000Z | 2021-01-06T23:23:18.000Z | Projects/DeepLearningTechniques/MobileNet_v2/tiny_imagenet/data_loader.py | Tim232/Python-Things | 05f0f373a4cf298e70d9668c88a6e3a9d1cd8146 | [
"MIT"
] | null | null | null | Projects/DeepLearningTechniques/MobileNet_v2/tiny_imagenet/data_loader.py | Tim232/Python-Things | 05f0f373a4cf298e70d9668c88a6e3a9d1cd8146 | [
"MIT"
] | null | null | null | import os
import re
import numpy as np
from Projects.DeepLearningTechniques.MobileNet_v2.tiny_imagenet.constants import * | 41.863354 | 145 | 0.616914 |
8a7d81f9fd3f30534398ff05abd7412a6f78b709 | 4,035 | py | Python | MarkReport/MarkReport.py | dedukun/MarkReport | 2d92c87a69db5868d14b7a59e815b9ee72d439f9 | [
"MIT"
] | null | null | null | MarkReport/MarkReport.py | dedukun/MarkReport | 2d92c87a69db5868d14b7a59e815b9ee72d439f9 | [
"MIT"
] | null | null | null | MarkReport/MarkReport.py | dedukun/MarkReport | 2d92c87a69db5868d14b7a59e815b9ee72d439f9 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Command line flags
import os
import glob
import re
import pyinotify
import subprocess
from sys import stdout, stderr
from time import time, sleep
from tempfile import gettempdir
from distutils.dir_util import copy_tree
from shutil import copyfile
from weasyprint import HTML
import argparse
parser = argparse.ArgumentParser(
description='Converts Markdown to elegant PDF reports')
parser.add_argument('--basic', dest='basic', action='store_true',
help='Do not enrich HTML with LaTeX and syntax highlighting (faster builds)')
parser.add_argument('--watch', dest='watch', action='store_true',
help='Watch the current folder for changes and rebuild automatically')
parser.add_argument('--quiet', dest='quiet', action='store_true',
help='Do not output any information')
parser.add_argument("--timeout", type=int, default=2,
help='Page generation timeout')
parser.add_argument("--base-html", type=str, default="",
help='The path to the base HTML file')
parser.set_defaults(watch=False)
args = parser.parse_args()
# Check directory
ok = False
for file in os.listdir("."):
if file.endswith(".md"):
ok = True
break
if not ok:
stderr.write("No markdown file found in the current folder")
exit(1)
if args.base_html != "":
if not os.path.isfile(args.base_html):
stderr.write("The given base HTML file doesn't exist")
exit(1)
script_path = os.path.dirname(os.path.realpath(__file__))
# Temp dir
timestamp = str(int(time()))
tmp_dir = gettempdir() + "/" + timestamp + "_md-report/"
os.makedirs(tmp_dir, exist_ok=True)
# Headless browser
if not args.basic:
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
options = Options()
options.headless = True
options.log.level = "trace"
d = DesiredCapabilities.FIREFOX
d['loggingPrefs'] = {'browser': 'ALL'}
driver = webdriver.Firefox(options=options, capabilities=d)
driver.set_page_load_timeout(args.timeout)
prev_compile_time = 0
recompile(None)
if not args.watch:
if not args.basic:
driver.quit()
exit(0)
watch_manager = pyinotify.WatchManager()
event_notifier = pyinotify.Notifier(watch_manager, recompile)
watch_manager.add_watch(os.path.abspath("."), pyinotify.ALL_EVENTS, rec=True)
event_notifier.loop()
if not args.basic:
driver.quit()
| 27.827586 | 105 | 0.662949 |
8a7e18d0d0b30bb03c5125997bb7d29ab2737184 | 902 | py | Python | DFS/13023.py | kjh9267/BOJ_Python | b4d2ae09c252cc9280df93ccecbd07880947827e | [
"Apache-2.0"
] | null | null | null | DFS/13023.py | kjh9267/BOJ_Python | b4d2ae09c252cc9280df93ccecbd07880947827e | [
"Apache-2.0"
] | null | null | null | DFS/13023.py | kjh9267/BOJ_Python | b4d2ae09c252cc9280df93ccecbd07880947827e | [
"Apache-2.0"
] | null | null | null | # https://www.acmicpc.net/problem/13023
import sys
sys.setrecursionlimit(999999999)
if __name__ == '__main__':
input = __import__('sys').stdin.readline
target_depth = 4
N, M = map(int, input().split())
graph = [list() for _ in range(N)]
for _ in range(M):
a, b = map(int, input().split())
graph[a].append(b)
graph[b].append(a)
print(dfs_all())
| 19.191489 | 49 | 0.578714 |
8a7ecd71a92cf19cd5b6422ac30a671d4195653c | 1,358 | py | Python | experiments/bst/setup.py | bigchaindb/privacy-protocols | d220f642c7c056e5ec179b47a8d0863dbc373d9d | [
"CC-BY-4.0"
] | 68 | 2017-08-02T14:22:59.000Z | 2022-02-19T05:27:42.000Z | experiments/bst/setup.py | bigchaindb/privacy-protocols | d220f642c7c056e5ec179b47a8d0863dbc373d9d | [
"CC-BY-4.0"
] | 6 | 2017-08-05T18:30:14.000Z | 2017-08-22T19:54:53.000Z | experiments/bst/setup.py | bigchaindb/privacy-protocols | d220f642c7c056e5ec179b47a8d0863dbc373d9d | [
"CC-BY-4.0"
] | 15 | 2017-08-22T16:04:26.000Z | 2022-03-13T10:36:02.000Z | """bst: BigchainDB Sharing Tools"""
from setuptools import setup, find_packages
install_requires = [
'base58~=0.2.2',
'PyNaCl~=1.1.0',
'bigchaindb-driver',
'click==6.7',
'colorama',
]
setup(
name='bst',
version='0.1.0',
description='bst: BigchainDB Sharing Tools',
long_description=(
        'A collection of scripts with different patterns to share '
'private data on BigchainDB.'),
url='https://github.com/vrde/bst/',
author='Alberto Granzotto',
author_email='[email protected]',
license='AGPLv3',
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Database',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development',
'Natural Language :: English',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
],
packages=find_packages(),
entry_points={
'console_scripts': [
'bst=bst.cli:main'
],
},
install_requires=install_requires
)
| 26.115385 | 74 | 0.594993 |
8a7f754432204bffd274f53972f0d99bc17086e5 | 118 | py | Python | polyaxon/db/admin/job_resources.py | elyase/polyaxon | 1c19f059a010a6889e2b7ea340715b2bcfa382a0 | [
"MIT"
] | null | null | null | polyaxon/db/admin/job_resources.py | elyase/polyaxon | 1c19f059a010a6889e2b7ea340715b2bcfa382a0 | [
"MIT"
] | null | null | null | polyaxon/db/admin/job_resources.py | elyase/polyaxon | 1c19f059a010a6889e2b7ea340715b2bcfa382a0 | [
"MIT"
] | null | null | null | from django.contrib import admin
from db.models.job_resources import JobResources
admin.site.register(JobResources)
| 19.666667 | 48 | 0.847458 |
8a7f7c81cefa2649d2218e763e7fb484932406a9 | 8,498 | py | Python | voting_ml/main.py | tommy-waltmann/voting-ml | 327de4515d8f2f7b8e072833df20eca651621ea6 | [
"BSD-3-Clause"
] | null | null | null | voting_ml/main.py | tommy-waltmann/voting-ml | 327de4515d8f2f7b8e072833df20eca651621ea6 | [
"BSD-3-Clause"
] | 2 | 2021-04-20T19:04:36.000Z | 2021-04-24T22:33:47.000Z | voting_ml/main.py | tommy-waltmann/voting-ml | 327de4515d8f2f7b8e072833df20eca651621ea6 | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import sklearn
import subprocess
from sklearn import model_selection, tree
import data
import feature_selection
import model_sel
import os
import matplotlib.pyplot as plt
import seaborn as sns
def separate_weights(X_train, column_names):
"""
Removes the column containing weights from X_train, and returns it as
    a separate array. Note that 'weight' is also removed from the
    passed-in column_names list in place.
    """
weight_column_idx = column_names.index('weight')
weights = X_train[:, weight_column_idx]
new_X_train = np.delete(X_train, weight_column_idx, axis=1)
new_questions = column_names
new_questions.remove('weight')
return new_X_train, weights, new_questions
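# Example (illustrative):
#
#   X = np.array([[1.0, 0.8, 3.0],
#                 [4.0, 1.2, 6.0]])
#   X2, w, qs = separate_weights(X, ['q1', 'weight', 'q2'])
#   # X2 -> [[1., 3.], [4., 6.]]; w -> [0.8, 1.2]; qs -> ['q1', 'q2']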
if __name__ == "__main__":
main()
| 47.741573 | 222 | 0.564603 |
8a7f9273d28271b0f56005e762e91504d2293322 | 12,334 | py | Python | src/the_tale/the_tale/game/heroes/tests/test_logic.py | al-arz/the-tale | 542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5 | [
"BSD-3-Clause"
] | null | null | null | src/the_tale/the_tale/game/heroes/tests/test_logic.py | al-arz/the-tale | 542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5 | [
"BSD-3-Clause"
] | null | null | null | src/the_tale/the_tale/game/heroes/tests/test_logic.py | al-arz/the-tale | 542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5 | [
"BSD-3-Clause"
] | null | null | null |
import smart_imports
smart_imports.all()
| 44.688406 | 137 | 0.652749 |
8a7fb03f3abaa9ff95210abc3bc840c8008d9076 | 41 | py | Python | tinylinks/tests/test_app/models.py | brad/django-tinylinks | b3ae58ebe0d0292b7f618e9b0f1a08d2fb61b173 | [
"MIT"
] | 11 | 2016-11-27T15:46:42.000Z | 2021-07-31T14:03:54.000Z | tinylinks/tests/test_app/models.py | brad/django-tinylinks | b3ae58ebe0d0292b7f618e9b0f1a08d2fb61b173 | [
"MIT"
] | 2 | 2016-12-27T19:53:59.000Z | 2017-05-26T07:12:02.000Z | tinylinks/tests/test_app/models.py | brad/django-tinylinks | b3ae58ebe0d0292b7f618e9b0f1a08d2fb61b173 | [
"MIT"
] | 5 | 2015-02-01T01:10:31.000Z | 2015-10-29T18:48:59.000Z | """Dummy model needed for tests."""
pass
| 13.666667 | 35 | 0.682927 |
8a7fb88f2b8f8ab7d00332f23a58d29ccc1392ee | 1,346 | py | Python | postcipes/hydraulic_jump.py | timofeymukha/postcipes | f37b349038e26bb0295a2511295a46ef63fcd851 | [
"MIT"
] | null | null | null | postcipes/hydraulic_jump.py | timofeymukha/postcipes | f37b349038e26bb0295a2511295a46ef63fcd851 | [
"MIT"
] | null | null | null | postcipes/hydraulic_jump.py | timofeymukha/postcipes | f37b349038e26bb0295a2511295a46ef63fcd851 | [
"MIT"
] | 1 | 2019-03-20T22:39:55.000Z | 2019-03-20T22:39:55.000Z | # This file is part of postcipes
# (c) Timofey Mukha
# The code is released under the MIT Licence.
# See LICENCE.txt and the Legal section in the README for more information
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .postcipe import Postcipe
import turbulucid as tbl
from scipy.interpolate import interp1d
import numpy as np
import h5py
__all__ = ["HydraulicJump"]
| 33.65 | 81 | 0.653046 |
8a80483513e593a3c49ee46795ac3b8d601f6b9a | 416 | py | Python | main/SimulationSettings/ScreenshotsSteppable/Simulation/screenshots_steppables.py | JulianoGianlupi/nh-cc3d-4x-base-tool | c0f4aceebd4c5bf3ec39e831ef851e419b161259 | [
"CC0-1.0"
] | null | null | null | main/SimulationSettings/ScreenshotsSteppable/Simulation/screenshots_steppables.py | JulianoGianlupi/nh-cc3d-4x-base-tool | c0f4aceebd4c5bf3ec39e831ef851e419b161259 | [
"CC0-1.0"
] | null | null | null | main/SimulationSettings/ScreenshotsSteppable/Simulation/screenshots_steppables.py | JulianoGianlupi/nh-cc3d-4x-base-tool | c0f4aceebd4c5bf3ec39e831ef851e419b161259 | [
"CC0-1.0"
] | 1 | 2021-02-26T21:50:29.000Z | 2021-02-26T21:50:29.000Z | from cc3d.core.PySteppables import *
from cc3d import CompuCellSetup
from random import random
| 27.733333 | 93 | 0.6875 |
8a80b1c774bd44450fbb371648857468404e7e42 | 3,350 | py | Python | aesara/gpuarray/optdb.py | anirudhacharya/aesara | cbf91122296b68ee2ad592b2312d56f6ff65ba53 | [
"BSD-3-Clause"
] | 1 | 2021-11-09T10:19:46.000Z | 2021-11-09T10:19:46.000Z | aesara/gpuarray/optdb.py | anirudhacharya/aesara | cbf91122296b68ee2ad592b2312d56f6ff65ba53 | [
"BSD-3-Clause"
] | null | null | null | aesara/gpuarray/optdb.py | anirudhacharya/aesara | cbf91122296b68ee2ad592b2312d56f6ff65ba53 | [
"BSD-3-Clause"
] | null | null | null | from aesara.compile import optdb
from aesara.graph.opt import GraphToGPULocalOptGroup, TopoOptimizer, local_optimizer
from aesara.graph.optdb import (
EquilibriumDB,
LocalGroupDB,
OptimizationDatabase,
SequenceDB,
)
gpu_optimizer = EquilibriumDB()
gpu_cut_copies = EquilibriumDB()
# Not used for an EquilibriumOptimizer. It has the "tracks" that we need for GraphToGPUDB.
gpu_optimizer2 = EquilibriumDB()
gpu_seqopt = SequenceDB()
# do not add 'fast_run' to these two as this would always enable gpuarray mode
optdb.register(
"gpuarray_opt",
gpu_seqopt,
optdb.__position__.get("add_destroy_handler", 49.5) - 1,
"gpuarray",
)
pool_db = LocalGroupDB()
pool_db2 = LocalGroupDB(local_opt=GraphToGPULocalOptGroup)
pool_db2.__name__ = "pool_db2"
matrix_ops_db = LocalGroupDB()
matrix_ops_db2 = LocalGroupDB(local_opt=GraphToGPULocalOptGroup)
matrix_ops_db2.__name__ = "matrix_ops_db2"
abstract_batch_norm_db = LocalGroupDB()
abstract_batch_norm_db2 = LocalGroupDB(local_opt=GraphToGPULocalOptGroup)
abstract_batch_norm_db2.__name__ = "abstract_batch_norm_db2"
abstract_batch_norm_groupopt = LocalGroupDB()
abstract_batch_norm_groupopt.__name__ = "gpuarray_batchnorm_opts"
def register_opt2(tracks, *tags, **kwargs):
"""
Decorator for the new GraphToGPU optimizer.
Takes an extra parameter(Op) compared to register_opt decorator.
Parameters
----------
tracks : List of Op class Or Op instance or None
The Node's Op to which optimization is being applied.
tags : String
The optimization tag to which the optimizer will be registered.
"""
    # The inner decorator's body is not shown in this excerpt; this is a
    # minimal sketch, assuming it follows the usual register_opt pattern and
    # registers the optimizer with gpu_optimizer2 under the given tags.
    def f(local_opt):
        name = kwargs.pop("name", None) or local_opt.__name__
        if isinstance(local_opt, OptimizationDatabase):
            opt = local_opt
        else:
            opt = local_optimizer(tracks)(local_opt)
        gpu_optimizer2.register(name, opt, "fast_run", "gpuarray", *tags)
        return local_opt
    return f
# Register GPU convolution implementation
# They are tried in a specific order so we can control
# which ones take precedence over others.
abstractconv_groupopt = LocalGroupDB()
abstractconv_groupopt.__name__ = "gpuarray_abstractconv_opts"
register_opt("fast_compile")(abstractconv_groupopt)
| 28.632479 | 90 | 0.711343 |
8a810acd6b334888a1432a3e590727946894d380 | 4,579 | py | Python | jenkinsapi/node.py | imsardine/jenkinsapi | d4bfac62a4d01394ff41540c4d8d897ab566f4eb | [
"MIT"
] | null | null | null | jenkinsapi/node.py | imsardine/jenkinsapi | d4bfac62a4d01394ff41540c4d8d897ab566f4eb | [
"MIT"
] | null | null | null | jenkinsapi/node.py | imsardine/jenkinsapi | d4bfac62a4d01394ff41540c4d8d897ab566f4eb | [
"MIT"
] | null | null | null | """
Module for jenkinsapi Node class
"""
from jenkinsapi.jenkinsbase import JenkinsBase
from jenkinsapi.custom_exceptions import PostRequired
import logging
try:
from urllib import quote as urlquote
except ImportError:
# Python3
from urllib.parse import quote as urlquote
log = logging.getLogger(__name__)
| 37.227642 | 79 | 0.580913 |
8a82d93e4ba8abbe55f44853090dbccbc8c6e819 | 48,277 | py | Python | edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GenericHazards.py | srcarter3/awips2 | 37f31f5e88516b9fd576eaa49d43bfb762e1d174 | [
"Apache-2.0"
] | null | null | null | edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GenericHazards.py | srcarter3/awips2 | 37f31f5e88516b9fd576eaa49d43bfb762e1d174 | [
"Apache-2.0"
] | null | null | null | edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GenericHazards.py | srcarter3/awips2 | 37f31f5e88516b9fd576eaa49d43bfb762e1d174 | [
"Apache-2.0"
] | 1 | 2021-10-30T00:03:05.000Z | 2021-10-30T00:03:05.000Z | ##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
#
# SOFTWARE HISTORY
#
# Date Ticket# Engineer Description
# ------------ ---------- ----------- --------------------------
# 05/07/2015 4027 randerso Migrated A1 OB9.16 code to A2
# 06/17/2015 4027 dgilling Perform case-insensitive
# comparisons in foundCTAs.
# 07/13/2015 4648 randerso Fix bullets in follow up products
# 02/24/2016 5411 randerso Make bullet headers upper case
# 07/15/2016 5749 randerso Replaced ellipses with commas in hazardBodyText
#
##
# This is a base file that is not intended to be overridden.
##
#-------------------------------------------------------------------------
# Description: This product is a template for creating Hazard Products.
#-------------------------------------------------------------------------
# Copying:
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#-------------------------------------------------------------------------
# Standard and Local file names and Locations:
# GenericHazards
#-------------------------------------------------------------------------
# Customization Points:
#
# DEFINITION SECTION
#
# Required Configuration Items:
#
# displayName If not None, defines how product appears in GFE GUI
#
# You must set the following:
#
# productName defines name of product e.g. "Zone Forecast Product"
# fullStationID Full station identifier, 4 letter, such as "KSLC".
# wmoID WMO ID code for product header, such as "FOUS45"
# pil Product pil, such as "SFTBOS"
# areaName (opt.) Area name for product header, such as "Western New York"
# wfoCityState City,state that the WFO is located in, such as "Buffalo NY"
#
# Optional Configuration Items
#
# mapNameForCombinations Name of the map background that is used for
# creating/editing the combinations file. This must
# be defined or the GFE zone combiner cannot be used.
# database Source database for product. Can be "Official",
# "Fcst" or "ISC"
# outputFile Defines the output location of the finished product.
# Product is saved if autoWrite is 1.
# debug If on, debug_print statements will appear.
# textdbPil Defines the awips product identifier
# (e.g., DENCCFDEN) that is used to store the product
# in the AWIPS text database. The product is not
# automatically stored unless autoStore is 1. This
# value is also used for the default GUI entry for
# storage.
# awipsWANPil Defines the awips product identifier
# (e.g., KBOUCCFDEN) that is used to transmit the
# product to the AWIPS WAN. The product is not
# automatically transmitted unless autoSend is 1.
# This value is also used for the default GUI
# entry for storage.
# autoSend If set to 1, then the product will be automatically
# sent on the AWIPS WAN to the "autoSendAddress" with
# the "awipsWANPil after product creation.
# autoStore If set to 1, then the product will be automatically
# stored into the text database using the "textdbPil"
# after product creation.
# autoWrite If set to 1, then the product will be automatically
# written to the "output" named disk file after
# product creation.
#
# lineLength max length of each line
#
# defaultEditAreas defines edit areas, default is Combinations
#
# purgeTime Maximum number of hours past issuance time for the
# expire time.
# includeCities If 1, cities will be included in the area header
# accurateCities If 1, cities are determined from grids
# citiesPhrase "Including the cities of" phrase used when including
# cities
# includeZoneNames If 1, zone names will be included in the area header
# easPhrase Optional EAS phrase to be include in product header
#
# hazardSamplingThreshold Defines the percentage coverage or number of
# grid points in a zone that must contain the hazard
# in order for it to be considered. Tuple (percent, points)
# includeOverviewHeadline If 1, the overview header is templated
# includeOverview If 1, the overview section is templated
# bulletProd If 1, the product will use a bullet format
#-------------------------------------------------------------------------
# Weather Elements Needed:
# Hazards
#-------------------------------------------------------------------------
# Edit Areas Needed: None
#-------------------------------------------------------------------------
# Associated Utilities Files e.g. Combinations file:
# Combinations file
#-------------------------------------------------------------------------
# Component Products:
# Hazards
#-------------------------------------------------------------------------
# Development tasks that are identified and in progress:
#
# To look up tasks and their status, see the Text Product User Guide
# Section on "Tkgnats: Task Reporting System".
#-------------------------------------------------------------------------
# Additional Information:
#-------------------------------------------------------------------------
# Example Output:
#-------------------------------------------------------------------------
import LogStream
import TextRules
import SampleAnalysis
import time, string, types, copy, re
import CallToActions
import AbsTime
| 41.29769 | 151 | 0.53106 |
8a8396f2f3ab51a489f606b57146366f183507ea | 14,346 | py | Python | virtualscreening/vina/spark/buried_areas.py | rodrigofaccioli/drugdesign | de15880af361a010729b1f4fbc8a75a2b36688a6 | [
"Apache-2.0"
] | 3 | 2015-01-19T20:12:59.000Z | 2019-02-21T18:43:04.000Z | virtualscreening/vina/spark/buried_areas.py | rodrigofaccioli/drugdesign | de15880af361a010729b1f4fbc8a75a2b36688a6 | [
"Apache-2.0"
] | 22 | 2015-01-05T16:48:54.000Z | 2017-01-21T16:36:10.000Z | virtualscreening/vina/spark/buried_areas.py | rodrigofaccioli/drugdesign | de15880af361a010729b1f4fbc8a75a2b36688a6 | [
"Apache-2.0"
] | 11 | 2015-03-03T13:32:24.000Z | 2020-04-03T11:22:24.000Z | from pyspark import SparkContext, SparkConf, SparkFiles
from pyspark.sql import SQLContext, Row
import ConfigParser as configparser
from subprocess import Popen, PIPE
from datetime import datetime
from vina_utils import get_directory_complex_pdb_analysis, get_files_pdb, get_name_model_pdb, get_ligand_from_receptor_ligand_model, get_separator_filename_mode, get_directory_pdb_analysis, loading_pdb_2_list, get_name_receptor_pdb, get_files_pdb_filter
import os, sys
from os_utils import preparing_path
from gromacs_utils import get_value_from_xvg_sasa
from pdb_io import replace_chain_atom_line
from database_io import load_database
main()
| 43.87156 | 253 | 0.748292 |
8a84ca10fd051b6b0bb8be0088246cc71958f9d5 | 12,062 | py | Python | oase-root/web_app/views/system/mail/action_mail.py | Masa-Yasuno/oase | 90f3cee73c0d9b3153808a4a72bd19984a4873f9 | [
"Apache-2.0"
] | 9 | 2020-03-25T07:51:47.000Z | 2022-02-07T00:07:28.000Z | oase-root/web_app/views/system/mail/action_mail.py | Masa-Yasuno/oase | 90f3cee73c0d9b3153808a4a72bd19984a4873f9 | [
"Apache-2.0"
] | 1,164 | 2021-01-28T23:16:11.000Z | 2022-03-28T07:23:10.000Z | oase-root/web_app/views/system/mail/action_mail.py | Masa-Yasuno/oase | 90f3cee73c0d9b3153808a4a72bd19984a4873f9 | [
"Apache-2.0"
] | 25 | 2020-03-17T06:48:30.000Z | 2022-02-15T15:13:44.000Z | # Copyright 2019 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
[Overview]
Mail (SMTP) driver settings used by OASE actions.
"""
import pytz
import datetime
import json
import socket
import traceback
from django.http import HttpResponse
from django.http import HttpResponseServerError
from django.db import transaction
from django.conf import settings
from libs.commonlibs import define as defs
from libs.commonlibs.oase_logger import OaseLogger
from libs.commonlibs.aes_cipher import AESCipher
from web_app.models.models import ActionType
from web_app.models.mail_models import MailDriver
from web_app.templatetags.common import get_message
from web_app.serializers.unicode_check import UnicodeCheck
logger = OaseLogger.get_instance()  # module-level logger
def modify(self, json_str, request):
"""
    [Overview]
    Apply the requested operation (insert/update/delete) to the mail
    driver settings in the DB.
"""
logger.logic_log('LOSI00001', 'None', request=request)
error_flag = False
error_msg = {
'mail_disp_name' : '',
'protocol' : '',
'smtp_server' : '',
'port' : '',
'user' : '',
'password' : '',
}
now = datetime.datetime.now(pytz.timezone('UTC'))
emo_chk = UnicodeCheck()
    # Response returned to the caller; assume success until an error occurs.
    response = {"status": "success"}
try:
rq = json_str['json_str']
ope = int(rq['ope'])
        # Validate the input unless this is a delete operation.
if ope != defs.DABASE_OPECODE.OPE_DELETE:
error_flag = self._validate(rq, error_msg, request)
if error_flag:
raise UserWarning('validation error.')
        # Encrypt the password before persisting it.
cipher = AESCipher(settings.AES_KEY)
if ope == defs.DABASE_OPECODE.OPE_UPDATE:
encrypted_password = cipher.encrypt(rq['password']) if rq['password'] else ''
driver_info_mod = MailDriver.objects.get(mail_driver_id=rq['mail_driver_id'])
driver_info_mod.mail_disp_name = rq['mail_disp_name']
driver_info_mod.protocol = rq['protocol']
driver_info_mod.smtp_server = rq['smtp_server']
driver_info_mod.port = rq['port']
driver_info_mod.user = rq['user']
driver_info_mod.password = encrypted_password
driver_info_mod.last_update_user = request.user.user_name
driver_info_mod.last_update_timestamp = now
driver_info_mod.save(force_update=True)
elif ope == defs.DABASE_OPECODE.OPE_DELETE:
MailDriver.objects.filter(pk=rq['mail_driver_id']).delete()
elif ope == defs.DABASE_OPECODE.OPE_INSERT:
encrypted_password = cipher.encrypt(rq['password']) if rq['password'] else ''
driver_info_reg = MailDriver(
mail_disp_name = rq['mail_disp_name'],
protocol = rq['protocol'],
smtp_server = rq['smtp_server'],
port = rq['port'],
user = rq['user'],
password = encrypted_password,
last_update_user = request.user.user_name,
last_update_timestamp = now
).save(force_insert=True)
except MailDriver.DoesNotExist:
        logger.logic_log('LOSM07006', "mail_driver_id", rq['mail_driver_id'], request=request)
except Exception as e:
logger.logic_log('LOSI00005', traceback.format_exc(), request=request)
response = {
'status': 'failure',
            'error_msg': error_msg,  # per-field validation messages
}
logger.logic_log('LOSI00002', 'response=%s' % response, request=request)
return response
def _validate(self, rq, error_msg, request):
"""
        [Overview]
        Validate the request values.
        [Arguments]
        rq: dict of request values
        error_msg: dict collecting a validation message per field
        [Return value]
        error_flag: True if any validation error was found
"""
logger.logic_log('LOSI00001', 'data: %s, error_msg:%s'%(rq, error_msg))
error_flag = False
emo_chk = UnicodeCheck()
emo_flag = False
emo_flag_ita_disp_name = False
emo_flag_hostname = False
if len(rq['mail_disp_name']) == 0:
error_flag = True
error_msg['mail_disp_name'] += get_message('MOSJA27201', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07001', 'mail_disp_name', request=request)
if len(rq['mail_disp_name']) > 64:
error_flag = True
error_msg['mail_disp_name'] += get_message('MOSJA27202', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07002', 'mail_disp_name', 64, rq['mail_disp_name'], request=request)
    # Reject emoji / 4-byte characters in the display name.
value_list = emo_chk.is_emotion(rq['mail_disp_name'])
if len(value_list) > 0:
error_flag = True
emo_flag = True
error_msg['mail_disp_name'] += get_message('MOSJA27216', request.user.get_lang_mode(), showMsgId=False) + '\n'
if len(rq['protocol']) == 0:
error_flag = True
error_msg['protocol'] += get_message('MOSJA27212', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07001', 'protocol', request=request)
if len(rq['protocol']) > 64:
error_flag = True
error_msg['protocol'] += get_message('MOSJA27213', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07002', 'protocol', 64, rq['protocol'], request=request)
if len(rq['smtp_server']) == 0:
error_flag = True
error_msg['smtp_server'] += get_message('MOSJA27203', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07001', 'smtp_server', request=request)
if len(rq['smtp_server']) > 128:
error_flag = True
error_msg['smtp_server'] += get_message('MOSJA27204', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07002', 'smtp_server', 64, rq['smtp_server'], request=request)
    # Reject emoji / 4-byte characters in the SMTP server name.
value_list = emo_chk.is_emotion(rq['smtp_server'])
if len(value_list) > 0:
error_flag = True
error_msg['smtp_server'] += get_message('MOSJA27217', request.user.get_lang_mode(), showMsgId=False) + '\n'
if len(rq['port']) == 0:
error_flag = True
error_msg['port'] += get_message('MOSJA27205', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07001', 'port', request=request)
try:
tmp_port = int(rq['port'])
if 0 > tmp_port or tmp_port > 65535:
error_flag = True
error_msg['port'] += get_message('MOSJA27206', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07003', 'port', rq['port'], request=request)
except ValueError:
error_flag = True
error_msg['port'] += get_message('MOSJA27206', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07003', 'port', rq['port'], request=request)
if len(rq['user']) > 64:
error_flag = True
error_msg['user'] += get_message('MOSJA27207', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07002', 'user', 64, rq['user'], request=request)
    # Reject emoji / 4-byte characters in the user name.
value_list = emo_chk.is_emotion(rq['user'])
if len(value_list) > 0:
error_flag = True
error_msg['user'] += get_message('MOSJA27218', request.user.get_lang_mode(), showMsgId=False) + '\n'
if len(rq['password']) > 64:
error_flag = True
error_msg['password'] += get_message('MOSJA27208', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07002', 'password', 64, rq['password'], request=request)
    # Reject emoji / 4-byte characters in the password.
value_list = emo_chk.is_emotion(rq['password'])
if len(value_list) > 0:
error_flag = True
error_msg['password'] += get_message('MOSJA27219', request.user.get_lang_mode(), showMsgId=False) + '\n'
if not emo_flag:
duplication = MailDriver.objects.filter(mail_disp_name=rq['mail_disp_name'])
if len(duplication) == 1 and int(rq['mail_driver_id']) != duplication[0].mail_driver_id:
error_flag = True
error_msg['mail_disp_name'] += get_message('MOSJA27209', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07004', 'mail_disp_name', rq['mail_disp_name'], request=request)
if not error_flag:
# all field-level checks passed -- verify the SMTP server accepts connections
resp_code = -1
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
resp_code = sock.connect_ex((rq['smtp_server'], int(rq['port'])))  # 0 if the host resolves (DNS or /etc/hosts) and the port accepts the connection
sock.close()
except Exception:
pass  # connection failure: resp_code stays -1 and is reported below
if resp_code != 0:
error_flag = True
# TODO: provisionally reported under mail_disp_name; should get its own error key
error_msg['mail_disp_name'] += get_message('MOSJA27215', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07005', rq['smtp_server'], rq['port'], request=request)
return error_flag
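# Illustration appended for this excerpt (not part of the original class): the
# reachability probe above, reduced to a standalone helper. The host and port
# in the commented call are hypothetical placeholders.
import socket

def smtp_reachable(host, port, timeout=5.0):
    """Return True if a TCP connection to (host, port) succeeds."""
    try:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            sock.settimeout(timeout)
            return sock.connect_ex((host, port)) == 0  # connect_ex gives 0 on success
    except OSError:
        return False

# smtp_reachable('smtp.example.com', 25)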
| 35.372434 | 122 | 0.596419 |
8a85a524c6381c0f4e277dd284d072a8b41daaac | 3,427 | py | Python | queue/animal_queue.py | cozek/code-practice | bf3098dbeb502cab2e22ce7ea73c2aa05a3caf80 | ["MIT"] | null | null | null | queue/animal_queue.py | cozek/code-practice | bf3098dbeb502cab2e22ce7ea73c2aa05a3caf80 | ["MIT"] | null | null | null | queue/animal_queue.py | cozek/code-practice | bf3098dbeb502cab2e22ce7ea73c2aa05a3caf80 | ["MIT"] | null | null | null | #!/usr/bin/env python3
from typing import Any, Union
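# The Dog/Cat/AnimalQueue definitions are not included in this excerpt; the
# sketch below is an assumption reconstructed from how main() uses them, not
# the original implementation.
class Animal:
    def __init__(self, name: str) -> None:
        self.name = name
        self.order = 0  # set by AnimalQueue.enqueue to record arrival order

class Dog(Animal):
    pass

class Cat(Animal):
    pass

class AnimalQueue:
    def __init__(self) -> None:
        self._animals: list = []
        self._counter = 0

    def enqueue(self, animal: Animal) -> None:
        animal.order = self._counter
        self._counter += 1
        self._animals.append(animal)

    def dequeAny(self) -> Animal:
        return self._animals.pop(0)  # oldest animal of either species

    def dequeDog(self) -> Animal:
        return self._dequeue_type(Dog)

    def dequeCat(self) -> Animal:
        return self._dequeue_type(Cat)

    def _dequeue_type(self, kind: type) -> Animal:
        for i, animal in enumerate(self._animals):
            if isinstance(animal, kind):
                return self._animals.pop(i)
        raise IndexError(f"no {kind.__name__} in queue")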
def main():
q = AnimalQueue()
dogs = [Dog("d1"), Dog("d2"), Dog("d3")]
cats = [Cat("c1"), Cat("c2"), Cat("c3")]
both = []
while cats != []:
both.append(cats.pop())
both.append(dogs.pop())
[q.enqueue(animal) for animal in both]
string = ""
for anim in both:
string += f"{anim.name} {anim.order} | "
print(string)
# print(q.print_cats())
get = q.dequeDog()
print(get.order,get.name)
get = q.dequeAny()
print(get.order,get.name)
if __name__ == "__main__":
main()
| 24.133803 | 79 | 0.541873 |
8a85f7a1837485544e723eea52a8cc5f16480c6c | 6,816 | py | Python | ophyd/areadetector/detectors.py | NSLS-II/ophyd | d5fc722eef4d3d83845b1d523004302ec3aadb78 | ["BSD-3-Clause"] | 16 | 2015-05-20T20:48:25.000Z | 2019-04-24T21:12:59.000Z | ophyd/areadetector/detectors.py | NSLS-II/ophyd | d5fc722eef4d3d83845b1d523004302ec3aadb78 | ["BSD-3-Clause"] | 594 | 2015-01-05T21:55:21.000Z | 2019-05-10T02:05:24.000Z | ophyd/areadetector/detectors.py | NSLS-II/ophyd | d5fc722eef4d3d83845b1d523004302ec3aadb78 | ["BSD-3-Clause"] | 34 | 2015-01-23T19:50:58.000Z | 2019-05-07T05:38:57.000Z | # vi: ts=4 sw=4
'''AreaDetector Devices
`areaDetector`_ detector abstractions
.. _areaDetector: https://areadetector.github.io/master/index.html
'''
import warnings
from .base import (ADBase, ADComponent as C)
from . import cam
__all__ = ['DetectorBase',
'AreaDetector',
'AdscDetector',
'Andor3Detector',
'AndorDetector',
'BrukerDetector',
'DexelaDetector',
'EmergentVisionDetector',
'EigerDetector',
'FirewireLinDetector',
'FirewireWinDetector',
'GreatEyesDetector',
'LightFieldDetector',
'Mar345Detector',
'MarCCDDetector',
'PSLDetector',
'PerkinElmerDetector',
'PICamDetector',
'PilatusDetector',
'PixiradDetector',
'PointGreyDetector',
'ProsilicaDetector',
'PvcamDetector',
'RoperDetector',
'SimDetector',
'URLDetector',
'UVCDetector',
'Xspress3Detector'
]
| 27.264 | 81 | 0.639965 |
8a86ee599744eb8c45294e523b8309fa29706768 | 261 | py | Python | python/EXERCICIO 96 - FUNCAO QUE CALCULA A AREA.py | debor4h/exerciciosPython | a18d88c6e98bc49005bfcb8badeb712007c16d69 | ["MIT"] | 1 | 2022-03-15T02:25:17.000Z | 2022-03-15T02:25:17.000Z | python/EXERCICIO 96 - FUNCAO QUE CALCULA A AREA.py | debor4h/exerciciosPython | a18d88c6e98bc49005bfcb8badeb712007c16d69 | ["MIT"] | null | null | null | python/EXERCICIO 96 - FUNCAO QUE CALCULA A AREA.py | debor4h/exerciciosPython | a18d88c6e98bc49005bfcb8badeb712007c16d69 | ["MIT"] | null | null | null |
print('Controle de Terrenos')
print('-' * 20)
l = float(input('Largura (m): '))
c = float(input('Comprimento (m): '))
print(f'A área do seu terreno {l}X{c} é de {l*c}m².')
| 26.1 | 55 | 0.64751 |
8a8756b0429224a6d5fdf07d18eb3a9eed2f7a05 | 2,373 | py | Python | auth_iam/dashboard/auth/routes.py | santiher/dash-auth-example | 9854bfe953f86a0c7ed97660da30b7b7d1d3069f | ["MIT"] | 11 | 2020-03-05T18:50:07.000Z | 2022-02-16T19:45:35.000Z | auth_iam/dashboard/auth/routes.py | santiher/dash-auth-example | 9854bfe953f86a0c7ed97660da30b7b7d1d3069f | ["MIT"] | null | null | null | auth_iam/dashboard/auth/routes.py | santiher/dash-auth-example | 9854bfe953f86a0c7ed97660da30b7b7d1d3069f | ["MIT"] | null | null | null | import os
from functools import wraps
from os.path import join as join_path
from dash import Dash
from flask import make_response, render_template_string, redirect
excluded_resources_endpoints = (
'static', '_dash_assets.static', '/_favicon.ico', '/login', '/logout',
'/_user', '/auth')
def add_routes(app, authorizer):
"""Adds authentication endpoints to a flask app.
Decorates other endpoints to grant access.
The endpoints are:
* /login
* Method: GET
* /logout
* Method: GET
* Erases cookies
* /auth
* Method: GET
* Validates cookies if present or header authentication
* Header:
'Authorization: DASHBOARD-AUTH username=([^/]*)/password=([^/]*)'
* Sets cookies on login
* Rejects unauthorized users
Parameters
----------
app: flask.Flask or dash.Dash
The flask or dash application
excluded_resources_endpoints: tuple(str)
Tuple with endpoints where access must not be checked.
"""
if isinstance(app, Dash):
app = app.server
login_template = load_template('login.html')
app.add_url_rule('/auth', '/auth', auth)
app.add_url_rule('/login', '/login', login)
app.add_url_rule('/logout', '/logout', logout)
for endpoint, function in app.view_functions.items():
if endpoint not in excluded_resources_endpoints:
app.view_functions[endpoint] = authorize_endpoint(function)
def load_template(filename):
"""Loads the login html template."""
pyfile_path = os.path.dirname(os.path.abspath(__file__))
path = join_path(pyfile_path, 'templates', filename)
with open(path, 'r') as f:
return f.read().strip()
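# The auth/login/logout views and the authorize_endpoint decorator referenced
# above are not shown in this excerpt. A minimal sketch of what such a
# decorator could look like (an assumption, not the project's implementation):
def authorize_endpoint(function):
    """Wrap a view so it redirects to /login unless the request is authorized."""
    @wraps(function)
    def wrapped(*args, **kwargs):
        from flask import request  # request is not imported at module level here
        if not request.cookies.get('auth'):  # hypothetical cookie check
            return redirect('/login')
        return function(*args, **kwargs)
    return wrapped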
| 29.296296 | 77 | 0.634218 |
8a8789db154d951e04619fad043530fa0eb6fd39 | 935 | py | Python | amazon/model_api/migrations/0005_remove_order_datetimecreated_alter_order__id_and_more.py | gabrielkarras/SOEN341 | da7241abd894bda4d5f7465b3de70e51afacf3f5 | ["MIT"] | 3 | 2022-01-16T19:12:37.000Z | 2022-01-25T18:50:15.000Z | amazon/model_api/migrations/0005_remove_order_datetimecreated_alter_order__id_and_more.py | gabrielkarras/SOEN341 | da7241abd894bda4d5f7465b3de70e51afacf3f5 | ["MIT"] | 83 | 2022-01-16T18:57:50.000Z | 2022-03-30T00:44:35.000Z | amazon/model_api/migrations/0005_remove_order_datetimecreated_alter_order__id_and_more.py | gabrielkarras/SOEN341 | da7241abd894bda4d5f7465b3de70e51afacf3f5 | ["MIT"] | null | null | null | # Generated by Django 4.0.1 on 2022-04-07 01:20
from django.db import migrations, models
| 28.333333 | 86 | 0.594652 |
8a88b11de563042688caafdaffa71f1207edee67 | 8,082 | py | Python | items/migrations/0001_initial.py | tony-joseph/livre | 3a6a851ed58029d5d14edde647b15ed22d65f24b | ["BSD-3-Clause"] | 1 | 2020-05-06T16:59:47.000Z | 2020-05-06T16:59:47.000Z | items/migrations/0001_initial.py | tony-joseph/livre | 3a6a851ed58029d5d14edde647b15ed22d65f24b | ["BSD-3-Clause"] | null | null | null | items/migrations/0001_initial.py | tony-joseph/livre | 3a6a851ed58029d5d14edde647b15ed22d65f24b | ["BSD-3-Clause"] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-21 12:22
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
| 56.915493 | 191 | 0.614081 |
8a894222f80aae1db1ccdaaadeb6288f55d6b62f | 267 | py | Python | compliance_suite/exceptions/user_config_exception.py | alextsaihi/rnaget-compliance-suite | a3accae431b9e4f7791dfa5ae867e70da2dd6278 | ["Apache-2.0"] | 1 | 2019-09-18T14:38:55.000Z | 2019-09-18T14:38:55.000Z | compliance_suite/exceptions/user_config_exception.py | alextsaihi/rnaget-compliance-suite | a3accae431b9e4f7791dfa5ae867e70da2dd6278 | ["Apache-2.0"] | 14 | 2019-05-24T18:55:23.000Z | 2022-02-25T16:56:28.000Z | compliance_suite/exceptions/user_config_exception.py | alextsaihi/rnaget-compliance-suite | a3accae431b9e4f7791dfa5ae867e70da2dd6278 | ["Apache-2.0"] | 8 | 2019-04-08T14:48:35.000Z | 2022-02-04T16:59:59.000Z | # -*- coding: utf-8 -*-
"""Module compliance_suite.exceptions.user_config_exception.py
This module contains class definition for user config file exceptions.
""" | 26.7 | 70 | 0.752809 |
8a8988f59a7e29aadd9cfcc08e9db137ae34f210 | 3,677 | py | Python | 2021/day15/aoc-2021-d15.py | bbornstein/aoc | 624dacfe591a46aa34e3071b894076cf60091e7d | ["MIT"] | null | null | null | 2021/day15/aoc-2021-d15.py | bbornstein/aoc | 624dacfe591a46aa34e3071b894076cf60091e7d | ["MIT"] | null | null | null | 2021/day15/aoc-2021-d15.py | bbornstein/aoc | 624dacfe591a46aa34e3071b894076cf60091e7d | ["MIT"] | null | null | null | #!/usr/bin/env python3
# Advent of Code 2021, Day 15 (https://adventofcode.com/2021/day/15)
# Author: Ben Bornstein
import collections
import heapq
Point = collections.namedtuple('Point', ['x', 'y'])
Point.__add__ = lambda self, q: Point(self[0] + q[0], self[1] + q[1])
def resize (self, factor):
"""Resizes this `RiskMap` by setting its expansion factor to `factor`
copies both horizontally and vertically.
"""
self._factor = factor
def valid (self, pos):
"""Indicates whether or not `pos` is valid (inside this `RiskMap`)."""
return pos.y in range(0, self.nrows) and pos.x in range(0, self.ncols)
def search (rmap, start, end):
"""Searches `RiskMap` `rmap` (breadth-first) to find the least risky
path from `start` to `end`. Returns the total risk of that path.
"""
risk = 0
queue = [ (rmap[p], p) for p in rmap.neighbors(start) ]
visited = { start }
heapq.heapify(queue)
while len(queue) > 0:
risk, current = heapq.heappop(queue)
if current == end:
break
for pos in rmap.neighbors(current):
if pos not in visited:
heapq.heappush( queue, ((rmap[pos] + risk), pos) )
visited.add(pos)
return risk
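# NOTE (not part of the original file): the RiskMap class itself is elided from
# this dump -- resize() and valid() above are methods of it in the original
# source. The minimal reconstruction below is an assumption based on the call
# sites (load, neighbors, indexing, nrows/ncols, resize), not the original code.
class RiskMap:
    def __init__(self, rows):
        self._rows = rows    # base grid: list of lists of int risk levels
        self._factor = 1     # expansion factor (Part 2 tiles the grid 5x)

    @classmethod
    def load(cls, filename):
        with open(filename) as stream:
            return cls([[int(c) for c in line.strip()] for line in stream])

    @property
    def nrows(self):
        return len(self._rows) * self._factor

    @property
    def ncols(self):
        return len(self._rows[0]) * self._factor

    def resize(self, factor):
        self._factor = factor

    def __getitem__(self, pos):
        h, w = len(self._rows), len(self._rows[0])
        # Each tile copy adds 1 to the risk; values wrap from 9 back to 1.
        risk = self._rows[pos.y % h][pos.x % w] + pos.y // h + pos.x // w
        return (risk - 1) % 9 + 1

    def neighbors(self, pos):
        for delta in (Point(0, -1), Point(0, 1), Point(-1, 0), Point(1, 0)):
            q = pos + delta
            if 0 <= q.y < self.nrows and 0 <= q.x < self.ncols:
                yield q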
filename = 'aoc-2021-d15.txt'
rmap = RiskMap.load(filename)
start = Point(0, 0)
end = Point(rmap.ncols - 1, rmap.nrows - 1)
# Part 1
#
# Q: Lowest total risk of any path from the top left to the bottom right?
# A: Total Risk = 755
print(f'Part 1: Total Risk = {search(rmap, start, end):4}')
# Part 2
#
# Q: Lowest total risk of any path from the top left to the bottom right?
# A: Total Risk = 3016
rmap.resize(factor=5)
end = Point(rmap.ncols - 1, rmap.nrows - 1)
print(f'Part 2: Total Risk = {search(rmap, start, end)}')
| 25.184932 | 78 | 0.56731 |
8a89fcb6aa9605bd61ebc69c816df71f6eb1ab81 | 673 | py | Python | indico/modules/events/abstracts/compat.py | aiforrural/Digital-Events-Example | 628aaa8727b259b9367ac0ae1c5ba8e9e95eca82 | ["MIT"] | 1 | 2021-02-08T09:34:27.000Z | 2021-02-08T09:34:27.000Z | indico/modules/events/abstracts/compat.py | pamirk/indico | c3b4e06b11cc21ad497f74d0b2ca901bc1b2a768 | ["MIT"] | null | null | null | indico/modules/events/abstracts/compat.py | pamirk/indico | c3b4e06b11cc21ad497f74d0b2ca901bc1b2a768 | ["MIT"] | null | null | null | # This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask import redirect
from indico.modules.events.abstracts.models.abstracts import Abstract
from indico.web.flask.util import url_for
from indico.web.rh import RHSimple
| 35.421053 | 86 | 0.786033 |
8a8a2f0c0a2dfbb11e77c498d88fd4e6f73817b2 | 2,168 | py | Python | src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/database_account_list_keys_result_py3.py | limingu/azure-cli-extensions | 1bc29f089f4da42ab8905e440f2f46d6b5b0aa97 | ["MIT"] | 2 | 2021-06-05T17:51:26.000Z | 2021-11-17T11:17:56.000Z | src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/database_account_list_keys_result_py3.py | limingu/azure-cli-extensions | 1bc29f089f4da42ab8905e440f2f46d6b5b0aa97 | ["MIT"] | 1 | 2020-06-12T01:39:40.000Z | 2020-06-12T01:39:40.000Z | src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/models/database_account_list_keys_result_py3.py | anpaz-msft/azure-cli-extensions | 847fd487fe61e83f2a4163a9393edc9555267bc2 | ["MIT"] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .database_account_list_read_only_keys_result_py3 import DatabaseAccountListReadOnlyKeysResult
| 40.90566 | 98 | 0.66928 |
8a8aa73cf4c767bf7b906925d1382b404b94f301 | 1,834 | py | Python | Google/google_books/scrape_google_books.py | dimitryzub/blog-posts-archive | 0978aaa0c9f0142d6f996b81ce391930c5e3be35 | ["CC0-1.0"] | null | null | null | Google/google_books/scrape_google_books.py | dimitryzub/blog-posts-archive | 0978aaa0c9f0142d6f996b81ce391930c5e3be35 | ["CC0-1.0"] | null | null | null | Google/google_books/scrape_google_books.py | dimitryzub/blog-posts-archive | 0978aaa0c9f0142d6f996b81ce391930c5e3be35 | ["CC0-1.0"] | null | null | null | from parsel import Selector
import requests, json, re
params = {
"q": "richard branson",
"tbm": "bks",
"gl": "us",
"hl": "en"
}
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.87 Safari/537.36",
}
html = requests.get("https://www.google.com/search", params=params, headers=headers, timeout=30)
selector = Selector(text=html.text)
books_results = []
# https://regex101.com/r/mapBs4/1
book_thumbnails = re.findall(r"s=\\'data:image/jpg;base64,(.*?)\\'", str(selector.css("script").getall()), re.DOTALL)
for book_thumbnail, book_result in zip(book_thumbnails, selector.css(".Yr5TG")):
title = book_result.css(".DKV0Md::text").get()
link = book_result.css(".bHexk a::attr(href)").get()
displayed_link = book_result.css(".tjvcx::text").get()
snippet = book_result.css(".cmlJmd span::text").get()
author = book_result.css(".fl span::text").get()
author_link = f'https://www.google.com/search{book_result.css(".N96wpd .fl::attr(href)").get()}'
date_published = book_result.css(".fl+ span::text").get()
preview_link = book_result.css(".R1n8Q a.yKioRe:nth-child(1)::attr(href)").get()
more_editions_link = book_result.css(".R1n8Q a.yKioRe:nth-child(2)::attr(href)").get()
books_results.append({
"title": title,
"link": link,
"displayed_link": displayed_link,
"snippet": snippet,
"author": author,
"author_link": author_link,
"date_published": date_published,
"preview_link": preview_link,
"more_editions_link": f"https://www.google.com{more_editions_link}" if more_editions_link is not None else None,
"thumbnail": bytes(bytes(book_thumbnail, "ascii").decode("unicode-escape"), "ascii").decode("unicode-escape")
})
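# Example of consuming the scraped results (json is imported above but unused
# in this excerpt) -- writes the collected fields, thumbnails included, as JSON:
print(json.dumps(books_results, indent=2, ensure_ascii=False))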
| 39.869565 | 135 | 0.657579 |
8a8bbdd35a1d135f6e6a32befca7b762678940d4 | 327 | py | Python | Python/Higher-Or-Lower/hol/__init__.py | AustinTSchaffer/DailyProgrammer | b16d9babb298ac5e879c514f9c4646b99c6860a8 | ["MIT"] | 1 | 2020-07-28T17:07:35.000Z | 2020-07-28T17:07:35.000Z | Python/Higher-Or-Lower/hol/__init__.py | AustinTSchaffer/DailyProgrammer | b16d9babb298ac5e879c514f9c4646b99c6860a8 | ["MIT"] | 5 | 2021-04-06T18:25:29.000Z | 2021-04-10T15:13:28.000Z | Python/Higher-Or-Lower/hol/__init__.py | AustinTSchaffer/DailyProgrammer | b16d9babb298ac5e879c514f9c4646b99c6860a8 | ["MIT"] | null | null | null | r"""
Contains classes and methods that can be used when simulating the game
Higher-or-Lower and performing statistical analysis on different games.
"""
from hol import (
cards,
constants,
)
from hol._hol import (
generate_all_games,
should_pick_higher,
is_a_winning_game,
generate_win_statistics,
)
| 17.210526 | 71 | 0.737003 |
8a8bd51e1880ca1483e91fca0ab41237e4c4f869 | 4,896 | py | Python | Lib/hTools2/dialogs/glyphs/slide.py | gferreira/hTools2 | a75a671b81a0f4ce5c82b2ad3e2f971ca3e3d98c | ["BSD-3-Clause"] | 11 | 2015-01-06T15:43:56.000Z | 2019-07-27T00:35:20.000Z | Lib/hTools2/dialogs/glyphs/slide.py | gferreira/hTools2 | a75a671b81a0f4ce5c82b2ad3e2f971ca3e3d98c | ["BSD-3-Clause"] | 2 | 2017-05-17T10:11:46.000Z | 2018-11-21T21:43:43.000Z | Lib/hTools2/dialogs/glyphs/slide.py | gferreira/hTools2 | a75a671b81a0f4ce5c82b2ad3e2f971ca3e3d98c | ["BSD-3-Clause"] | 4 | 2015-01-10T13:58:50.000Z | 2019-12-18T15:40:14.000Z | # [h] slide selected glyphs
from mojo.roboFont import CurrentFont, CurrentGlyph, version
from vanilla import *
from hTools2 import hDialog
from hTools2.modules.fontutils import get_full_name, get_glyphs
from hTools2.modules.messages import no_font_open, no_glyph_selected
| 31.184713 | 89 | 0.55576 |
8a8c544c5af946feba8528e8627d4c6fff3edf22 | 3,495 | py | Python | werobot/utils.py | lilac/WeRobot | 29fd70631b736a0c339f16f7729ea89f986c8bae | ["MIT"] | 2 | 2018-06-03T16:32:07.000Z | 2018-06-03T16:32:10.000Z | werobot/utils.py | Milleree/WeRoBot | f9777f792d55ae70e7262f13e6e3f3667a167036 | ["MIT"] | 9 | 2020-06-05T19:51:33.000Z | 2022-03-11T23:40:25.000Z | werobot/utils.py | Milleree/WeRoBot | f9777f792d55ae70e7262f13e6e3f3667a167036 | ["MIT"] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import io
import json
import os
import random
import re
import string
import time
from functools import wraps
from hashlib import sha1
import six
try:
from secrets import choice
except ImportError:
from random import choice
string_types = (six.string_types, six.text_type, six.binary_type)
re_type = type(re.compile("regex_test"))
def byte2int(s, index=0):
"""Get the ASCII int value of a character in a string.
:param s: a string
:param index: the position of desired character
:return: ASCII int value
"""
if six.PY2:
return ord(s[index])
return s[index]
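# generate_token() is used by pay_sign_dict() below but its definition is not
# part of this excerpt; a minimal stand-in consistent with that call site
# (an assumption, not necessarily WeRoBot's implementation):
def generate_token(length=16):
    """Return a random alphanumeric token, e.g. for use as a nonce."""
    alphabet = string.ascii_letters + string.digits
    return ''.join(choice(alphabet) for _ in range(length))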
def pay_sign_dict(
appid,
pay_sign_key,
add_noncestr=True,
add_timestamp=True,
add_appid=True,
**kwargs
):
"""
"""
assert pay_sign_key, "PAY SIGN KEY IS EMPTY"
if add_appid:
kwargs.update({'appid': appid})
if add_noncestr:
kwargs.update({'noncestr': generate_token()})
if add_timestamp:
kwargs.update({'timestamp': int(time.time())})
params = kwargs.items()
_params = [
(k.lower(), v) for k, v in kwargs.items() if k.lower() != "appid"
]
_params += [('appid', appid), ('appkey', pay_sign_key)]
_params.sort()
sign = '&'.join(["%s=%s" % (str(p[0]), str(p[1]))
for p in _params]).encode("utf-8")
sign = sha1(sign).hexdigest()
sign_type = 'SHA1'
return dict(params), sign, sign_type
| 22.403846 | 73 | 0.645207 |
8a8c957af09c1662e1613d8819301ef9871bcd5c | 5,914 | py | Python | tensorflow/python/ops/standard_ops.py | ashutom/tensorflow-upstream | c16069c19de9e286dd664abb78d0ea421e9f32d4 | ["Apache-2.0"] | 8 | 2021-08-03T03:57:10.000Z | 2021-12-13T01:19:02.000Z | tensorflow/python/ops/standard_ops.py | CaptainGizzy21/tensorflow | 3457a2b122e50b4d44ceaaed5a663d635e5c22df | ["Apache-2.0"] | 17 | 2021-08-12T19:38:42.000Z | 2022-01-27T14:39:35.000Z | tensorflow/python/ops/standard_ops.py | CaptainGizzy21/tensorflow | 3457a2b122e50b4d44ceaaed5a663d635e5c22df | ["Apache-2.0"] | 4 | 2022-01-13T11:23:44.000Z | 2022-03-02T11:11:42.000Z | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=unused-import
"""Import names of Tensor Flow standard Ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import platform as _platform
import sys as _sys
from tensorflow.python import autograph
from tensorflow.python.training.experimental import loss_scaling_gradient_tape
# pylint: disable=g-bad-import-order
# Imports the following modules so that @RegisterGradient get executed.
from tensorflow.python.ops import array_grad
from tensorflow.python.ops import cudnn_rnn_grad
from tensorflow.python.ops import data_flow_grad
from tensorflow.python.ops import manip_grad
from tensorflow.python.ops import math_grad
from tensorflow.python.ops import random_grad
from tensorflow.python.ops import rnn_grad
from tensorflow.python.ops import sparse_grad
from tensorflow.python.ops import state_grad
from tensorflow.python.ops import tensor_array_grad
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.array_ops import * # pylint: disable=redefined-builtin
from tensorflow.python.ops.check_ops import *
from tensorflow.python.ops.clip_ops import *
from tensorflow.python.ops.special_math_ops import *
# TODO(vrv): Switch to import * once we're okay with exposing the module.
from tensorflow.python.ops.confusion_matrix import confusion_matrix
from tensorflow.python.ops.control_flow_ops import Assert
from tensorflow.python.ops.control_flow_ops import case
from tensorflow.python.ops.control_flow_ops import cond
from tensorflow.python.ops.control_flow_ops import group
from tensorflow.python.ops.control_flow_ops import no_op
from tensorflow.python.ops.control_flow_ops import tuple # pylint: disable=redefined-builtin
# pylint: enable=redefined-builtin
from tensorflow.python.eager import wrap_function
from tensorflow.python.ops.control_flow_ops import while_loop
from tensorflow.python.ops.batch_ops import *
from tensorflow.python.ops.critical_section_ops import *
from tensorflow.python.ops.data_flow_ops import *
from tensorflow.python.ops.functional_ops import *
from tensorflow.python.ops.gradients import *
from tensorflow.python.ops.histogram_ops import *
from tensorflow.python.ops.init_ops import *
from tensorflow.python.ops.io_ops import *
from tensorflow.python.ops.linalg_ops import *
from tensorflow.python.ops.logging_ops import Print
from tensorflow.python.ops.logging_ops import get_summary_op
from tensorflow.python.ops.logging_ops import timestamp
from tensorflow.python.ops.lookup_ops import initialize_all_tables
from tensorflow.python.ops.lookup_ops import tables_initializer
from tensorflow.python.ops.manip_ops import *
from tensorflow.python.ops.math_ops import * # pylint: disable=redefined-builtin
from tensorflow.python.ops.numerics import *
from tensorflow.python.ops.parsing_ops import *
from tensorflow.python.ops.partitioned_variables import *
from tensorflow.python.ops.proto_ops import *
from tensorflow.python.ops.ragged import ragged_dispatch as _ragged_dispatch
from tensorflow.python.ops.ragged import ragged_operators as _ragged_operators
from tensorflow.python.ops.random_ops import *
from tensorflow.python.ops.script_ops import py_func
from tensorflow.python.ops.session_ops import *
from tensorflow.python.ops.sort_ops import *
from tensorflow.python.ops.sparse_ops import *
from tensorflow.python.ops.state_ops import assign
from tensorflow.python.ops.state_ops import assign_add
from tensorflow.python.ops.state_ops import assign_sub
from tensorflow.python.ops.state_ops import count_up_to
from tensorflow.python.ops.state_ops import scatter_add
from tensorflow.python.ops.state_ops import scatter_div
from tensorflow.python.ops.state_ops import scatter_mul
from tensorflow.python.ops.state_ops import scatter_sub
from tensorflow.python.ops.state_ops import scatter_min
from tensorflow.python.ops.state_ops import scatter_max
from tensorflow.python.ops.state_ops import scatter_update
from tensorflow.python.ops.state_ops import scatter_nd_add
from tensorflow.python.ops.state_ops import scatter_nd_sub
# TODO(simister): Re-enable once binary size increase due to scatter_nd
# ops is under control.
# from tensorflow.python.ops.state_ops import scatter_nd_mul
# from tensorflow.python.ops.state_ops import scatter_nd_div
from tensorflow.python.ops.state_ops import scatter_nd_update
from tensorflow.python.ops.stateless_random_ops import *
from tensorflow.python.ops.string_ops import *
from tensorflow.python.ops.template import *
from tensorflow.python.ops.tensor_array_ops import *
from tensorflow.python.ops.variable_scope import * # pylint: disable=redefined-builtin
from tensorflow.python.ops.variables import *
from tensorflow.python.ops.parallel_for.control_flow_ops import vectorized_map
# pylint: disable=g-import-not-at-top
if _platform.system() == "Windows":
from tensorflow.python.compiler.tensorrt import trt_convert_windows as trt
else:
from tensorflow.python.compiler.tensorrt import trt_convert as trt
# pylint: enable=g-import-not-at-top
# pylint: enable=wildcard-import
# pylint: enable=g-bad-import-order
# These modules were imported to set up RaggedTensor operators and dispatchers:
del _ragged_dispatch, _ragged_operators
| 46.936508 | 93 | 0.825668 |
8a8ce25aff69e17f6f7281d206c301403a98d23f | 3,208 | py | Python | src/tango_scaling_test/TestDeviceServer/__main__.py | rtobar/sdp-prototype | 9f1527b884bf80daa509a7fe3722160c77260f4f | ["BSD-3-Clause"] | null | null | null | src/tango_scaling_test/TestDeviceServer/__main__.py | rtobar/sdp-prototype | 9f1527b884bf80daa509a7fe3722160c77260f4f | ["BSD-3-Clause"] | null | null | null | src/tango_scaling_test/TestDeviceServer/__main__.py | rtobar/sdp-prototype | 9f1527b884bf80daa509a7fe3722160c77260f4f | ["BSD-3-Clause"] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Test Tango device server for use with scaling tests."""
import sys
import time
import argparse
import tango
from tango.server import run
from TestDevice import TestDevice
def init_callback():
"""Report server start up times.
This callback is executed post server initialisation.
"""
# pylint: disable=global-statement
global START_TIME
db = tango.Database()
elapsed = time.time() - START_TIME
list_devices()
exported_devices = list(db.get_device_exported('test/*'))
num_devices = len(exported_devices)
file = open('results.txt', 'a')
file.write(',{},{}\n'.format(elapsed, elapsed / num_devices))
print('>> Time taken to start devices: {:.4f} s ({:.4f} s/dev)'
.format(elapsed, elapsed / num_devices))
def delete_server():
"""Delete the TestDeviceServer from the tango db."""
db = tango.Database()
db.set_timeout_millis(50000)
server = 'TestDeviceServer/1'
server_list = list(db.get_server_list(server))
if server in server_list:
start_time = time.time()
db.delete_server('TestDeviceServer/1')
print('- Delete server: {:.4f} s'.format(time.time() - start_time))
def register(num_devices):
"""Register devices in the tango db."""
db = tango.Database()
device_info = tango.DbDevInfo()
device_info.server = 'TestDeviceServer/1'
# pylint: disable=protected-access
device_info._class = 'TestDevice'
start_time = time.time()
for device_id in range(num_devices):
device_info.name = 'test/test_device/{:05d}'.format(device_id)
db.add_device(device_info)
elapsed = time.time() - start_time
file = open('results.txt', 'a')
file.write('{},{},{}'.format(num_devices, elapsed, elapsed/num_devices))
print('- Register devices: {:.4f} s ({:.4f} s/device)'
.format(elapsed, elapsed / num_devices))
def list_devices():
"""List tango devices associated with the TestDeviceServer."""
db = tango.Database()
server_instance = 'TestDeviceServer/1'
device_class = 'TestDevice'
devices = list(db.get_device_name(server_instance, device_class))
print('- No. registered devices: {}'.format(len(devices)))
exported_devices = list(db.get_device_exported('test/*'))
print('- No. running devices: {}'.format(len(exported_devices)))
def main(args=None, **kwargs):
"""Run (start) the device server."""
run([TestDevice], verbose=True, msg_stream=sys.stdout,
post_init_callback=init_callback, raises=False,
args=args, **kwargs)
if __name__ == '__main__':
PARSER = argparse.ArgumentParser(description='Device registration time.')
PARSER.add_argument('num_devices', metavar='N', type=int,
default=1, nargs='?',
help='Number of devices to start.')
ARGS = PARSER.parse_args()
delete_server()
time.sleep(0.5)
list_devices()
print('* Registering {} devices'.format(ARGS.num_devices))
register(ARGS.num_devices)
list_devices()
print('* Starting server ...')
sys.argv = ['TestDeviceServer', '1', '-v4']
START_TIME = time.time()
main()
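# Example run (illustrative, paths/counts are placeholders):
#   python -m TestDeviceServer 16
# deletes any stale TestDeviceServer/1 entry, registers 16 TestDevice instances
# in the tango db, starts the server, and appends the per-device startup
# timings to results.txt via init_callback().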
| 29.981308 | 77 | 0.65586 |
8a8d44634b296be16e3e3fe11b62e194bcce203d | 14,955 | py | Python | test/test_pipeline.py | ParikhKadam/haystack | 8a57f6b16af0bdd41dc02bf1200e0adbdf1da39b | ["Apache-2.0"] | 1 | 2021-08-04T09:06:43.000Z | 2021-08-04T09:06:43.000Z | test/test_pipeline.py | jacksbox/haystack | 65f1da00cc4b6757752dafb8bf756531fad46dd0 | ["Apache-2.0"] | null | null | null | test/test_pipeline.py | jacksbox/haystack | 65f1da00cc4b6757752dafb8bf756531fad46dd0 | ["Apache-2.0"] | null | null | null | from pathlib import Path
import pytest
from haystack.document_store.elasticsearch import ElasticsearchDocumentStore
from haystack.pipeline import TranslationWrapperPipeline, JoinDocuments, ExtractiveQAPipeline, Pipeline, FAQPipeline, \
DocumentSearchPipeline, RootNode
from haystack.retriever.dense import DensePassageRetriever
from haystack.retriever.sparse import ElasticsearchRetriever
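# The toy node classes these tests rely on (A, B, C, D, E, the AWithOutput*
# variants and JoinNode) are defined earlier in the original test module and
# are omitted from this excerpt. They follow haystack's custom-node pattern,
# roughly as sketched below (an assumption reconstructed from the assertions,
# not the original definitions):
class ToyNode(RootNode):
    letter = "X"  # each concrete node (A, B, ...) appends its own letter

    def run(self, **kwargs):
        kwargs["output"] = kwargs.get("output", "") + self.letter
        return kwargs, "output_1"

class ToyJoinNode(RootNode):
    def run(self, inputs=None, **kwargs):
        # concatenate the "output" strings accumulated along each branch
        if inputs:
            output = "".join(branch["output"] for branch in inputs)
        else:
            output = kwargs.get("output", "")
        return {"output": output}, "output_1"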
def test_parallel_paths_in_pipeline_graph():
pipeline = Pipeline()
pipeline.add_node(name="A", component=A(), inputs=["Query"])
pipeline.add_node(name="B", component=B(), inputs=["A"])
pipeline.add_node(name="C", component=C(), inputs=["B"])
pipeline.add_node(name="E", component=E(), inputs=["C"])
pipeline.add_node(name="D", component=D(), inputs=["B"])
pipeline.add_node(name="F", component=JoinNode(), inputs=["D", "E"])
output = pipeline.run(query="test")
assert output["output"] == "ABDABCE"
pipeline = Pipeline()
pipeline.add_node(name="A", component=A(), inputs=["Query"])
pipeline.add_node(name="B", component=B(), inputs=["A"])
pipeline.add_node(name="C", component=C(), inputs=["B"])
pipeline.add_node(name="D", component=D(), inputs=["B"])
pipeline.add_node(name="E", component=JoinNode(), inputs=["C", "D"])
output = pipeline.run(query="test")
assert output["output"] == "ABCABD"
def test_parallel_paths_in_pipeline_graph_with_branching():
pipeline = Pipeline()
pipeline.add_node(name="A", component=AWithOutput1(), inputs=["Query"])
pipeline.add_node(name="B", component=B(), inputs=["A.output_1"])
pipeline.add_node(name="C", component=C(), inputs=["A.output_2"])
pipeline.add_node(name="D", component=E(), inputs=["B"])
pipeline.add_node(name="E", component=D(), inputs=["B"])
pipeline.add_node(name="F", component=JoinNode(), inputs=["D", "E", "C"])
output = pipeline.run(query="test")
assert output["output"] == "ABEABD"
pipeline = Pipeline()
pipeline.add_node(name="A", component=AWithOutput2(), inputs=["Query"])
pipeline.add_node(name="B", component=B(), inputs=["A.output_1"])
pipeline.add_node(name="C", component=C(), inputs=["A.output_2"])
pipeline.add_node(name="D", component=E(), inputs=["B"])
pipeline.add_node(name="E", component=D(), inputs=["B"])
pipeline.add_node(name="F", component=JoinNode(), inputs=["D", "E", "C"])
output = pipeline.run(query="test")
assert output["output"] == "AC"
pipeline = Pipeline()
pipeline.add_node(name="A", component=AWithOutputAll(), inputs=["Query"])
pipeline.add_node(name="B", component=B(), inputs=["A.output_1"])
pipeline.add_node(name="C", component=C(), inputs=["A.output_2"])
pipeline.add_node(name="D", component=E(), inputs=["B"])
pipeline.add_node(name="E", component=D(), inputs=["B"])
pipeline.add_node(name="F", component=JoinNode(), inputs=["D", "E", "C"])
output = pipeline.run(query="test")
assert output["output"] == "ACABEABD"
| 42.126761 | 119 | 0.664527 |
8a8db025d17d202dce4f03767b8394c4ff63db8d | 14,254 | py | Python | src/telr/TELR_assembly.py | dominik-handler/TELR | 3e34e54fc959c13fa45dc911facf0d5179fbb34b | ["BSD-2-Clause"] | 22 | 2020-09-22T21:21:17.000Z | 2022-01-21T17:52:12.000Z | src/telr/TELR_assembly.py | dominik-handler/TELR | 3e34e54fc959c13fa45dc911facf0d5179fbb34b | ["BSD-2-Clause"] | 6 | 2021-05-07T13:52:30.000Z | 2022-03-27T18:21:10.000Z | src/telr/TELR_assembly.py | dominik-handler/TELR | 3e34e54fc959c13fa45dc911facf0d5179fbb34b | ["BSD-2-Clause"] | 6 | 2020-10-01T12:47:19.000Z | 2021-08-13T14:38:11.000Z | import sys
import os
import subprocess
import shutil
import time
import logging
from Bio import SeqIO
from multiprocessing import Pool
import pysam
from telr.TELR_utility import mkdir, check_exist, format_time
def get_local_contigs(
assembler,
polisher,
contig_dir,
vcf_parsed,
out,
sample_name,
bam,
raw_reads,
thread,
presets,
polish_iterations,
):
"""Perform local assembly using reads from parsed VCF file in parallel"""
# Prepare reads used for local assembly and polishing
sv_reads_dir = os.path.join(out, "sv_reads")
try:
prep_assembly_inputs(
vcf_parsed, out, sample_name, bam, raw_reads, sv_reads_dir, read_type="sv"
)
except Exception as e:
print(e)
print("Prepare local assembly input data failed, exiting...")
sys.exit(1)
mkdir(contig_dir)
k = 0
asm_pa_list = []
with open(vcf_parsed, "r") as input:
for line in input:
entry = line.replace("\n", "").split("\t")
contig_name = "_".join([entry[0], entry[1], entry[2]])
# rename variant reads
sv_reads = sv_reads_dir + "/contig" + str(k)
sv_reads_rename = sv_reads_dir + "/" + contig_name + ".reads.fa"
os.rename(sv_reads, sv_reads_rename)
thread_asm = 1
asm_pa = [
sv_reads_rename,
contig_dir,
contig_name,
thread_asm,
presets,
assembler,
polisher,
polish_iterations,
]
asm_pa_list.append(asm_pa)
k = k + 1
# run assembly in parallel
logging.info("Perform local assembly of non-reference TE loci...")
start_time = time.time()
try:
pool = Pool(processes=thread)
contig_list = pool.map(run_assembly_polishing, asm_pa_list)
pool.close()
pool.join()
except Exception as e:
print(e)
print("Local assembly failed, exiting...")
sys.exit(1)
proc_time = time.time() - start_time
# merge all contigs
assembly_passed_loci = set()
merged_contigs = os.path.join(out, sample_name + ".contigs.fa")
with open(merged_contigs, "w") as merged_output_handle:
for contig in contig_list:
if check_exist(contig):
contig_name = os.path.basename(contig).replace(".cns.fa", "")
assembly_passed_loci.add(contig_name)
parsed_contig = os.path.join(contig_dir, contig_name + ".cns.ctg1.fa")
with open(contig, "r") as input:
records = SeqIO.parse(input, "fasta")
for record in records:
if record.id == "ctg1" or record.id == "contig_1":
record.id = contig_name
record.description = "len=" + str(len(record.seq))
SeqIO.write(record, merged_output_handle, "fasta")
with open(parsed_contig, "w") as parsed_output_handle:
SeqIO.write(record, parsed_output_handle, "fasta")
logging.info("Local assembly finished in " + format_time(proc_time))
return merged_contigs, assembly_passed_loci
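# run_assembly_polishing (mapped over asm_pa_list by the Pool above) is defined
# elsewhere in the original module. Schematically it dispatches on the chosen
# assembler/polisher, roughly as below -- an approximation reconstructed from
# the functions in this module, not the original dispatcher:
def run_assembly_polishing(args):
    reads, asm_dir, contig_name, thread, presets, assembler, polisher, iters = args
    if assembler == "wtdbg2":
        contig = run_wtdbg2_assembly(reads, asm_dir, contig_name, thread, presets)
    else:
        contig = run_flye_assembly(reads, asm_dir, contig_name, thread, presets)
    if contig and iters > 0:
        if polisher == "wtdbg2":
            contig = run_wtdbg2_polishing(contig, reads, int(thread), iters, presets)
        else:
            contig = run_flye_polishing(contig, reads, asm_dir, contig_name, thread, iters, presets)
    return contig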
def run_flye_polishing(
asm_cns, reads, asm_dir, contig_name, thread, polish_iterations, presets
):
"""Run Flye polishing"""
if presets == "pacbio":
presets_flye = "--pacbio-raw"
else:
presets_flye = "--nano-raw"
tmp_out_dir = os.path.join(asm_dir, contig_name)
mkdir(tmp_out_dir)
try:
subprocess.call(
[
"flye",
"--polish-target",
asm_cns,
presets_flye,
reads,
"--out-dir",
tmp_out_dir,
"--thread",
str(thread),
"--iterations",
str(polish_iterations),
]
)
except Exception as e:
print(e)
print("Polishing failed, exiting...")
return None
# rename contig file
polished_contig = os.path.join(
tmp_out_dir, "polished_" + str(polish_iterations) + ".fasta"
)
if check_exist(polished_contig):
os.rename(polished_contig, asm_cns)
shutil.rmtree(tmp_out_dir)
return asm_cns
else:
return None
def run_wtdbg2_polishing(asm_cns, reads, threads, polish_iterations, presets):
"""Run wtdbg2 polishing"""
if presets == "pacbio":
presets_minimap2 = "map-pb"
else:
presets_minimap2 = "map-ont"
# polish consensus
threads = str(min(threads, 4))
bam = asm_cns + ".bam"
k = 0
while True:
# align reads to contigs
command = (
"minimap2 -t "
+ threads
+ " -ax "
+ presets_minimap2
+ " -r2k "
+ asm_cns
+ " "
+ reads
+ " | samtools sort -@"
+ threads
+ " > "
+ bam
)
try:
subprocess.run(
command,
shell=True,
timeout=300,
stdout=subprocess.DEVNULL,
stderr=subprocess.STDOUT,
)
except subprocess.TimeoutExpired:
print("fail to map reads to contig: " + asm_cns)
return
# run wtpoa-cns to get polished contig
cns_tmp = asm_cns + ".tmp"
command = (
"samtools view -F0x900 "
+ bam
+ " | wtpoa-cns -t "
+ threads
+ " -d "
+ asm_cns
+ " -i - -fo "
+ cns_tmp
)
try:
subprocess.run(
command,
shell=True,
timeout=300,
stdout=subprocess.DEVNULL,
stderr=subprocess.STDOUT,
)
except subprocess.TimeoutExpired:
print("fail to polish contig: " + asm_cns)
return
if check_exist(cns_tmp):
os.rename(cns_tmp, asm_cns)
os.remove(bam)
else:
break
k = k + 1
if k >= polish_iterations:
break
if check_exist(asm_cns):
return asm_cns
else:
print("polishing failed for " + asm_cns + "\n")
return None
def run_flye_assembly(sv_reads, asm_dir, contig_name, thread, presets):
"""Run Flye assembly"""
if presets == "pacbio":
presets_flye = "--pacbio-raw"
else:
presets_flye = "--nano-raw"
tmp_out_dir = os.path.join(asm_dir, contig_name)
mkdir(tmp_out_dir)
try:
subprocess.call(
[
"flye",
presets_flye,
sv_reads,
"--out-dir",
tmp_out_dir,
"--thread",
str(thread),
"--iterations",
"0",
]
)
except Exception as e:
print(e)
print("Assembly failed, exiting...")
return
# rename contigs
contig_path = os.path.join(tmp_out_dir, "assembly.fasta")
contig_path_new = os.path.join(asm_dir, contig_name + ".cns.fa")
if check_exist(contig_path):
os.rename(contig_path, contig_path_new)
# remove tmp files
shutil.rmtree(tmp_out_dir)
return contig_path_new
else:
print("assembly failed")
return None
def run_wtdbg2_assembly(sv_reads, asm_dir, contig_name, thread, presets):
"""Run wtdbg2 assembly"""
if presets == "pacbio":
presets_wtdbg2 = "rs"
else:
presets_wtdbg2 = "ont"
prefix = sv_reads.replace(".reads.fa", "")
try:
subprocess.run(
[
"wtdbg2",
"-x",
presets_wtdbg2,
"-q",
"-AS",
"1",
"-g",
"30k",
"-t",
str(thread),
"-i",
sv_reads,
"-fo",
prefix,
],
timeout=300,
)
except subprocess.TimeoutExpired:
print("fail to build contig layout for contig: " + contig_name)
return
except Exception as e:
print(e)
print("wtdbg2 failed, exiting...")
return None
# derive consensus
contig_layout = prefix + ".ctg.lay.gz"
if check_exist(contig_layout):
cns_thread = str(min(thread, 4))
consensus = prefix + ".cns.fa"
try:
subprocess.run(
[
"wtpoa-cns",
"-q",
"-t",
cns_thread,
"-i",
contig_layout,
"-fo",
consensus,
],
timeout=300,
)
except subprocess.TimeoutExpired:
print("fail to assemble contig: " + contig_name)
return None
if check_exist(consensus):
consensus_rename = os.path.join(asm_dir, contig_name + ".cns.fa")
os.rename(consensus, consensus_rename)
return consensus_rename
else:
return None
def prep_assembly_inputs(
vcf_parsed, out, sample_name, bam, raw_reads, reads_dir, read_type="sv"
):
"""Prepare reads for local assembly"""
# logging.info("Prepare reads for local assembly")
if read_type == "sv": # TODO: figure out what this does
# extract read IDs
read_ids = os.path.join(out, sample_name + ".id")
with open(vcf_parsed, "r") as input, open(read_ids, "w") as output:
for line in input:
entry = line.replace("\n", "").split("\t")
read_list = entry[8].split(",")
for read in read_list:
output.write(read + "\n")
else: # TODO: think about using this for assembly, filter for cigar reads
window = 1000
samfile = pysam.AlignmentFile(bam, "rb")
read_ids = os.path.join(out, sample_name + ".id")
vcf_parsed_new = vcf_parsed + ".new"
with open(vcf_parsed, "r") as input, open(read_ids, "w") as output, open(
vcf_parsed_new, "w"
) as VCF:
for line in input:
entry = line.replace("\n", "").split("\t")
# get sniffles read list
read_list = entry[8].split(",")
reads_sniffles = set(read_list)
ins_chr = entry[0]
ins_breakpoint = round((int(entry[1]) + int(entry[2])) / 2)
start = ins_breakpoint - window
end = ins_breakpoint + window
reads = set()
# coverage = 0
for read in samfile.fetch(ins_chr, start, end):
reads.add(read.query_name)
for read in reads:
output.write(read + "\n")
# write
out_line = line.replace("\n", "") + "\t" + str(len(reads))
VCF.write(out_line + "\n")
vcf_parsed = vcf_parsed_new
# generate unique ID list
read_ids_unique = read_ids + ".unique"
command = "cat " + read_ids + " | sort | uniq"
with open(read_ids_unique, "w") as output:
subprocess.call(command, stdout=output, shell=True)
# filter raw reads using read list
subset_fa = os.path.join(out, sample_name + ".subset.fa")
command = "seqtk subseq " + raw_reads + " " + read_ids_unique + " | seqtk seq -a"
with open(subset_fa, "w") as output:
subprocess.call(command, stdout=output, shell=True)
# reorder reads
subset_fa_reorder = out + "/" + sample_name + ".subset.reorder.fa"
extract_reads(subset_fa, read_ids, subset_fa_reorder)
# separate reads into multiple files, using csplit
mkdir(reads_dir)
csplit_prefix = reads_dir + "/contig"
m = []
k = 1
with open(vcf_parsed, "r") as input:
for line in input:
entry = line.replace("\n", "").split("\t")
if read_type == "sv":
k = k + 2 * (len(entry[8].split(",")))
else:
k = k + 2 * int(entry[14])
m.append(k)
if len(m) == 1:
subprocess.call(["cp", subset_fa_reorder, reads_dir + "/contig0"])
elif len(m) == 0:
print("No insertion detected, exiting...")
else:
m = m[:-1]
index = " ".join(str(i) for i in m)
command = (
"csplit -s -f " + csplit_prefix + " -n 1 " + subset_fa_reorder + " " + index
)
subprocess.call(command, shell=True)
# remove tmp files
os.remove(read_ids)
os.remove(read_ids_unique)
os.remove(subset_fa)
os.remove(subset_fa_reorder)
def extract_reads(reads, list, out):
"""Extract reads from fasta using read ID list"""
record_dict = SeqIO.index(reads, "fasta")
with open(out, "wb") as output_handle, open(list, "r") as ID:
for entry in ID:
entry = entry.replace("\n", "")
output_handle.write(record_dict.get_raw(entry))
| 30.32766 | 88 | 0.525116 |
8a900957322aa8d59dab3c2935590611098dad34 | 28,015 | py | Python | pygmt/tests/test_clib.py | aliciaha1997/pygmt | a10af5d8deb3bf3090eab4b6492bcf8cf722cb71 | ["BSD-3-Clause"] | null | null | null | pygmt/tests/test_clib.py | aliciaha1997/pygmt | a10af5d8deb3bf3090eab4b6492bcf8cf722cb71 | ["BSD-3-Clause"] | null | null | null | pygmt/tests/test_clib.py | aliciaha1997/pygmt | a10af5d8deb3bf3090eab4b6492bcf8cf722cb71 | ["BSD-3-Clause"] | 1 | 2021-11-03T07:47:18.000Z | 2021-11-03T07:47:18.000Z | # pylint: disable=protected-access
"""
Test the wrappers for the C API.
"""
import os
from contextlib import contextmanager
import numpy as np
import numpy.testing as npt
import pandas as pd
import pytest
import xarray as xr
from packaging.version import Version
from pygmt import Figure, clib
from pygmt.clib.conversion import dataarray_to_matrix
from pygmt.clib.session import FAMILIES, VIAS
from pygmt.exceptions import (
GMTCLibError,
GMTCLibNoSessionError,
GMTInvalidInput,
GMTVersionError,
)
from pygmt.helpers import GMTTempFile
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
with clib.Session() as _lib:
gmt_version = Version(_lib.info["version"])
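# The mock() helper used throughout these tests is defined in the original
# module but omitted from this excerpt. A minimal sketch of a compatible
# context manager (an assumption, not necessarily pygmt's exact code):
@contextmanager
def mock(session, func, returns=None, mock_func=None):
    """Temporarily patch session.get_libgmt_func so `func` returns `returns`."""
    if mock_func is None:
        def mock_api_function(*args, **kwargs):  # ignores its arguments entirely
            return returns
        mock_func = mock_api_function

    get_libgmt_func = session.get_libgmt_func

    def mock_get_libgmt_func(name, argtypes=None, restype=None):
        if name == func:
            return mock_func
        return get_libgmt_func(name, argtypes, restype)

    setattr(session, "get_libgmt_func", mock_get_libgmt_func)
    yield
    setattr(session, "get_libgmt_func", get_libgmt_func)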
def test_getitem():
"""
Test that I can get correct constants from the C lib.
"""
ses = clib.Session()
assert ses["GMT_SESSION_EXTERNAL"] != -99999
assert ses["GMT_MODULE_CMD"] != -99999
assert ses["GMT_PAD_DEFAULT"] != -99999
assert ses["GMT_DOUBLE"] != -99999
with pytest.raises(GMTCLibError):
ses["A_WHOLE_LOT_OF_JUNK"] # pylint: disable=pointless-statement
def test_create_destroy_session():
"""
Test that create and destroy session are called without errors.
"""
# Create two session and make sure they are not pointing to the same memory
session1 = clib.Session()
session1.create(name="test_session1")
assert session1.session_pointer is not None
session2 = clib.Session()
session2.create(name="test_session2")
assert session2.session_pointer is not None
assert session2.session_pointer != session1.session_pointer
session1.destroy()
session2.destroy()
# Create and destroy a session twice
ses = clib.Session()
for __ in range(2):
with pytest.raises(GMTCLibNoSessionError):
ses.session_pointer # pylint: disable=pointless-statement
ses.create("session1")
assert ses.session_pointer is not None
ses.destroy()
with pytest.raises(GMTCLibNoSessionError):
ses.session_pointer # pylint: disable=pointless-statement
def test_create_session_fails():
"""
Check that an exception is raised when failing to create a session.
"""
ses = clib.Session()
with mock(ses, "GMT_Create_Session", returns=None):
with pytest.raises(GMTCLibError):
ses.create("test-session-name")
# Should fail if trying to create a session before destroying the old one.
ses.create("test1")
with pytest.raises(GMTCLibError):
ses.create("test2")
def test_destroy_session_fails():
"""
Fail to destroy session when given bad input.
"""
ses = clib.Session()
with pytest.raises(GMTCLibNoSessionError):
ses.destroy()
ses.create("test-session")
with mock(ses, "GMT_Destroy_Session", returns=1):
with pytest.raises(GMTCLibError):
ses.destroy()
ses.destroy()
def test_call_module():
"""
Run a command to see if call_module works.
"""
data_fname = os.path.join(TEST_DATA_DIR, "points.txt")
out_fname = "test_call_module.txt"
with clib.Session() as lib:
with GMTTempFile() as out_fname:
lib.call_module("info", "{} -C ->{}".format(data_fname, out_fname.name))
assert os.path.exists(out_fname.name)
output = out_fname.read().strip()
assert output == "11.5309 61.7074 -2.9289 7.8648 0.1412 0.9338"
def test_call_module_invalid_arguments():
"""
Fails for invalid module arguments.
"""
with clib.Session() as lib:
with pytest.raises(GMTCLibError):
lib.call_module("info", "bogus-data.bla")
def test_call_module_invalid_name():
"""
Fails when given bad input.
"""
with clib.Session() as lib:
with pytest.raises(GMTCLibError):
lib.call_module("meh", "")
def test_call_module_error_message():
"""
Check is the GMT error message was captured.
"""
with clib.Session() as lib:
try:
lib.call_module("info", "bogus-data.bla")
except GMTCLibError as error:
assert "Module 'info' failed with status code" in str(error)
assert "gmtinfo [ERROR]: Cannot find file bogus-data.bla" in str(error)
def test_method_no_session():
"""
Fails when not in a session.
"""
# Create an instance of Session without "with" so no session is created.
lib = clib.Session()
with pytest.raises(GMTCLibNoSessionError):
lib.call_module("gmtdefaults", "")
with pytest.raises(GMTCLibNoSessionError):
lib.session_pointer # pylint: disable=pointless-statement
def test_parse_constant_single():
"""
Parsing a single family argument correctly.
"""
lib = clib.Session()
for family in FAMILIES:
parsed = lib._parse_constant(family, valid=FAMILIES)
assert parsed == lib[family]
def test_parse_constant_composite():
"""
Parsing a composite constant argument (separated by |) correctly.
"""
lib = clib.Session()
test_cases = ((family, via) for family in FAMILIES for via in VIAS)
for family, via in test_cases:
composite = "|".join([family, via])
expected = lib[family] + lib[via]
parsed = lib._parse_constant(composite, valid=FAMILIES, valid_modifiers=VIAS)
assert parsed == expected
def test_parse_constant_fails():
"""
Check if the function fails when given bad input.
"""
lib = clib.Session()
test_cases = [
"SOME_random_STRING",
"GMT_IS_DATASET|GMT_VIA_MATRIX|GMT_VIA_VECTOR",
"GMT_IS_DATASET|NOT_A_PROPER_VIA",
"NOT_A_PROPER_FAMILY|GMT_VIA_MATRIX",
"NOT_A_PROPER_FAMILY|ALSO_INVALID",
]
for test_case in test_cases:
with pytest.raises(GMTInvalidInput):
lib._parse_constant(test_case, valid=FAMILIES, valid_modifiers=VIAS)
# Should also fail if not given valid modifiers but is using them anyway.
# This should work...
lib._parse_constant(
"GMT_IS_DATASET|GMT_VIA_MATRIX", valid=FAMILIES, valid_modifiers=VIAS
)
# But this shouldn't.
with pytest.raises(GMTInvalidInput):
lib._parse_constant(
"GMT_IS_DATASET|GMT_VIA_MATRIX", valid=FAMILIES, valid_modifiers=None
)
def test_create_data_dataset():
"""
Run the function to make sure it doesn't fail badly.
"""
with clib.Session() as lib:
# Dataset from vectors
data_vector = lib.create_data(
family="GMT_IS_DATASET|GMT_VIA_VECTOR",
geometry="GMT_IS_POINT",
mode="GMT_CONTAINER_ONLY",
dim=[10, 20, 1, 0], # columns, rows, layers, dtype
)
# Dataset from matrices
data_matrix = lib.create_data(
family="GMT_IS_DATASET|GMT_VIA_MATRIX",
geometry="GMT_IS_POINT",
mode="GMT_CONTAINER_ONLY",
dim=[10, 20, 1, 0],
)
assert data_vector != data_matrix
def test_create_data_grid_dim():
"""
Create a grid ignoring range and inc.
"""
with clib.Session() as lib:
# Grids from matrices using dim
lib.create_data(
family="GMT_IS_GRID|GMT_VIA_MATRIX",
geometry="GMT_IS_SURFACE",
mode="GMT_CONTAINER_ONLY",
dim=[10, 20, 1, 0],
)
def test_create_data_grid_range():
"""
Create a grid specifying range and inc instead of dim.
"""
with clib.Session() as lib:
# Grids from matrices using range and int
lib.create_data(
family="GMT_IS_GRID|GMT_VIA_MATRIX",
geometry="GMT_IS_SURFACE",
mode="GMT_CONTAINER_ONLY",
ranges=[150.0, 250.0, -20.0, 20.0],
inc=[0.1, 0.2],
)
def test_create_data_fails():
"""
Check that create_data raises exceptions for invalid input and output.
"""
# Passing in invalid mode
with pytest.raises(GMTInvalidInput):
with clib.Session() as lib:
lib.create_data(
family="GMT_IS_DATASET",
geometry="GMT_IS_SURFACE",
mode="Not_a_valid_mode",
dim=[0, 0, 1, 0],
ranges=[150.0, 250.0, -20.0, 20.0],
inc=[0.1, 0.2],
)
# Passing in invalid geometry
with pytest.raises(GMTInvalidInput):
with clib.Session() as lib:
lib.create_data(
family="GMT_IS_GRID",
geometry="Not_a_valid_geometry",
mode="GMT_CONTAINER_ONLY",
dim=[0, 0, 1, 0],
ranges=[150.0, 250.0, -20.0, 20.0],
inc=[0.1, 0.2],
)
# If the data pointer returned is None (NULL pointer)
with pytest.raises(GMTCLibError):
with clib.Session() as lib:
with mock(lib, "GMT_Create_Data", returns=None):
lib.create_data(
family="GMT_IS_DATASET",
geometry="GMT_IS_SURFACE",
mode="GMT_CONTAINER_ONLY",
dim=[11, 10, 2, 0],
)
def test_virtual_file():
"""
Test passing in data via a virtual file with a Dataset.
"""
dtypes = "float32 float64 int32 int64 uint32 uint64".split()
shape = (5, 3)
for dtype in dtypes:
with clib.Session() as lib:
family = "GMT_IS_DATASET|GMT_VIA_MATRIX"
geometry = "GMT_IS_POINT"
dataset = lib.create_data(
family=family,
geometry=geometry,
mode="GMT_CONTAINER_ONLY",
dim=[shape[1], shape[0], 1, 0], # columns, rows, layers, dtype
)
data = np.arange(shape[0] * shape[1], dtype=dtype).reshape(shape)
lib.put_matrix(dataset, matrix=data)
# Add the dataset to a virtual file and pass it along to gmt info
vfargs = (family, geometry, "GMT_IN|GMT_IS_REFERENCE", dataset)
with lib.open_virtual_file(*vfargs) as vfile:
with GMTTempFile() as outfile:
lib.call_module("info", "{} ->{}".format(vfile, outfile.name))
output = outfile.read(keep_tabs=True)
bounds = "\t".join(
["<{:.0f}/{:.0f}>".format(col.min(), col.max()) for col in data.T]
)
expected = "<matrix memory>: N = {}\t{}\n".format(shape[0], bounds)
assert output == expected
def test_virtual_file_fails():
"""
Check that opening and closing virtual files raises an exception for non-
zero return codes.
"""
vfargs = (
"GMT_IS_DATASET|GMT_VIA_MATRIX",
"GMT_IS_POINT",
"GMT_IN|GMT_IS_REFERENCE",
None,
)
# Mock Open_VirtualFile to test the status check when entering the context.
# If the exception is raised, the code won't get to the closing of the
# virtual file.
with clib.Session() as lib, mock(lib, "GMT_Open_VirtualFile", returns=1):
with pytest.raises(GMTCLibError):
with lib.open_virtual_file(*vfargs):
print("Should not get to this code")
# Test the status check when closing the virtual file
# Mock the opening to return 0 (success) so that we don't open a file that
# we won't close later.
with clib.Session() as lib, mock(lib, "GMT_Open_VirtualFile", returns=0), mock(
lib, "GMT_Close_VirtualFile", returns=1
):
with pytest.raises(GMTCLibError):
with lib.open_virtual_file(*vfargs):
pass
print("Shouldn't get to this code either")
def test_virtual_file_bad_direction():
"""
Test passing an invalid direction argument.
"""
with clib.Session() as lib:
vfargs = (
"GMT_IS_DATASET|GMT_VIA_MATRIX",
"GMT_IS_POINT",
"GMT_IS_GRID", # The invalid direction argument
0,
)
with pytest.raises(GMTInvalidInput):
with lib.open_virtual_file(*vfargs):
print("This should have failed")
def test_virtualfile_from_vectors():
"""
Test the automation for transforming vectors to virtual file dataset.
"""
dtypes = "float32 float64 int32 int64 uint32 uint64".split()
size = 10
for dtype in dtypes:
x = np.arange(size, dtype=dtype)
y = np.arange(size, size * 2, 1, dtype=dtype)
z = np.arange(size * 2, size * 3, 1, dtype=dtype)
with clib.Session() as lib:
with lib.virtualfile_from_vectors(x, y, z) as vfile:
with GMTTempFile() as outfile:
lib.call_module("info", "{} ->{}".format(vfile, outfile.name))
output = outfile.read(keep_tabs=True)
bounds = "\t".join(
["<{:.0f}/{:.0f}>".format(i.min(), i.max()) for i in (x, y, z)]
)
expected = "<vector memory>: N = {}\t{}\n".format(size, bounds)
assert output == expected
def test_virtualfile_from_vectors_transpose():
"""
Test transforming matrix columns to virtual file dataset.
"""
dtypes = "float32 float64 int32 int64 uint32 uint64".split()
shape = (7, 5)
for dtype in dtypes:
data = np.arange(shape[0] * shape[1], dtype=dtype).reshape(shape)
with clib.Session() as lib:
with lib.virtualfile_from_vectors(*data.T) as vfile:
with GMTTempFile() as outfile:
lib.call_module("info", "{} -C ->{}".format(vfile, outfile.name))
output = outfile.read(keep_tabs=True)
bounds = "\t".join(
["{:.0f}\t{:.0f}".format(col.min(), col.max()) for col in data.T]
)
expected = "{}\n".format(bounds)
assert output == expected
def test_virtualfile_from_vectors_diff_size():
"""
Test the function fails for arrays of different sizes.
"""
x = np.arange(5)
y = np.arange(6)
with clib.Session() as lib:
with pytest.raises(GMTInvalidInput):
with lib.virtualfile_from_vectors(x, y):
print("This should have failed")
def test_virtualfile_from_matrix():
"""
Test transforming a matrix to virtual file dataset.
"""
dtypes = "float32 float64 int32 int64 uint32 uint64".split()
shape = (7, 5)
for dtype in dtypes:
data = np.arange(shape[0] * shape[1], dtype=dtype).reshape(shape)
with clib.Session() as lib:
with lib.virtualfile_from_matrix(data) as vfile:
with GMTTempFile() as outfile:
lib.call_module("info", "{} ->{}".format(vfile, outfile.name))
output = outfile.read(keep_tabs=True)
bounds = "\t".join(
["<{:.0f}/{:.0f}>".format(col.min(), col.max()) for col in data.T]
)
expected = "<matrix memory>: N = {}\t{}\n".format(shape[0], bounds)
assert output == expected
def test_virtualfile_from_matrix_slice():
"""
Test transforming a slice of a larger array to virtual file dataset.
"""
dtypes = "float32 float64 int32 int64 uint32 uint64".split()
shape = (10, 6)
for dtype in dtypes:
full_data = np.arange(shape[0] * shape[1], dtype=dtype).reshape(shape)
rows = 5
cols = 3
data = full_data[:rows, :cols]
with clib.Session() as lib:
with lib.virtualfile_from_matrix(data) as vfile:
with GMTTempFile() as outfile:
lib.call_module("info", "{} ->{}".format(vfile, outfile.name))
output = outfile.read(keep_tabs=True)
bounds = "\t".join(
["<{:.0f}/{:.0f}>".format(col.min(), col.max()) for col in data.T]
)
expected = "<matrix memory>: N = {}\t{}\n".format(rows, bounds)
assert output == expected
def test_virtualfile_from_vectors_pandas():
"""
Pass vectors to a dataset using pandas Series.
"""
dtypes = "float32 float64 int32 int64 uint32 uint64".split()
size = 13
for dtype in dtypes:
data = pd.DataFrame(
data=dict(
x=np.arange(size, dtype=dtype),
y=np.arange(size, size * 2, 1, dtype=dtype),
z=np.arange(size * 2, size * 3, 1, dtype=dtype),
)
)
with clib.Session() as lib:
with lib.virtualfile_from_vectors(data.x, data.y, data.z) as vfile:
with GMTTempFile() as outfile:
lib.call_module("info", "{} ->{}".format(vfile, outfile.name))
output = outfile.read(keep_tabs=True)
bounds = "\t".join(
[
"<{:.0f}/{:.0f}>".format(i.min(), i.max())
for i in (data.x, data.y, data.z)
]
)
expected = "<vector memory>: N = {}\t{}\n".format(size, bounds)
assert output == expected
def test_virtualfile_from_vectors_arraylike():
"""
Pass array-like vectors to a dataset.
"""
size = 13
x = list(range(0, size, 1))
y = tuple(range(size, size * 2, 1))
z = range(size * 2, size * 3, 1)
with clib.Session() as lib:
with lib.virtualfile_from_vectors(x, y, z) as vfile:
with GMTTempFile() as outfile:
lib.call_module("info", "{} ->{}".format(vfile, outfile.name))
output = outfile.read(keep_tabs=True)
bounds = "\t".join(
["<{:.0f}/{:.0f}>".format(min(i), max(i)) for i in (x, y, z)]
)
expected = "<vector memory>: N = {}\t{}\n".format(size, bounds)
assert output == expected
def test_extract_region_fails():
"""
Check that extract region fails if nothing has been plotted.
"""
Figure()
with pytest.raises(GMTCLibError):
with clib.Session() as lib:
lib.extract_region()
def test_extract_region_two_figures():
"""
Extract region should handle multiple figures existing at the same time.
"""
# Make two figures before calling extract_region to make sure that it's
# getting from the current figure, not the last figure.
fig1 = Figure()
region1 = np.array([0, 10, -20, -10])
fig1.coast(region=region1, projection="M6i", frame=True, land="black")
fig2 = Figure()
fig2.basemap(region="US.HI+r5", projection="M6i", frame=True)
# Activate the first figure and extract the region from it
# Use in a different session to avoid any memory problems.
with clib.Session() as lib:
lib.call_module("figure", "{} -".format(fig1._name))
with clib.Session() as lib:
wesn1 = lib.extract_region()
npt.assert_allclose(wesn1, region1)
# Now try it with the second one
with clib.Session() as lib:
lib.call_module("figure", "{} -".format(fig2._name))
with clib.Session() as lib:
wesn2 = lib.extract_region()
npt.assert_allclose(wesn2, np.array([-165.0, -150.0, 15.0, 25.0]))
def test_write_data_fails():
"""
Check that write data raises an exception for non-zero return codes.
"""
# It's hard to make the C API function fail without causing a Segmentation
    # Fault. Can't test this by giving a bad file name because if
# output=='', GMT will just write to stdout and spaces are valid file
# names. Use a mock instead just to exercise this part of the code.
with clib.Session() as lib:
with mock(lib, "GMT_Write_Data", returns=1):
with pytest.raises(GMTCLibError):
lib.write_data(
"GMT_IS_VECTOR",
"GMT_IS_POINT",
"GMT_WRITE_SET",
[1] * 6,
"some-file-name",
None,
)
def test_dataarray_to_matrix_works():
"""
Check that dataarray_to_matrix returns correct output.
"""
data = np.diag(v=np.arange(3))
x = np.linspace(start=0, stop=4, num=3)
y = np.linspace(start=5, stop=9, num=3)
grid = xr.DataArray(data, coords=[("y", y), ("x", x)])
matrix, region, inc = dataarray_to_matrix(grid)
npt.assert_allclose(actual=matrix, desired=np.flipud(data))
npt.assert_allclose(actual=region, desired=[x.min(), x.max(), y.min(), y.max()])
npt.assert_allclose(actual=inc, desired=[x[1] - x[0], y[1] - y[0]])
def test_dataarray_to_matrix_negative_x_increment():
"""
    Check that dataarray_to_matrix returns correct output with flipped x.
"""
data = np.diag(v=np.arange(3))
x = np.linspace(start=4, stop=0, num=3)
y = np.linspace(start=5, stop=9, num=3)
grid = xr.DataArray(data, coords=[("y", y), ("x", x)])
matrix, region, inc = dataarray_to_matrix(grid)
npt.assert_allclose(actual=matrix, desired=np.flip(data, axis=(0, 1)))
npt.assert_allclose(actual=region, desired=[x.min(), x.max(), y.min(), y.max()])
npt.assert_allclose(actual=inc, desired=[abs(x[1] - x[0]), abs(y[1] - y[0])])
def test_dataarray_to_matrix_negative_y_increment():
"""
Check that dataarray_to_matrix returns correct output with flipped y.
"""
data = np.diag(v=np.arange(3))
x = np.linspace(start=0, stop=4, num=3)
y = np.linspace(start=9, stop=5, num=3)
grid = xr.DataArray(data, coords=[("y", y), ("x", x)])
matrix, region, inc = dataarray_to_matrix(grid)
npt.assert_allclose(actual=matrix, desired=data)
npt.assert_allclose(actual=region, desired=[x.min(), x.max(), y.min(), y.max()])
npt.assert_allclose(actual=inc, desired=[abs(x[1] - x[0]), abs(y[1] - y[0])])
def test_dataarray_to_matrix_negative_x_and_y_increment():
"""
Check that dataarray_to_matrix returns correct output with flipped x/y.
"""
data = np.diag(v=np.arange(3))
x = np.linspace(start=4, stop=0, num=3)
y = np.linspace(start=9, stop=5, num=3)
grid = xr.DataArray(data, coords=[("y", y), ("x", x)])
matrix, region, inc = dataarray_to_matrix(grid)
npt.assert_allclose(actual=matrix, desired=np.fliplr(data))
npt.assert_allclose(actual=region, desired=[x.min(), x.max(), y.min(), y.max()])
npt.assert_allclose(actual=inc, desired=[abs(x[1] - x[0]), abs(y[1] - y[0])])
def test_dataarray_to_matrix_dims_fails():
"""
Check that it fails for > 2 dims.
"""
# Make a 3D regular grid
data = np.ones((10, 12, 11), dtype="float32")
x = np.arange(11)
y = np.arange(12)
z = np.arange(10)
grid = xr.DataArray(data, coords=[("z", z), ("y", y), ("x", x)])
with pytest.raises(GMTInvalidInput):
dataarray_to_matrix(grid)
def test_dataarray_to_matrix_inc_fails():
"""
Check that it fails for variable increments.
"""
data = np.ones((4, 5), dtype="float64")
x = np.linspace(0, 1, 5)
y = np.logspace(2, 3, 4)
grid = xr.DataArray(data, coords=[("y", y), ("x", x)])
with pytest.raises(GMTInvalidInput):
dataarray_to_matrix(grid)
def test_get_default():
"""
Make sure get_default works without crashing and gives reasonable results.
"""
with clib.Session() as lib:
assert lib.get_default("API_GRID_LAYOUT") in ["rows", "columns"]
assert int(lib.get_default("API_CORES")) >= 1
assert Version(lib.get_default("API_VERSION")) >= Version("6.2.0")
def test_get_default_fails():
"""
Make sure get_default raises an exception for invalid names.
"""
with clib.Session() as lib:
with pytest.raises(GMTCLibError):
lib.get_default("NOT_A_VALID_NAME")
def test_info_dict():
"""
Make sure the clib.Session.info dict is working.
"""
# Check if there are no errors or segfaults from getting all of the
# properties.
with clib.Session() as lib:
assert lib.info
    # Mock GMT_Get_Default to always return the same string
def mock_defaults(api, name, value): # pylint: disable=unused-argument
"""
Put 'bla' in the value buffer.
"""
value.value = b"bla"
return 0
ses = clib.Session()
ses.create("test-session")
with mock(ses, "GMT_Get_Default", mock_func=mock_defaults):
        # Check that the dictionary is not empty
assert ses.info
for key in ses.info:
assert ses.info[key] == "bla"
ses.destroy()
def test_fails_for_wrong_version():
"""
Make sure the clib.Session raises an exception if GMT is too old.
"""
# Mock GMT_Get_Default to return an old version
def mock_defaults(api, name, value): # pylint: disable=unused-argument
"""
Return an old version.
"""
if name == b"API_VERSION":
value.value = b"5.4.3"
else:
value.value = b"bla"
return 0
lib = clib.Session()
with mock(lib, "GMT_Get_Default", mock_func=mock_defaults):
with pytest.raises(GMTVersionError):
with lib:
assert lib.info["version"] != "5.4.3"
# Make sure the session is closed when the exception is raised.
with pytest.raises(GMTCLibNoSessionError):
assert lib.session_pointer
| 34.332108 | 85 | 0.60464 |
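The manual dtype loops in the tests above lend themselves to pytest parametrization, so each dtype reports as its own test case. A minimal sketch follows; the imports mirror what the file above already uses (clib and GMTTempFile from pygmt), and the test name and shape are illustrative, not part of the original suite.

# Hedged sketch: one dtype per test case instead of a manual loop.
# Assumes the same pygmt imports used by the tests above.
import numpy as np
import pytest
from pygmt import clib
from pygmt.helpers import GMTTempFile

@pytest.mark.parametrize(
    "dtype", ["float32", "float64", "int32", "int64", "uint32", "uint64"]
)
def test_virtualfile_from_vectors_parametrized(dtype):
    shape = (5, 2)  # illustrative size
    data = np.arange(shape[0] * shape[1], dtype=dtype).reshape(shape)
    with clib.Session() as lib:
        with lib.virtualfile_from_vectors(*data.T) as vfile:
            with GMTTempFile() as outfile:
                lib.call_module("info", "{} -C ->{}".format(vfile, outfile.name))
                output = outfile.read(keep_tabs=True)
    bounds = "\t".join(
        "{:.0f}\t{:.0f}".format(col.min(), col.max()) for col in data.T
    )
    assert output == "{}\n".format(bounds)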
8a9030707f61608d50658393cbb2098e6e4bfbb7 | 300 | py | Python | stubs/_pytest/_code.py | questioneer-ltd/scrut | 8cb914c9c35eee0d9e17a6051683c970db2649b5 | ["MIT"] | null | null | null | stubs/_pytest/_code.py | questioneer-ltd/scrut | 8cb914c9c35eee0d9e17a6051683c970db2649b5 | ["MIT"] | null | null | null | stubs/_pytest/_code.py | questioneer-ltd/scrut | 8cb914c9c35eee0d9e17a6051683c970db2649b5 | ["MIT"] | null | null | null | """Type stubs for _pytest._code."""
# This class actually has more functions than are specified here.
# We don't use these features, so I don't think it's worth including
# them in our type stub. We can always change it later.
| 33.333333 | 67 | 0.72 |
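The stub declarations that followed this comment are not shown. As an illustration only, a partial stub for _pytest._code.ExceptionInfo (the class pytest.raises returns) might look like the following; whether this is the class the original stub covered is an assumption.

# Illustrative sketch of a partial type stub; the class and members the
# original file declared are an assumption.
from typing import Generic, Type, TypeVar

_E = TypeVar("_E", bound=BaseException)

class ExceptionInfo(Generic[_E]):
    @property
    def value(self) -> _E: ...
    @property
    def type(self) -> Type[_E]: ...
    def exconly(self, tryshort: bool = ...) -> str: ...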
8a9169fbe2dd0a7e667174a77f2109a3f57e8580 | 1808 | py | Python | Prime Factorization/prime_factorization_II.py | rayvantsahni/Let-us-Math | 571ee70452feae0b15f37d46de658b0c0251bd3d | ["MIT"] | 2 | 2020-08-06T07:09:38.000Z | 2020-09-12T02:32:23.000Z | Prime Factorization/prime_factorization_II.py | rayvantsahni/Math-is-Fun | 571ee70452feae0b15f37d46de658b0c0251bd3d | ["MIT"] | null | null | null | Prime Factorization/prime_factorization_II.py | rayvantsahni/Math-is-Fun | 571ee70452feae0b15f37d46de658b0c0251bd3d | ["MIT"] | 1 | 2021-08-30T14:17:28.000Z | 2021-08-30T14:17:28.000Z |
if __name__ == "__main__":
n = int(input("Enter a number: "))
print(get_factorization(n))
| 54.787879 | 198 | 0.672566 |
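The __main__ block above calls get_factorization(n), whose definition is not shown. A minimal trial-division sketch of such a function follows; the name is taken from the call site, but the body and return shape are assumptions.

# Hedged reconstruction: returns the prime factors of n with multiplicity,
# e.g. get_factorization(60) -> [2, 2, 3, 5]. Body is an assumption.
def get_factorization(n):
    factors = []
    divisor = 2
    while divisor * divisor <= n:
        while n % divisor == 0:  # divide out each prime factor completely
            factors.append(divisor)
            n //= divisor
        divisor += 1
    if n > 1:  # any remainder greater than 1 is itself prime
        factors.append(n)
    return factors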
8a91ba22fcba12ba8237fcf117a449485cdd3de1 | 31466 | py | Python | pandas/core/indexes/range.py | mujtahidalam/pandas | 526468c8fe6fc5157aaf2fce327c5ab2a3350f49 | ["PSF-2.0", "Apache-2.0", "BSD-3-Clause-No-Nuclear-License-2014", "MIT", "MIT-0", "ECL-2.0", "BSD-3-Clause"] | 2 | 2017-12-14T19:50:52.000Z | 2020-04-07T16:47:23.000Z | pandas/core/indexes/range.py | mujtahidalam/pandas | 526468c8fe6fc5157aaf2fce327c5ab2a3350f49 | ["PSF-2.0", "Apache-2.0", "BSD-3-Clause-No-Nuclear-License-2014", "MIT", "MIT-0", "ECL-2.0", "BSD-3-Clause"] | 1 | 2021-07-24T17:35:03.000Z | 2021-07-24T17:35:03.000Z | pandas/core/indexes/range.py | mujtahidalam/pandas | 526468c8fe6fc5157aaf2fce327c5ab2a3350f49 | ["PSF-2.0", "Apache-2.0", "BSD-3-Clause-No-Nuclear-License-2014", "MIT", "MIT-0", "ECL-2.0", "BSD-3-Clause"] | 1 | 2018-01-26T08:33:54.000Z | 2018-01-26T08:33:54.000Z | from __future__ import annotations
from datetime import timedelta
import operator
from sys import getsizeof
from typing import (
TYPE_CHECKING,
Any,
Callable,
Hashable,
List,
cast,
)
import warnings
import numpy as np
from pandas._libs import index as libindex
from pandas._libs.lib import no_default
from pandas._typing import Dtype
from pandas.compat.numpy import function as nv
from pandas.util._decorators import (
cache_readonly,
doc,
)
from pandas.util._exceptions import rewrite_exception
from pandas.core.dtypes.common import (
ensure_platform_int,
ensure_python_int,
is_float,
is_integer,
is_scalar,
is_signed_integer_dtype,
is_timedelta64_dtype,
)
from pandas.core.dtypes.generic import ABCTimedeltaIndex
from pandas.core import ops
import pandas.core.common as com
from pandas.core.construction import extract_array
import pandas.core.indexes.base as ibase
from pandas.core.indexes.base import maybe_extract_name
from pandas.core.indexes.numeric import (
Float64Index,
Int64Index,
NumericIndex,
)
from pandas.core.ops.common import unpack_zerodim_and_defer
if TYPE_CHECKING:
from pandas import Index
_empty_range = range(0)
# --------------------------------------------------------------------
# Rendering Methods
def _format_attrs(self):
"""
Return a list of tuples of the (attr, formatted_value)
"""
attrs = self._get_data_as_items()
if self.name is not None:
attrs.append(("name", ibase.default_pprint(self.name)))
return attrs
# --------------------------------------------------------------------
_deprecation_message = (
"RangeIndex.{} is deprecated and will be "
"removed in a future version. Use RangeIndex.{} "
"instead"
)
def memory_usage(self, deep: bool = False) -> int:
"""
Memory usage of my values
Parameters
----------
deep : bool
Introspect the data deeply, interrogate
`object` dtypes for system-level memory consumption
Returns
-------
bytes used
Notes
-----
Memory usage does not include memory consumed by elements that
are not components of the array if deep=False
See Also
--------
numpy.ndarray.nbytes
"""
return self.nbytes
def __contains__(self, key: Any) -> bool:
hash(key)
try:
key = ensure_python_int(key)
except TypeError:
return False
return key in self._range
# --------------------------------------------------------------------
# Indexing Methods
def _get_indexer(
self,
target: Index,
method: str | None = None,
limit: int | None = None,
tolerance=None,
) -> np.ndarray:
# -> np.ndarray[np.intp]
if com.any_not_none(method, tolerance, limit):
return super()._get_indexer(
target, method=method, tolerance=tolerance, limit=limit
)
if self.step > 0:
start, stop, step = self.start, self.stop, self.step
else:
# GH 28678: work on reversed range for simplicity
reverse = self._range[::-1]
start, stop, step = reverse.start, reverse.stop, reverse.step
if not is_signed_integer_dtype(target):
# checks/conversions/roundings are delegated to general method
return super()._get_indexer(target, method=method, tolerance=tolerance)
target_array = np.asarray(target)
locs = target_array - start
valid = (locs % step == 0) & (locs >= 0) & (target_array < stop)
locs[~valid] = -1
locs[valid] = locs[valid] / step
if step != self.step:
# We reversed this range: transform to original locs
locs[valid] = len(self) - 1 - locs[valid]
return ensure_platform_int(locs)
# --------------------------------------------------------------------
def _view(self: RangeIndex) -> RangeIndex:
result = type(self)._simple_new(self._range, name=self._name)
result._cache = self._cache
return result
def min(self, axis=None, skipna: bool = True, *args, **kwargs) -> int:
"""The minimum value of the RangeIndex"""
nv.validate_minmax_axis(axis)
nv.validate_min(args, kwargs)
return self._minmax("min")
def max(self, axis=None, skipna: bool = True, *args, **kwargs) -> int:
"""The maximum value of the RangeIndex"""
nv.validate_minmax_axis(axis)
nv.validate_max(args, kwargs)
return self._minmax("max")
def argsort(self, *args, **kwargs) -> np.ndarray:
"""
Returns the indices that would sort the index and its
underlying data.
Returns
-------
np.ndarray[np.intp]
See Also
--------
numpy.ndarray.argsort
"""
ascending = kwargs.pop("ascending", True) # EA compat
nv.validate_argsort(args, kwargs)
if self._range.step > 0:
result = np.arange(len(self), dtype=np.intp)
else:
result = np.arange(len(self) - 1, -1, -1, dtype=np.intp)
if not ascending:
result = result[::-1]
return result
def equals(self, other: object) -> bool:
"""
Determines if two Index objects contain the same elements.
"""
if isinstance(other, RangeIndex):
return self._range == other._range
return super().equals(other)
# --------------------------------------------------------------------
# Set Operations
def _min_fitting_element(self, lower_limit: int) -> int:
"""Returns the smallest element greater than or equal to the limit"""
no_steps = -(-(lower_limit - self.start) // abs(self.step))
return self.start + abs(self.step) * no_steps
def _max_fitting_element(self, upper_limit: int) -> int:
"""Returns the largest element smaller than or equal to the limit"""
no_steps = (upper_limit - self.start) // abs(self.step)
return self.start + abs(self.step) * no_steps
def _extended_gcd(self, a: int, b: int) -> tuple[int, int, int]:
"""
Extended Euclidean algorithms to solve Bezout's identity:
        a*x + b*y = gcd(a, b)
Finds one particular solution for x, y: s, t
Returns: gcd, s, t
"""
s, old_s = 0, 1
t, old_t = 1, 0
r, old_r = b, a
while r:
quotient = old_r // r
old_r, r = r, old_r - quotient * r
old_s, s = s, old_s - quotient * s
old_t, t = t, old_t - quotient * t
return old_r, old_s, old_t
def _union(self, other: Index, sort):
"""
Form the union of two Index objects and sorts if possible
Parameters
----------
other : Index or array-like
sort : False or None, default None
Whether to sort resulting index. ``sort=None`` returns a
monotonically increasing ``RangeIndex`` if possible or a sorted
``Int64Index`` if not. ``sort=False`` always returns an
unsorted ``Int64Index``
.. versionadded:: 0.25.0
Returns
-------
union : Index
"""
if isinstance(other, RangeIndex) and sort is None:
start_s, step_s = self.start, self.step
end_s = self.start + self.step * (len(self) - 1)
start_o, step_o = other.start, other.step
end_o = other.start + other.step * (len(other) - 1)
if self.step < 0:
start_s, step_s, end_s = end_s, -step_s, start_s
if other.step < 0:
start_o, step_o, end_o = end_o, -step_o, start_o
if len(self) == 1 and len(other) == 1:
step_s = step_o = abs(self.start - other.start)
elif len(self) == 1:
step_s = step_o
elif len(other) == 1:
step_o = step_s
start_r = min(start_s, start_o)
end_r = max(end_s, end_o)
if step_o == step_s:
if (
(start_s - start_o) % step_s == 0
and (start_s - end_o) <= step_s
and (start_o - end_s) <= step_s
):
return type(self)(start_r, end_r + step_s, step_s)
if (
(step_s % 2 == 0)
and (abs(start_s - start_o) <= step_s / 2)
and (abs(end_s - end_o) <= step_s / 2)
):
return type(self)(start_r, end_r + step_s / 2, step_s / 2)
elif step_o % step_s == 0:
if (
(start_o - start_s) % step_s == 0
and (start_o + step_s >= start_s)
and (end_o - step_s <= end_s)
):
return type(self)(start_r, end_r + step_s, step_s)
elif step_s % step_o == 0:
if (
(start_s - start_o) % step_o == 0
and (start_s + step_o >= start_o)
and (end_s - step_o <= end_o)
):
return type(self)(start_r, end_r + step_o, step_o)
return self._int64index._union(other, sort=sort)
# --------------------------------------------------------------------
def _concat(self, indexes: list[Index], name: Hashable) -> Index:
"""
Overriding parent method for the case of all RangeIndex instances.
When all members of "indexes" are of type RangeIndex: result will be
RangeIndex if possible, Int64Index otherwise. E.g.:
indexes = [RangeIndex(3), RangeIndex(3, 6)] -> RangeIndex(6)
indexes = [RangeIndex(3), RangeIndex(4, 6)] -> Int64Index([0,1,2,4,5])
"""
if not all(isinstance(x, RangeIndex) for x in indexes):
return super()._concat(indexes, name)
elif len(indexes) == 1:
return indexes[0]
rng_indexes = cast(List[RangeIndex], indexes)
start = step = next_ = None
# Filter the empty indexes
non_empty_indexes = [obj for obj in rng_indexes if len(obj)]
for obj in non_empty_indexes:
rng = obj._range
if start is None:
# This is set by the first non-empty index
start = rng.start
if step is None and len(rng) > 1:
step = rng.step
elif step is None:
# First non-empty index had only one element
if rng.start == start:
values = np.concatenate([x._values for x in rng_indexes])
result = Int64Index(values)
return result.rename(name)
step = rng.start - start
non_consecutive = (step != rng.step and len(rng) > 1) or (
next_ is not None and rng.start != next_
)
if non_consecutive:
result = Int64Index(np.concatenate([x._values for x in rng_indexes]))
return result.rename(name)
if step is not None:
next_ = rng[-1] + step
if non_empty_indexes:
# Get the stop value from "next" or alternatively
# from the last non-empty index
stop = non_empty_indexes[-1].stop if next_ is None else next_
return RangeIndex(start, stop, step).rename(name)
# Here all "indexes" had 0 length, i.e. were empty.
# In this case return an empty range index.
return RangeIndex(0, 0).rename(name)
def __len__(self) -> int:
"""
return the length of the RangeIndex
"""
return len(self._range)
def __getitem__(self, key):
"""
Conserve RangeIndex type for scalar and slice keys.
"""
if isinstance(key, slice):
new_range = self._range[key]
return self._simple_new(new_range, name=self._name)
elif is_integer(key):
new_key = int(key)
try:
return self._range[new_key]
except IndexError as err:
raise IndexError(
f"index {key} is out of bounds for axis 0 with size {len(self)}"
) from err
elif is_scalar(key):
raise IndexError(
"only integers, slices (`:`), "
"ellipsis (`...`), numpy.newaxis (`None`) "
"and integer or boolean "
"arrays are valid indices"
)
# fall back to Int64Index
return super().__getitem__(key)
def _getitem_slice(self: RangeIndex, slobj: slice) -> RangeIndex:
"""
Fastpath for __getitem__ when we know we have a slice.
"""
res = self._range[slobj]
return type(self)._simple_new(res, name=self._name)
# --------------------------------------------------------------------
# Reductions
def all(self, *args, **kwargs) -> bool:
return 0 not in self._range
def any(self, *args, **kwargs) -> bool:
return any(self._range)
# --------------------------------------------------------------------
def _arith_method(self, other, op):
"""
Parameters
----------
other : Any
op : callable that accepts 2 params
perform the binary op
"""
if isinstance(other, ABCTimedeltaIndex):
# Defer to TimedeltaIndex implementation
return NotImplemented
elif isinstance(other, (timedelta, np.timedelta64)):
# GH#19333 is_integer evaluated True on timedelta64,
# so we need to catch these explicitly
return op(self._int64index, other)
elif is_timedelta64_dtype(other):
# Must be an np.ndarray; GH#22390
return op(self._int64index, other)
if op in [
operator.pow,
ops.rpow,
operator.mod,
ops.rmod,
ops.rfloordiv,
divmod,
ops.rdivmod,
]:
return op(self._int64index, other)
step: Callable | None = None
if op in [operator.mul, ops.rmul, operator.truediv, ops.rtruediv]:
step = op
# TODO: if other is a RangeIndex we may have more efficient options
other = extract_array(other, extract_numpy=True, extract_range=True)
attrs = self._get_attributes_dict()
left, right = self, other
try:
# apply if we have an override
if step:
with np.errstate(all="ignore"):
rstep = step(left.step, right)
# we don't have a representable op
# so return a base index
if not is_integer(rstep) or not rstep:
raise ValueError
else:
rstep = left.step
with np.errstate(all="ignore"):
rstart = op(left.start, right)
rstop = op(left.stop, right)
result = type(self)(rstart, rstop, rstep, **attrs)
# for compat with numpy / Int64Index
# even if we can represent as a RangeIndex, return
# as a Float64Index if we have float-like descriptors
if not all(is_integer(x) for x in [rstart, rstop, rstep]):
result = result.astype("float64")
return result
except (ValueError, TypeError, ZeroDivisionError):
# Defer to Int64Index implementation
return op(self._int64index, other)
# TODO: Do attrs get handled reliably?
| 32.777083 | 88 | 0.556664 |
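The fast paths in _union above are easiest to see with concrete ranges. A small hedged illustration through the public API follows; the behavior is as implied by the code above, though the exact repr text may vary by pandas version.

# Hedged illustration of the _union fast path vs. the Int64Index fallback.
import pandas as pd

a = pd.RangeIndex(0, 10, 2)   # 0, 2, 4, 6, 8
b = pd.RangeIndex(10, 20, 2)  # 10, 12, 14, 16, 18
# Same step, compatible offsets: the union stays a RangeIndex.
print(a.union(b))  # RangeIndex(start=0, stop=20, step=2)

c = pd.RangeIndex(0, 10, 3)   # 0, 3, 6, 9 -- step incompatible with a
# No representable range: falls back to a sorted Int64Index.
print(a.union(c))  # Int64Index([0, 2, 3, 4, 6, 8, 9], dtype='int64')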
8a921ddf5fe02b1831b2b73b31bdcdcfebea2ba6 | 708 | py | Python | model.py | Hasanweight/pytorch-chatbot-master | 7a3b58af7e5284f1f3f7f7b0aeb3f19d9ee3cbc1 | ["MIT"] | null | null | null | model.py | Hasanweight/pytorch-chatbot-master | 7a3b58af7e5284f1f3f7f7b0aeb3f19d9ee3cbc1 | ["MIT"] | null | null | null | model.py | Hasanweight/pytorch-chatbot-master | 7a3b58af7e5284f1f3f7f7b0aeb3f19d9ee3cbc1 | ["MIT"] | 1 | 2020-11-17T07:04:35.000Z | 2020-11-17T07:04:35.000Z | import torch
import torch.nn as nn
| 30.782609 | 61 | 0.59887 |
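model.py above shows only its imports; the class body is not included. A hedged sketch of the small feed-forward classifier such chatbot projects typically define follows — the layer names and sizes are assumptions, not the repository's actual code.

# Hypothetical reconstruction of the elided model class.
class NeuralNet(nn.Module):
    def __init__(self, input_size, hidden_size, num_classes):
        super().__init__()
        self.l1 = nn.Linear(input_size, hidden_size)
        self.l2 = nn.Linear(hidden_size, hidden_size)
        self.l3 = nn.Linear(hidden_size, num_classes)
        self.relu = nn.ReLU()

    def forward(self, x):
        out = self.relu(self.l1(x))
        out = self.relu(self.l2(out))
        return self.l3(out)  # raw logits; pair with nn.CrossEntropyLoss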
8a9277485abaa1ad23562bb5f41c412cb9cb7cd7 | 6927 | py | Python | jwql/utils/logging_functions.py | hover2pi/jwql | 0a97fe618c007883ffbced88ac1cb45a667fcb3c | ["BSD-3-Clause"] | null | null | null | jwql/utils/logging_functions.py | hover2pi/jwql | 0a97fe618c007883ffbced88ac1cb45a667fcb3c | ["BSD-3-Clause"] | null | null | null | jwql/utils/logging_functions.py | hover2pi/jwql | 0a97fe618c007883ffbced88ac1cb45a667fcb3c | ["BSD-3-Clause"] | null | null | null |
""" Logging functions for the ``jwql`` automation platform.
This module provides decorators to log the execution of modules. Log
files are written to the ``logs/`` directory in the ``jwql`` central
storage area, named by module name and timestamp, e.g.
``monitor_filesystem/monitor_filesystem_2018-06-20-15:22:51.log``
Authors
-------
- Catherine Martlin 2018
- Alex Viana, 2013 (WFC3 QL Version)
Use
---
To log the execution of a module, use:
::
import os
import logging
from jwql.logging.logging_functions import configure_logging
from jwql.logging.logging_functions import log_info
from jwql.logging.logging_functions import log_fail
@log_info
@log_fail
def my_main_function():
pass
if __name__ == '__main__':
module = os.path.basename(__file__).replace('.py', '')
configure_logging(module)
my_main_function()
Dependencies
------------
The user must have a configuration file named ``config.json``
placed in the ``utils`` directory.
References
----------
This code is adopted and updated from python routine
``logging_functions.py`` written by Alex Viana, 2013 for the WFC3
Quicklook automation platform.
"""
import datetime
import getpass
import importlib
import logging
import os
import pwd
import socket
import sys
import time
import traceback
from functools import wraps
from jwql.utils.permissions import set_permissions
from jwql.utils.utils import get_config, ensure_dir_exists
LOG_FILE_LOC = ''
PRODUCTION_BOOL = ''
def configure_logging(module, production_mode=True, path='./'):
"""Configure the log file with a standard logging format.
Parameters
----------
module : str
The name of the module being logged.
production_mode : bool
Whether or not the output should be written to the production
        environment.
path : str
Where to write the log if user-supplied path; default to working dir.
"""
# Determine log file location
if production_mode:
log_file = make_log_file(module)
else:
log_file = make_log_file(module, production_mode=False, path=path)
global LOG_FILE_LOC
global PRODUCTION_BOOL
LOG_FILE_LOC = log_file
PRODUCTION_BOOL = production_mode
# Create the log file and set the permissions
logging.basicConfig(filename=log_file,
format='%(asctime)s %(levelname)s: %(message)s',
datefmt='%m/%d/%Y %H:%M:%S %p',
level=logging.INFO)
set_permissions(log_file)
def make_log_file(module, production_mode=True, path='./'):
"""Create the log file name based on the module name.
The name of the ``log_file`` is a combination of the name of the
module being logged and the current datetime.
Parameters
----------
module : str
The name of the module being logged.
production_mode : bool
Whether or not the output should be written to the production
environment.
path : str
Where to write the log if user-supplied path; default to
working dir.
Returns
-------
log_file : str
The full path to where the log file will be written to.
"""
timestamp = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M')
filename = '{0}_{1}.log'.format(module, timestamp)
user = pwd.getpwuid(os.getuid()).pw_name
settings = get_config()
admin_account = settings['admin_account']
log_path = settings['log_dir']
exempt_modules = []
if user != admin_account and module not in exempt_modules and production_mode:
module = os.path.join('dev', module)
if production_mode:
log_file = os.path.join(log_path, module, filename)
else:
log_file = os.path.join(path, filename)
ensure_dir_exists(os.path.dirname(log_file))
return log_file
def log_info(func):
"""Decorator to log useful system information.
This function can be used as a decorator to log user environment
and system information. Future packages we want to track can be
added or removed as necessary.
Parameters
----------
func : func
The function to decorate.
Returns
-------
wrapped : func
The wrapped function.
"""
return wrapped
def log_fail(func):
"""Decorator to log crashes in the decorated code.
Parameters
----------
func : func
The function to decorate.
Returns
-------
wrapped : func
The wrapped function.
"""
return wrapped
| 28.044534 | 111 | 0.627111 |
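log_info and log_fail above each end in "return wrapped" without the wrapped closure itself being shown. A hedged sketch of what those closures conventionally look like follows; only the general shape is implied by the docstrings, and the specific logged fields are assumptions. Every name used (wraps, logging, getpass, socket, sys, time, traceback) is already imported by the module above.

# Hypothetical reconstruction of the elided closures.
def log_info(func):
    @wraps(func)
    def wrapped(*args, **kwargs):
        # Log user environment and system information, then run the module.
        logging.info('User: ' + getpass.getuser())
        logging.info('System: ' + socket.gethostname())
        logging.info('Python executable: ' + sys.executable)
        start = time.time()
        result = func(*args, **kwargs)
        logging.info('Elapsed time: {:.2f} seconds'.format(time.time() - start))
        return result
    return wrapped


def log_fail(func):
    @wraps(func)
    def wrapped(*args, **kwargs):
        try:
            result = func(*args, **kwargs)
            logging.info('Completed Successfully')
            return result
        except Exception:
            # Record the full traceback before propagating the crash.
            logging.critical(traceback.format_exc())
            logging.critical('CRASHED')
            raise
    return wrapped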
8a928ed1a44855a651b9670429234df930921f0a | 125 | py | Python | api/services/http.py | takos22/API-1 | 261ecd34648d610169caf27b3712256f757b100d | ["MIT"] | null | null | null | api/services/http.py | takos22/API-1 | 261ecd34648d610169caf27b3712256f757b100d | ["MIT"] | null | null | null | api/services/http.py | takos22/API-1 | 261ecd34648d610169caf27b3712256f757b100d | ["MIT"] | null | null | null | from aiohttp import ClientSession
from typing import Optional
session: Optional[ClientSession] = None
__all__ = ("session",)  # __all__ entries must be name strings, not the objects
| 17.857143 | 39 | 0.8 |
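A module-level Optional[ClientSession] like this is normally populated once at application startup and closed at shutdown. A hedged usage sketch follows; the hook names are assumptions about the surrounding application.

# Hypothetical usage of the shared session holder above.
from aiohttp import ClientSession

from api.services import http

async def on_startup() -> None:
    http.session = ClientSession()  # one pooled session for the whole app

async def on_shutdown() -> None:
    if http.session is not None:
        await http.session.close()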