Dataset schema (flattened column header reconstructed as a table; ⌀ marked nullable columns):

| column | dtype | range / classes |
|---|---|---|
| blob_id | string | length 40–40 |
| directory_id | string | length 40–40 |
| path | string | length 3–616 |
| content_id | string | length 40–40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40–40 |
| revision_id | string | length 40–40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M (nullable) |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 (nullable) |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 (nullable) |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 value |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | length 3 – 10.2M |
| authors | list | length 1–1 |
| author_id | string | length 1–132 |
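Assuming this dump corresponds to a Hugging Face-style dataset with the schema above (the dataset path in the sketch is a hypothetical placeholder, not the real source), a minimal loading-and-filtering sketch looks like:

```python
# Sketch only: "org/python-code-dataset" is a placeholder dataset name.
from datasets import load_dataset

ds = load_dataset("org/python-code-dataset", split="train", streaming=True)

# Keep small, starred files; every field name matches the schema table above.
for record in ds:
    if record["star_events_count"] > 0 and record["length_bytes"] < 10_000:
        print(record["repo_name"], record["path"])
        break
```

The records below follow the same column order, with fields separated by `|`; because the multi-line `content` field is embedded verbatim, each record spans many lines.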
2a99a7db473372aeeb7fa4a6ffac6b7e5ed25760 | 5058401352fd2b80bf33bd4a0c0acc77b0e05231 | /python/makeexec/makeexec.py | a4205f5d195154aaa03e44e18c1d69e964d28ad9 | []
| no_license | pgl/mcandre | 3b81ee64bf10ccaf02b9a1d44ed73e20cbcad4b6 | 81055895d872e2f93cd055f5a832c6d89848e3a3 | refs/heads/master | 2021-01-24T20:25:24.672765 | 2013-10-24T09:06:25 | 2013-10-24T09:06:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,189 | py | #!/usr/bin/env python
"""Properly shebang and mark a file as executable"""
__author__ = "Andrew Pennebaker ([email protected])"
__date__ = "3 Apr 2006"
__copyright__ = "Copyright 2006 Andrew Pennebaker"
import sys
import getopt
INTERPRETERS = {
"py":"#!/usr/bin/env python",
"pl":"#!/usr/bin/env perl",
"pm":"#!/usr/bin/env perl",
"lua":"#!/usr/bin/env lua",
"sh":"#!/bin/sh",
"rb":"#!/usr/bin/env ruby"
}
def update():
"""Update file"""
global INTERPRETERS
f = open("paths.conf", "r")
options = ("".join(f.readlines())).split("\n")
INTERPRETERS = {}
    for option in options:
        if not option.strip():
            continue  # skip blank lines left over from the trailing newline
        key, value = option.split(":", 1)
        INTERPRETERS[key] = value
def get_extension(filename):
"""Get a file's extension"""
return filename[filename.rindex(".")+1:]
def makeexec(filename, manual = None):
"""Make a file properly executable"""
auto = None
if manual:
auto = manual
else:
try:
auto = INTERPRETERS[get_extension(filename)]
except KeyError:
raise Exception("Cannot guess interpreter. Specify manual path.")
f = None
try:
f = open(filename, "r")
except IOError:
raise Exception("Error reading %s" % (filename))
lines = ("".join(f.readlines())).split("\n")
f.close()
if lines[0] != auto:
try:
f = open(filename, "w")
except IOError:
raise Exception("Error writing to %s" % (filename))
f.write("%s\n\n" % (auto))
for line in lines:
f.write("%s\n" % (line))
f.close()
def usage():
"""Print usage message"""
print "Usage: %s [options] <file1> <file2> <file3> <...>" % (sys.argv[0])
print "\n--manual <interpreter path>"
print "--help (usage)"
sys.exit()
def main():
"""CLI"""
system_args = sys.argv[1:] # ignore program name
manual = None
optlist = []
args = []
try:
optlist, args = getopt.getopt(system_args, "", ["manual=", "help"])
except getopt.GetoptError:
usage()
if len(args) < 1:
usage()
for option, value in optlist:
if option == "--help":
usage()
elif option == "--manual":
manual = value
for fn in args:
makeexec(fn, manual)
if __name__ == "__main__":
    update()  # load interpreter overrides from paths.conf before processing files
    main()
| [
"[email protected]"
]
| |
369600eb04fb78121977a41f64c63cab8ecd8243 | 6fa0d5d3b61fbce01fad5a7dd50258c09298ee00 | /Algorithm/BOJ/1051.py | 267eb1c6bd32d770b9a4e55572968cf549d5ad9d | []
| no_license | athletejuan/TIL | c8e6bd9f7e2c6f999dbac759adcdb6b2959de384 | 16b854928af2f27d91ba140ebc1aec0007e5eb04 | refs/heads/master | 2023-02-19T13:59:06.495110 | 2022-03-23T15:08:04 | 2022-03-23T15:08:04 | 188,750,527 | 1 | 0 | null | 2023-02-15T22:54:50 | 2019-05-27T01:27:09 | Python | UTF-8 | Python | false | false | 1,216 | py | N,M = map(int, input().split())
base = [input() for _ in range(N)]
def rectangular(l):
while l:
for i in range(M-l):
for j in range(N-l):
if base[j][i] == base[j][i+l] == base[j+l][i] == base[j+l][i+l]:
return (l+1)**2
return rectangular(l-1)
return 1
l = N-1 if N < M else M-1
print(rectangular(l))
# 1st try
# breaker = False
# if N < M:
# for i in range(N-1):
# for j in range(i+1):
# for k in range(M-N+i+1):
# if r[j][k] == r[j][k+N-1-i] == r[j+N-1-i][k] == r[j+N-1-i][k+N-1-i]:
# print((N-i)**2)
# breaker = True
# break
# if breaker:
# break
# if breaker:
# break
# else:
# for i in range(M-1):
# for j in range(i+1):
# for k in range(N-M+i+1):
# if r[k][j] == r[k][j+M-1-i] == r[k+M-1-i][j] == r[k+M-1-i][j+M-1-i]:
# print((M-i)**2)
# breaker = True
# break
# if breaker:
# break
# if breaker:
# break
# if not breaker:
# print(1) | [
"[email protected]"
]
| |
44bb08d1eb1cf06afe26eafcbb135c4b3e59f333 | e31d6c6c74a71daf27d618de4debf59e8cb9f188 | /gluon/losses.py | 22f050cb0f61e493be60803078e9ba0ce4e905eb | [
"MIT"
]
| permissive | vlomonaco/imgclsmob | 574ebfbfe4be7a11c8742f34261bc4e7cc1f30be | d0d1c49a848ab146213ef4cbd37239799d0102d8 | refs/heads/master | 2022-04-18T16:03:11.361053 | 2020-04-14T06:17:36 | 2020-04-14T06:17:36 | 255,555,032 | 0 | 1 | MIT | 2020-04-14T08:39:59 | 2020-04-14T08:39:58 | null | UTF-8 | Python | false | false | 5,009 | py | """
Loss functions.
"""
__all__ = ['SegSoftmaxCrossEntropyLoss', 'MixSoftmaxCrossEntropyLoss']
from mxnet.gluon.loss import Loss, _reshape_like, _apply_weighting
class SegSoftmaxCrossEntropyLoss(Loss):
"""
SoftmaxCrossEntropyLoss with ignore labels (for segmentation task).
Parameters
----------
axis : int, default -1
The axis to sum over when computing softmax and entropy.
sparse_label : bool, default True
Whether label is an integer array instead of probability distribution.
from_logits : bool, default False
Whether input is a log probability (usually from log_softmax) instead of unnormalized numbers.
weight : float or None
Global scalar weight for loss.
batch_axis : int, default 0
The axis that represents mini-batch.
ignore_label : int, default -1
The label to ignore.
size_average : bool, default False
Whether to re-scale loss with regard to ignored labels.
"""
def __init__(self,
sparse_label=True,
batch_axis=0,
ignore_label=-1,
size_average=True,
**kwargs):
super(SegSoftmaxCrossEntropyLoss, self).__init__(None, batch_axis, **kwargs)
self._sparse_label = sparse_label
self._ignore_label = ignore_label
self._size_average = size_average
def hybrid_forward(self, F, pred, label):
"""
Compute loss.
"""
softmaxout = F.SoftmaxOutput(
pred,
label.astype(pred.dtype),
ignore_label=self._ignore_label,
multi_output=self._sparse_label,
use_ignore=True,
normalization=("valid" if self._size_average else "null"))
if self._sparse_label:
loss = -F.pick(F.log(softmaxout), label, axis=1, keepdims=True)
else:
label = _reshape_like(F, label, pred)
loss = -F.sum(F.log(softmaxout) * label, axis=-1, keepdims=True)
loss = F.where(label.expand_dims(axis=1) == self._ignore_label, F.zeros_like(loss), loss)
return F.mean(loss, axis=self._batch_axis, exclude=True)
class MixSoftmaxCrossEntropyLoss(SegSoftmaxCrossEntropyLoss):
"""
SegSoftmaxCrossEntropyLoss with auxiliary loss support.
Parameters
----------
aux : bool, default True
Whether to use auxiliary loss.
aux_weight : float, default 0.2
The weight for aux loss.
ignore_label : int, default -1
The label to ignore.
"""
def __init__(self,
aux=True,
mixup=False,
aux_weight=0.2,
ignore_label=-1,
**kwargs):
super(MixSoftmaxCrossEntropyLoss, self).__init__(ignore_label=ignore_label, **kwargs)
self.aux = aux
self.mixup = mixup
self.aux_weight = aux_weight
def _aux_forward(self, F, pred1, pred2, label, **kwargs):
"""
Compute loss including auxiliary output.
"""
loss1 = super(MixSoftmaxCrossEntropyLoss, self).hybrid_forward(F, pred1, label, **kwargs)
        loss2 = super(MixSoftmaxCrossEntropyLoss, self).hybrid_forward(F, pred2, label, **kwargs)
return loss1 + self.aux_weight * loss2
def _aux_mixup_forward(self, F, pred1, pred2, label1, label2, lam):
"""
Compute loss including auxiliary output.
"""
loss1 = self._mixup_forward(F, pred1, label1, label2, lam)
loss2 = self._mixup_forward(F, pred2, label1, label2, lam)
return loss1 + self.aux_weight * loss2
def _mixup_forward(self, F, pred, label1, label2, lam, sample_weight=None):
if not self._from_logits:
pred = F.log_softmax(pred, self._axis)
if self._sparse_label:
loss1 = -F.pick(pred, label1, axis=self._axis, keepdims=True)
loss2 = -F.pick(pred, label2, axis=self._axis, keepdims=True)
loss = lam * loss1 + (1 - lam) * loss2
else:
label1 = _reshape_like(F, label1, pred)
label2 = _reshape_like(F, label2, pred)
loss1 = -F.sum(pred * label1, axis=self._axis, keepdims=True)
loss2 = -F.sum(pred * label2, axis=self._axis, keepdims=True)
loss = lam * loss1 + (1 - lam) * loss2
loss = _apply_weighting(F, loss, self._weight, sample_weight)
return F.mean(loss, axis=self._batch_axis, exclude=True)
def hybrid_forward(self, F, preds, label, **kwargs):
"""
Compute loss.
"""
if self.aux:
if self.mixup:
return self._aux_mixup_forward(F, *preds, label, **kwargs)
else:
return self._aux_forward(F, *preds, label, **kwargs)
else:
if self.mixup:
return self._mixup_forward(F, *preds, label, **kwargs)
else:
return super(MixSoftmaxCrossEntropyLoss, self).hybrid_forward(F, *preds, label, **kwargs)
| [
"[email protected]"
]
| |
830510183c1a21c8df0759aa9047cb4d65f415a3 | 639c1f8c8bec57cb49665142ae2985d50da8c757 | /mysite/admin.py | 5bf0c8b4754467c15524a98be087b8fd1762af6d | []
| no_license | info3g/hospitalevent | 4c7d66d3af5b1e0e3f65cdd375b99324042f7c9b | fdd17bd7ae0828bd5bbdcf8fc61689f5485a571f | refs/heads/master | 2022-11-05T12:47:23.700584 | 2019-04-23T05:51:05 | 2019-04-23T05:51:05 | 166,350,181 | 0 | 1 | null | 2022-10-28T12:29:33 | 2019-01-18T05:39:27 | Python | UTF-8 | Python | false | false | 464 | py | from django.contrib import admin
from .models import *
# Register your models here.
admin.site.register(promisAnswers)
admin.site.register(diseases)
admin.site.register(symptoms)
admin.site.register(treatments)
admin.site.register(userProfile)
admin.site.register(userProfileSymptom)
admin.site.register(userProfileSymptomUpdate)
admin.site.register(userProfileTreatment)
admin.site.register(message)
admin.site.register(event)
admin.site.register(promisquestions) | [
"[email protected]"
]
| |
3b64b90411be6f00cfcba8c6d834c3c044629f05 | 8bb4a472344fda15985ac322d14e8f4ad79c7553 | /Python3-Core/src/test/prompto/runtime/o/TestFilter.py | 2b491c5cd234908a06c3b10241185209545c0ad8 | []
| no_license | prompto/prompto-python3 | c6b356f5af30c6826730ba7f2ad869f341983a2d | 64bd3d97d4702cc912097d41d961f7ab3fd82bee | refs/heads/master | 2022-12-24T12:33:16.251468 | 2022-11-27T17:37:56 | 2022-11-27T17:37:56 | 32,623,633 | 4 | 0 | null | 2019-05-04T11:06:05 | 2015-03-21T07:17:25 | Python | UTF-8 | Python | false | false | 550 | py | from prompto.parser.o.BaseOParserTest import BaseOParserTest
from prompto.runtime.utils.Out import Out
class TestFilter(BaseOParserTest):
def setUp(self):
super(type(self), self).setUp()
Out.init()
def tearDown(self):
Out.restore()
def testFilterFromIterable(self):
self.checkOutput("filter/filterFromIterable.poc")
def testFilterFromList(self):
self.checkOutput("filter/filterFromList.poc")
def testFilterFromSet(self):
self.checkOutput("filter/filterFromSet.poc")
| [
"[email protected]"
]
| |
8a87d3c15006f967d5b0d48dbd228929680398d2 | 1a23cc660649efe857808fef96740b4046f14713 | /mysite/views.py | 201c61bd6daca2e61d1d912d5faaef4ddd4cd0ba | []
| no_license | itd/djtest | b1df94b0651bf94582778338d472d42e583c1497 | 4903d0624892501ca3a361ce2feca18c12d8d082 | refs/heads/master | 2021-01-10T10:24:08.491299 | 2015-12-10T13:55:27 | 2015-12-10T13:55:27 | 47,764,800 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def index(request):
return HttpResponse("Hello world. Polls index.")
| [
"[email protected]"
]
| |
0d10c4b5302f777b5282afe718c2615460335268 | b7b8cac59c24c28efb3002f639865121d3b1f3e1 | /hyperion/densities/flared_disk.py | 2d8a944f205b11a0c8019c98cbbcd14e8881149c | [
"BSD-2-Clause"
]
| permissive | koepferl/hyperion | 51a461f3cde30faa6dc82f63803b659a831273d1 | d43e1d06889e8b649038b85ef6721c64dd269a4e | refs/heads/master | 2020-04-01T19:11:18.373471 | 2015-01-14T13:31:36 | 2015-03-30T15:38:08 | 34,328,089 | 0 | 0 | null | 2015-04-21T13:17:41 | 2015-04-21T13:17:40 | null | UTF-8 | Python | false | false | 13,172 | py | from __future__ import print_function, division
import numpy as np
from astropy import log as logger
from ..dust import SphericalDust
from ..util.constants import pi
from ..util.convenience import OptThinRadius
from ..util.integrate import integrate_powerlaw
from ..util.validator import validate_scalar
from .core import Disk
class FlaredDisk(Disk):
r'''
This class implements the density structure for a flared axisymmatric
disk, with a density given by:
.. math:: \rho(R,z,\phi) = \rho_0^{\rm disk}\,\left(\frac{R_0}{R}\right)^{\beta - p}\,\exp{\left[-\frac{1}{2}\left(\frac{z}{h(R)}\right)^2\right]} \\
where
.. math:: h(R) = h_0\left(\frac{R}{R_0}\right)^\beta
The :math:`\rho_0^{\rm disk}` parameter does not need to be set directly
(although it can be), and is instead automatically calculated when you set
the disk mass. The exact equation relating :math:`\rho_0^{\rm disk}` to the
disk mass can be found by integrating the equation for
:math:`\rho(R,z,\phi)` over three dimensions and setting the result equal
to the disk mass.
Once the :class:`~hyperion.densities.FlaredDisk` class has been
instantiated, the parameters for the density structure can be set via
attributes::
>>> from hyperion.util.constants import msun, au
>>> disk = FlaredDisk()
>>> disk.mass = 2. * msun
>>> disk.rmin = 0.1 * au
>>> disk.rmax = 100 * au
'''
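    # For reference, the mass normalization used in mass()/rho_0() below is a
    # direct evaluation of the integral implied by the docstring equations:
    #   M = rho_0 * (2*pi)**1.5 * h_0 * r_0**(-p) * Integral[R**(1+p), rmin..rmax]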
def __init__(self, mass=None, rho_0=None, rmin=None, rmax=None, p=-1,
beta=-1.25, h_0=None, r_0=None, cylindrical_inner_rim=True,
cylindrical_outer_rim=True, star=None, dust=None):
# Start off by initializing mass and rho_0
self.mass = None
self.rho_0 = None
# Basic disk parameters
self.rmin = rmin
self.rmax = rmax
self.p = p
self.beta = beta
self.h_0 = h_0
self.r_0 = r_0
self.cylindrical_inner_rim = cylindrical_inner_rim
self.cylindrical_outer_rim = cylindrical_outer_rim
# Disk mass
if mass is not None and rho_0 is not None:
raise Exception("Cannot specify both mass and rho_0")
elif mass is not None:
self.mass = mass
elif rho_0 is not None:
self.rho_0 = rho_0
# Central star
self.star = star
# Dust
self.dust = dust
self._freeze()
@property
def mass(self):
"""
Total disk mass (g)
"""
if self._mass is not None:
return self._mass
elif self._rho_0 is None:
return None
else:
self._check_all_set()
if self.rmax <= self.rmin:
return 0.
int1 = integrate_powerlaw(self.rmin, self.rmax, 1.0 + self.p)
int1 *= self.r_0 ** -self.p
integral = (2. * pi) ** 1.5 * self.h_0 * int1
return self._rho_0 * integral
@mass.setter
def mass(self, value):
if value is not None:
validate_scalar('mass', value, domain='positive')
if self._rho_0 is not None:
logger.warn("Overriding value of rho_0 with value derived from mass")
self._rho_0 = None
self._mass = value
@property
def rho_0(self):
"""
Scale-factor for the disk density (g/cm^3)
"""
if self._rho_0 is not None:
return self._rho_0
elif self._mass is None:
return None
else:
self._check_all_set()
if self.rmax <= self.rmin:
return 0.
int1 = integrate_powerlaw(self.rmin, self.rmax, 1.0 + self.p)
int1 *= self.r_0 ** -self.p
integral = (2. * pi) ** 1.5 * self.h_0 * int1
return self._mass / integral
@rho_0.setter
def rho_0(self, value):
if value is not None:
validate_scalar('rho_0', value, domain='positive')
if self._mass is not None:
logger.warn("Overriding value of mass with value derived from rho_0")
self._mass = None
self._rho_0 = value
@property
def rmin(self):
'''inner radius (cm)'''
if isinstance(self._rmin, OptThinRadius):
return self._rmin.evaluate(self.star, self.dust)
else:
return self._rmin
@rmin.setter
def rmin(self, value):
if not isinstance(value, OptThinRadius) and value is not None:
validate_scalar('rmin', value, domain='positive', extra=' or an OptThinRadius instance')
self._rmin = value
@property
def rmax(self):
'''outer radius (cm)'''
if isinstance(self._rmax, OptThinRadius):
return self._rmax.evaluate(self.star, self.dust)
else:
return self._rmax
@rmax.setter
def rmax(self, value):
if not isinstance(value, OptThinRadius) and value is not None:
validate_scalar('rmax', value, domain='positive', extra=' or an OptThinRadius instance')
self._rmax = value
@property
def p(self):
'''surface density power-law exponent'''
return self._p
@p.setter
def p(self, value):
if value is not None:
validate_scalar('p', value, domain='real')
self._p = value
@property
def beta(self):
'''scaleheight power-law exponent'''
return self._beta
@beta.setter
def beta(self, value):
if value is not None:
validate_scalar('beta', value, domain='real')
self._beta = value
@property
def h_0(self):
'''scaleheight of the disk at ``r_0`` (cm)'''
return self._h_0
@h_0.setter
def h_0(self, value):
if value is not None:
validate_scalar('h_0', value, domain='positive')
self._h_0 = value
@property
def r_0(self):
'''radius at which ``h_0`` is defined (cm)'''
return self._r_0
@r_0.setter
def r_0(self, value):
if value is not None:
validate_scalar('r_0', value, domain='positive')
self._r_0 = value
@property
def cylindrical_inner_rim(self):
'''
Whether the inner edge of the disk should be defined as a truncation
in cylindrical or spherical polar coordinates
'''
return self._cylindrical_inner_rim
@cylindrical_inner_rim.setter
def cylindrical_inner_rim(self, value):
if not isinstance(value, bool):
raise ValueError("cylindrical_inner_rim should be a boolean")
self._cylindrical_inner_rim = value
@property
def cylindrical_outer_rim(self):
'''
Whether the outer edge of the disk should be defined as a truncation
in cylindrical or spherical polar coordinates
'''
return self._cylindrical_outer_rim
@cylindrical_outer_rim.setter
def cylindrical_outer_rim(self, value):
if not isinstance(value, bool):
raise ValueError("cylindrical_outer_rim should be a boolean")
self._cylindrical_outer_rim = value
@property
def dust(self):
'''dust properties (filename or dust object)'''
return self._dust
@dust.setter
def dust(self, value):
if isinstance(value, basestring):
self._dust = SphericalDust(value)
else:
self._dust = value
def __str__(self):
string = "= Flared disk =\n"
string += " - M_disk: %.3e\n" % self.mass
string += " - R_min: %.3e\n" % self.rmin
        string += " - R_max: %.3e\n" % self.rmax
string += " - p: %.3f\n" % self.p
string += " - beta: %.3f\n" % self.beta
string += " - h_0: %.3e\n" % self.h_0
string += " - r_0: %.3e\n" % self.r_0
return string
def _check_all_set(self):
if self._mass is None and self._rho_0 is None:
raise Exception("either mass or rho_0 should be set")
if self.rmin is None:
raise Exception("rmin is not set")
if self.rmax is None:
raise Exception("rmax is not set")
if self.p is None:
raise Exception("p is not set")
if self.beta is None:
raise Exception("beta is not set")
if self.h_0 is None:
raise Exception("h_0 is not set")
if self.r_0 is None:
raise Exception("r_0 is not set")
if isinstance(self.rmin, OptThinRadius):
raise Exception("Inner disk radius needs to be computed first")
if isinstance(self.rmax, OptThinRadius):
raise Exception("Outer disk radius needs to be computed first")
def density(self, grid):
'''
Return the density grid
Parameters
----------
grid : :class:`~hyperion.grid.SphericalPolarGrid` or :class:`~hyperion.grid.CylindricalPolarGrid` instance.
The spherical or cylindrical polar grid object containing
information about the position of the grid cells.
Returns
-------
rho : np.ndarray
A 3-dimensional array containing the density of the disk inside
each cell. The shape of this array is the same as
``grid.shape``.
'''
self._check_all_set()
if self.rmax <= self.rmin:
logger.warn("Ignoring disk, since rmax < rmin")
return np.zeros(grid.shape)
if self.mass == 0:
return np.zeros(grid.shape)
# Find disk scaleheight at each cylindrical radius
h = self.h_0 * (grid.gw / self.r_0) ** self.beta
# Find disk density at all positions
rho = (self.r_0 / grid.gw) ** (self.beta - self.p) \
* np.exp(-0.5 * (grid.gz / h) ** 2)
# Truncate below rmin and above rmax
if self.cylindrical_inner_rim:
rho[grid.gw < self.rmin] = 0.
else:
rho[grid.gr < self.rmin] = 0.
if self.cylindrical_outer_rim:
rho[grid.gw > self.rmax] = 0.
else:
rho[grid.gr > self.rmax] = 0.
# Find density factor
rho *= self.rho_0
if np.sum(rho * grid.volumes) == 0. and self.mass > 0:
raise Exception("Discretized disk mass is zero, suggesting that the grid is too coarse")
norm = self.mass / np.sum(rho * grid.volumes)
logger.info("Disk density is being re-scaled by a factor of %.2f to give the correct mass." % norm)
if norm > 1.1 or norm < 1. / 1.1:
logger.warn("Re-scaling factor is significantly different from 1, which indicates that the grid may be too coarse to properly resolve the disk.")
# Normalize to total disk mass
rho = rho * norm
return rho
def midplane_cumulative_density(self, r):
'''
Find the cumulative column density as a function of radius.
The cumulative density is measured outwards from the origin, and in
the midplane.
Parameters
----------
r : np.ndarray
Array of values of the radius up to which to tabulate the
cumulative density.
Returns
-------
rho : np.ndarray
Array of values of the cumulative density.
'''
self._check_all_set()
if self.rmax <= self.rmin:
logger.warn("Ignoring disk, since rmax < rmin")
return np.zeros(r.shape)
int1 = integrate_powerlaw(self.rmin, r.clip(self.rmin, self.rmax), self.p - self.beta)
int1 *= self.r_0 ** (self.beta - self.p)
return self.rho_0 * int1
def _vertical_profile(self, r, theta):
self._check_all_set()
if self.rmax <= self.rmin:
logger.warn("Ignoring disk, since rmax < rmin")
return np.zeros(theta.shape)
# Convert coordinates to cylindrical polars
z = r * np.cos(theta)
w = r * np.sin(theta)
# Find disk scaleheight at each cylindrical radius
h = self.h_0 * (w / self.r_0) ** self.beta
# Find disk density at all positions
rho = (self.r_0 / w) ** (self.beta - self.p) \
* np.exp(-0.5 * (z / h) ** 2)
rho *= self.rho_0
# What about normalization
return rho
def vertical_cumulative_density(self, r, theta):
'''
Find the cumulative column density as a function of theta.
Parameters
----------
r : float
The spherical radius at which to calculate the cumulative density.
theta : np.ndarray
The theta values at which to tabulate the cumulative density.
Returns
-------
rho : np.ndarray
Array of values of the cumulative density.
'''
density = self._vertical_profile(r, theta)
d = r * np.radians(theta)
tau = density * d
tau[0] = 0.
return tau
def scale_height_at(self, r):
'''
Return the scaleheight of the disk at radius `r`
'''
return self.h_0 * (r / self.r_0) ** self.beta
| [
"[email protected]"
]
| |
d0ac59455b0338b38c3c6fb28dd4f59a9259b261 | 6eb35cb8d53ad226de2a6f78e16cb665644fbbdf | /orca/topology/alerts/matcher.py | b842957d8317f0f69e8a4cdeb3fccd3a67d98a4b | [
"Apache-2.0"
]
| permissive | openrca/orca | 631fbc55f72d7dd01563ebc784a259bf0fa75d22 | 3b3ddcb2c14cc550c586f64eb6ca01e827fbc451 | refs/heads/master | 2023-05-30T22:38:55.431661 | 2022-09-11T09:33:24 | 2022-09-11T09:33:24 | 218,142,874 | 88 | 18 | Apache-2.0 | 2023-05-01T21:16:56 | 2019-10-28T20:51:10 | Python | UTF-8 | Python | false | false | 1,211 | py | # Copyright 2020 OpenRCA Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from orca.topology import matcher
class Matcher(matcher.Matcher):
"""Base class for Alert matchers."""
class AlertToSourceMatcher(Matcher):
"""Generic matcher for links between Alert and source objects."""
def are_linked(self, alert, obj):
source_mapping = alert.properties.source_mapping
if not source_mapping.origin == obj.origin:
return False
if not source_mapping.kind == obj.kind:
return False
mapping_items = source_mapping.properties.items()
obj_items = obj.properties.items()
return all(item in obj_items for item in mapping_items)
| [
"[email protected]"
]
| |
7909f7285c70e1cd8f35d44e1e0df1567e7c7104 | d4a7ed22a20599c2f12a550b782327eea312cdc1 | /doc/src/tutorial/src-odespy/osc2.py | de868146836989337530b80f18b4bb76a3852882 | []
| no_license | rothnic/odespy | dcfca8593d738604fc7b6b66fbe8083c7358fc5c | dd50508030ab61047ca885bf0e842f1ad4ef38f4 | refs/heads/master | 2021-01-15T10:51:19.854871 | 2015-05-02T03:51:30 | 2015-05-02T03:51:30 | 35,228,290 | 1 | 0 | null | 2015-05-07T15:33:52 | 2015-05-07T15:33:52 | null | UTF-8 | Python | false | false | 1,357 | py | """As osc1.py, but testing several solvers and setting sin(theta) to theta."""
from math import pi, sqrt
class Problem:
def __init__(self, c, Theta):
self.c, self.Theta = float(c), float(Theta)
self.freq = sqrt(c)
self.period = 2*pi/self.freq
def f(self, u, t):
theta, omega = u; c = self.c
return [omega, -c*theta]
problem = Problem(c=1, Theta=pi/4)
import odespy
solvers = [
odespy.ThetaRule(problem.f, theta=0), # Forward Euler
odespy.ThetaRule(problem.f, theta=0.5), # Midpoint method
odespy.ThetaRule(problem.f, theta=1), # Backward Euler
odespy.RK4(problem.f),
odespy.MidpointIter(problem.f, max_iter=2, eps_iter=0.01),
odespy.LeapfrogFiltered(problem.f),
]
N_per_period = 20
T = 3*problem.period # final time
import numpy
import matplotlib.pyplot as plt
legends = []
for solver in solvers:
solver_name = str(solver) # short description of solver
print solver_name
solver.set_initial_condition([problem.Theta, 0])
N = N_per_period*problem.period
time_points = numpy.linspace(0, T, N+1)
u, t = solver.solve(time_points)
theta = u[:,0]
legends.append(solver_name)
plt.plot(t, theta)
plt.hold('on')
plt.legend(legends)
plotfile = __file__[:-3]
plt.savefig(plotfile + '.png'); plt.savefig(plotfile + '.pdf')
plt.show()
| [
"[email protected]"
]
| |
35c4bdebc781d3d87cdc25b59b881a5ba5da2bed | a438748ac89d53b19e7f4130529906896f059b25 | /Композиция.py | 8cc2c2abe97d5c1760e95ec575d7544e5ac3e6d1 | []
| no_license | Alexfordrop/Basics | 90ead9294727a823eb044e5f2f69d8f29133d150 | eda400424b2c72bd5e01a6c7cb14ad7ae29477d4 | refs/heads/master | 2023-06-08T16:42:26.704163 | 2021-06-27T20:46:27 | 2021-06-27T20:46:27 | 329,421,907 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 419 | py | class Salary:
def __init__(self, pay):
self.pay = pay
def getTotal(self):
return (self.pay*12)
class Employee:
def __init__(self, pay, bonus):
self.pay = pay
self.bonus = bonus
self.salary = Salary(self.pay)
def annualSalary(self):
return "Total: " + str(self.salary.getTotal() + self.bonus)
employee = Employee(100, 10)
print(employee.annualSalary()) | [
"[email protected]"
]
| |
5aef16b6aeb2d157280392287cf28cad33e25528 | 9ac205e4d8f111608d1abbcfa78b5b6598c17955 | /33.搜索旋转排序数组.py | d447ed77448cc4578fc79c195a6ccb85bf544fb8 | []
| no_license | oceanbei333/leetcode | 41ff0666da41750f7d3c82db53ec6f7f27125d3e | 5d29bcf7ea1a9e489a92bc36d2158456de25829e | refs/heads/main | 2023-03-16T18:17:25.232522 | 2021-02-28T04:56:40 | 2021-02-28T04:56:40 | 319,561,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,169 | py | #
# @lc app=leetcode.cn id=33 lang=python3
#
# [33] Search in Rotated Sorted Array
#
# @lc code=start
from typing import List
class Solution:
def search(self, nums: List[int], target: int) -> int:
return nums.index(target) if target in nums else -1
def search(self, nums: List[int], target: int) -> int:
left, right = 0, len(nums)-1
while left <= right:
mid = (left+right) >> 1
if nums[mid] == target:
return mid
            # binary search can only run on the half that is already sorted
            # nums[:mid+1] is in ascending order
            if nums[left] <= nums[mid]:
                # target lies in nums[:mid+1]
                if nums[mid] > target >= nums[left]:
                    right = mid - 1
                else:
                    # target lies in nums[mid+1:]
                    left = mid+1
            else:
                # nums[mid:] is in ascending order
                if nums[mid] < target <= nums[right]:
                    # target lies in nums[mid+1:]
                    left = mid+1
                else:
                    # target lies in nums[:mid]
                    right = mid - 1
return -1
# @lc code=end
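# Hypothetical quick check outside the LeetCode harness (values illustrative):
#   Solution().search([4, 5, 6, 7, 0, 1, 2], 0)  # -> 4
#   Solution().search([4, 5, 6, 7, 0, 1, 2], 3)  # -> -1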
| [
"[email protected]"
]
| |
61334443dff95bdd7751b514c74720f8be96eb4f | 1ab788ce84e446a98b085b62e1e17f8a2afa148d | /문제풀기/2112. [모의 SW 역량테스트] 보호 필름.py | f68fa9c9fa62e32bd2c49165bc5c321e56ed8bda | []
| no_license | kimjy392/exception | 884dd26e1ec6f1c0357c1fe000742b1562adbeaa | b37e9c2f70adae6b93b94b86f96512469f431739 | refs/heads/master | 2022-12-11T20:33:25.632561 | 2020-08-29T13:26:08 | 2020-08-29T13:26:08 | 195,989,162 | 1 | 0 | null | 2022-12-06T23:20:02 | 2019-07-09T10:43:35 | Python | UTF-8 | Python | false | false | 3,783 | py | # def count():
# global isuse
# isuse = [False] * W
# for j in range(W):
# i, start, cnt = 0, 0, 0
# while i < D:
# if tboard[start][j] == tboard[i][j]:
# cnt += 1
# else:
# cnt = 0
# start = i
# continue
# if cnt == K:
# isuse[j] = True
# break
# i += 1
# if sum(isuse) == W:
# return True
# else:
# return False
# def Cback(k, n):
# global tboard, result, isuse, abc
# if k == n:
# if result <= D - len(Cselect):
# return
# for i in Cselect:
# tboard[i] = board[i]
# if count():
# if (D - len(Cselect)) < result:
# result = (D - len(Cselect))
# tmp = [-1] * D
# for i in range(D):
# if i not in Cselect:
# tmp[i] = Mselect[i]
# abc.append(tmp)
#
# for i in Cselect:
# tboard[i] = [Mselect[i]] * W
# return
#
# Cselect.append(k)
# Cback(k+1, n)
# Cselect.pop()
# Cback(k+1, n)
#
# def Mback(k, n):
# global tboard, abc
# if k == n:
# for j in range(len(abc)):
# for i in range(D):
# if abc[j][i] == Mselect[i]:
# return
# tboard = []
# for i in Mselect:
# tboard.append([i] * W)
# Cback(0, D)
# return
#
#
# Mselect.append(1)
# Mback(k+1, n)
# Mselect.pop()
# Mselect.append(0)
# Mback(k+1, n)
# Mselect.pop()
#
# T = int(input())
#
# for tc in range(1, T+1):
# D, W, K = map(int, input().split())
# board = [list(map(int, input().split())) for _ in range(D)]
# Mselect = []
# result = 0xfff
# Cselect = []
# abc = []
# Mback(0, D)
# print('#{} {}'.format(tc, result))
from collections import deque
def count():
global tboard
isuse = [False] * W
for j in range(W):
i, start, cnt = 0, 0, 0
while i < D:
if tboard[start][j] == tboard[i][j]:
cnt += 1
else:
cnt = 0
start = i
continue
if cnt == K:
isuse[j] = True
break
i += 1
if sum(isuse) == W:
return True
else:
return False
# def bfs():
# global result, tboard
# stack = deque([(0, D, 0, [])])
#
# while stack:
# k, n, res, tmp = stack.popleft()
#
# tboard = []
# for i in range(len(tmp)):
# if tmp[i] == -1:
# tboard.append(board[i])
# else:
# tboard.append([tmp[i]] * W)
# if count():
# if res < result:
# result = res
# for i in -1, 0, 1:
# if i == -1:
# stack.append((k+1, n, res, tmp[:]+[-1]))
# else:
# stack.append((k+1, n, res+1, tmp[:]+[i]))
def back(k, n, res):
global result
if res >= result:
return
if count():
if res < result:
result = res
if k == n:
return
if -1 not in visit[k]:
visit[k].append(-1)
back(k+1, n, res)
for i in range(2):
if i not in visit[k]:
tmp, board[k] = board[k], [i] * W
visit[k].append(i)
back(k+1, n, res+1)
board[k] = tmp
T = int(input())
for tc in range(1, T+1):
D, W, K = map(int, input().split())
board = [list(map(int, input().split())) for _ in range(D)]
visit = [[] for _ in range(D)]
result = 0xfff
    back(0, D, 0)  # bfs() above is commented out, so use the backtracking solver
print('#{} {}'.format(tc, result))
| [
"[email protected]"
]
| |
5f82420827fe3d84a27b93bdb272851e78b8640a | 2970291ff52e98915abb47848aeb71517ed1fbab | /machines/migrations/0028_auto_20200321_2338.py | 7e985c1f5a57f54e980268faea52817ba7736ccf | []
| no_license | dannyswolf/MLShop_Django_Service_boook | dd33f4bb0352836897448bc45bbb09b7c49252c2 | 9ac5f85468487a53465e244ba31b9bc968300783 | refs/heads/master | 2023-07-15T15:06:53.298042 | 2021-08-29T11:49:42 | 2021-08-29T11:49:42 | 255,998,699 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 421 | py | # Generated by Django 3.0.4 on 2020-03-21 21:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('machines', '0027_auto_20200321_2337'),
]
operations = [
migrations.AlterField(
model_name='machines',
name='Μοντέλο',
field=models.CharField(blank=True, max_length=200, null=True),
),
]
| [
"[email protected]"
]
| |
805acfed79164fc851df2a9a99715665096fee50 | b004204644dd3cf9ad922a6cd5c0aab0c0e97598 | /addons_custom/res_partner_phonecall/models/__init__.py | b1530e36cc8f58540eb0fa9d228170ce9c4d9c39 | []
| no_license | butagreeza/korea_spa | 7a2fdd92ca73205945632ead6582ef1df424074e | 1524f87b62998eba29e491d0836714abec7059fd | refs/heads/master | 2023-04-26T09:45:51.931127 | 2021-05-17T16:45:33 | 2021-05-17T16:45:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 74 | py | from . import res_users
from . import phonecall
from . import res_partner
| [
"[email protected]"
]
| |
e68a12ed2dd20f27609111b77d780a6bbe47ed92 | e72ed9dfc5f90f4772d0b36da249ff7b2d39fd5f | /bible/forms.py | 748bfa871e6d5c9e2b1441ce2ce0f51c7a384224 | []
| no_license | mparkcode/django-retroplay | 58b0626bb4c6e80f96232a0e4886d1a6c2805bbd | 3f76b630469a7105d35708b450eaacb94d384ee4 | refs/heads/master | 2022-12-10T23:26:27.842708 | 2019-10-21T13:46:17 | 2019-10-21T13:46:17 | 143,025,309 | 1 | 3 | null | 2022-12-08T02:49:53 | 2018-07-31T14:25:44 | HTML | UTF-8 | Python | false | false | 186 | py | from django import forms
class IgdbSearchForm(forms.Form):
igdb_search = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'placeholder': 'Search the bible'}), label="") | [
"[email protected]"
]
| |
bfa7526cf02028ee81f5be260236d207fd71ada4 | 88cfeb8f7076450e7a38d31ab2d11883c1818c8d | /net/densenet.py | f37cd38d0ead26afb7480a9c9c2189f1ef9a2c08 | []
| no_license | ZQPei/Alibaba_Cloud_German_AI_Challenge_for_Earth_Observation | 4e5a127c12e0c02ed1914ab000a131e1a7f7d844 | c2efb32763af0a56a3a7ecb9d83c0744f71d5c14 | refs/heads/master | 2020-04-26T04:31:57.731178 | 2019-02-17T01:10:55 | 2019-02-17T01:10:55 | 173,305,034 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,159 | py | import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.checkpoint as cp
from collections import OrderedDict
def _bn_function_factory(norm, relu, conv):
def bn_function(*inputs):
concated_features = torch.cat(inputs, 1)
bottleneck_output = conv(relu(norm(concated_features)))
return bottleneck_output
return bn_function
class _DenseLayer(nn.Module):
def __init__(self, num_input_features, growth_rate, bn_size, drop_rate, efficient=False):
super(_DenseLayer, self).__init__()
self.add_module('norm1', nn.BatchNorm2d(num_input_features)),
self.add_module('relu1', nn.ReLU(inplace=True)),
self.add_module('conv1', nn.Conv2d(num_input_features, bn_size *
growth_rate, kernel_size=1, stride=1, bias=False)),
self.add_module('norm2', nn.BatchNorm2d(bn_size * growth_rate)),
self.add_module('relu2', nn.ReLU(inplace=True)),
self.add_module('conv2', nn.Conv2d(bn_size * growth_rate, growth_rate,
kernel_size=3, stride=1, padding=1, bias=False)),
self.drop_rate = drop_rate
self.efficient = efficient
def forward(self, *prev_features):
bn_function = _bn_function_factory(self.norm1, self.relu1, self.conv1)
if self.efficient and any(prev_feature.requires_grad for prev_feature in prev_features):
bottleneck_output = cp.checkpoint(bn_function, *prev_features)
else:
bottleneck_output = bn_function(*prev_features)
new_features = self.conv2(self.relu2(self.norm2(bottleneck_output)))
if self.drop_rate > 0:
new_features = F.dropout(new_features, p=self.drop_rate, training=self.training)
return new_features
class _Transition(nn.Sequential):
def __init__(self, num_input_features, num_output_features):
super(_Transition, self).__init__()
self.add_module('norm', nn.BatchNorm2d(num_input_features))
self.add_module('relu', nn.ReLU(inplace=True))
self.add_module('conv', nn.Conv2d(num_input_features, num_output_features,
kernel_size=1, stride=1, bias=False))
self.add_module('pool', nn.AvgPool2d(kernel_size=2, stride=2))
class _DenseBlock(nn.Module):
def __init__(self, num_layers, num_input_features, bn_size, growth_rate, drop_rate, efficient=False):
super(_DenseBlock, self).__init__()
for i in range(num_layers):
layer = _DenseLayer(
num_input_features + i * growth_rate,
growth_rate=growth_rate,
bn_size=bn_size,
drop_rate=drop_rate,
efficient=efficient,
)
self.add_module('denselayer%d' % (i + 1), layer)
def forward(self, init_features):
features = [init_features]
for name, layer in self.named_children():
new_features = layer(*features)
features.append(new_features)
return torch.cat(features, 1)
class DenseNet(nn.Module):
"""Densenet-BC model class, based on
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`
Args:
growth_rate (int) - how many filters to add each layer (`k` in paper)
block_config (list of 3 or 4 ints) - how many layers in each pooling block
num_init_features (int) - the number of filters to learn in the first convolution layer
bn_size (int) - multiplicative factor for number of bottle neck layers
(i.e. bn_size * k features in the bottleneck layer)
drop_rate (float) - dropout rate after each dense layer
num_classes (int) - number of classification classes
small_inputs (bool) - set to True if images are 32x32. Otherwise assumes images are larger.
efficient (bool) - set to True to use checkpointing. Much more memory efficient, but slower.
"""
def __init__(self, growth_rate=12, block_config=(16, 16, 16), compression=0.5,
num_init_features=24, bn_size=4, drop_rate=0,
num_classes=17, small_inputs=True, efficient=False):
super(DenseNet, self).__init__()
assert 0 < compression <= 1, 'compression of densenet should be between 0 and 1'
# self.avgpool_size = 8 if small_inputs else 7
self.avgpool_size = 8
# First convolution
if small_inputs:
self.features = nn.Sequential(OrderedDict([
('conv0', nn.Conv2d(10, num_init_features, kernel_size=3, stride=1, padding=1, bias=False)),
]))
else:
self.features = nn.Sequential(OrderedDict([
('conv0', nn.Conv2d(10, num_init_features, kernel_size=7, stride=2, padding=3, bias=False)),
]))
            self.features.add_module('norm0', nn.BatchNorm2d(num_init_features))
            self.features.add_module('relu0', nn.ReLU(inplace=True))
            self.features.add_module('pool0', nn.MaxPool2d(kernel_size=3, stride=2, padding=1,
                                                           ceil_mode=False))
# Each denseblock
num_features = num_init_features
for i, num_layers in enumerate(block_config):
block = _DenseBlock(
num_layers=num_layers,
num_input_features=num_features,
bn_size=bn_size,
growth_rate=growth_rate,
drop_rate=drop_rate,
efficient=efficient,
)
self.features.add_module('denseblock%d' % (i + 1), block)
num_features = num_features + num_layers * growth_rate
if i != len(block_config) - 1:
trans = _Transition(num_input_features=num_features,
num_output_features=int(num_features * compression))
self.features.add_module('transition%d' % (i + 1), trans)
num_features = int(num_features * compression)
# Final batch norm
self.features.add_module('norm_final', nn.BatchNorm2d(num_features))
# Linear layer
self.classifier = nn.Linear(num_features, num_classes)
# Initialization
for name, param in self.named_parameters():
if 'conv' in name and 'weight' in name:
n = param.size(0) * param.size(2) * param.size(3)
param.data.normal_().mul_(math.sqrt(2. / n))
elif 'norm' in name and 'weight' in name:
param.data.fill_(1)
elif 'norm' in name and 'bias' in name:
param.data.fill_(0)
elif 'classifier' in name and 'bias' in name:
param.data.fill_(0)
def forward(self, x):
features = self.features(x)
out = F.relu(features, inplace=True)
out = F.avg_pool2d(out, kernel_size=self.avgpool_size).view(features.size(0), -1)
out = self.classifier(out)
return out | [
"[email protected]"
]
| |
e05bcb65006e0ceeb16eb2c70a9ef633d6e7c8b5 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_6404600001200128_0/Python/knabbers/A.py | bbd8fd315049627fb82af60f5c2e8854e2c57ff3 | []
| no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 691 | py | from collections import defaultdict
with open('in.txt','rb') as fin, open('output.txt','w') as fout:
case = 1
it = iter(fin.readlines())
_ = next(it) # cases
for line in it:
print ("\n")
print ("case " + str(case))
N = int(line)
line=next(it)
xs = [int(c) for c in line.split(" ")]
print xs
m1 = 0
m2 = 0
for i in range(N-1):
if xs[i+1] - xs[i] < 0:
m1 -= (xs[i+1] - xs[i])
if xs[i+1] < xs[i]:
m2 = max(m2,xs[i] - xs[i+1])
m3 = 0
for i in range(N-1):
#how much can she eat of current one
m3 += min(m2,xs[i])
best = 1
fout.write("Case #" + str(case) + ": " + str(m1) + " " + str(m3) + "\n")
case += 1 | [
"[email protected]"
]
| |
63dfb28677eaa87faeab89b154711257dc907fc9 | 1f9897e86f93438eed2555d6da1716099df54147 | /2020/jokenpo.py | c69ace473e1326c65b70a76621b05b55119baf5f | []
| no_license | AfonsoArtoni/PUG-PE-Dojo | 10371ec321dc11d0280b8ac01dd70f47d29127a3 | 974a5293f58a721491915b2ee4d2e95e2247e745 | refs/heads/master | 2020-12-21T00:44:18.466602 | 2020-01-25T22:03:59 | 2020-01-25T22:03:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,269 | py | """Jokenpo.
Jokenpo is a Japanese game in which two players each choose one of three
possible items: Rock ("pedra"), Paper ("papel") or Scissors ("tesoura"). The
goal is to write a Jokenpo referee that, given both players' moves, reports
the result of the match. The rules are as follows:
- Rock ties with Rock and beats Scissors
- Scissors ties with Scissors and beats Paper
- Paper ties with Paper and beats Rock
"""
def jokenpo(entrada1, entrada2):
"""
>>> jokenpo('pedra','pedra')
(0, 'empate')
>>> jokenpo('tesoura', 'tesoura')
(0, 'empate')
>>> jokenpo('papel', 'papel')
(0, 'empate')
>>> jokenpo('tesoura', 'pedra')
(2, 'pedra')
>>> jokenpo('pedra', 'tesoura')
(1, 'pedra')
>>> jokenpo('pedra', 'papel')
(2, 'papel')
>>> jokenpo('papel', 'pedra')
(1, 'papel')
>>> jokenpo('tesoura', 'papel')
(1, 'tesoura')
>>> jokenpo('papel', 'tesoura')
(2, 'tesoura')
"""
d = {
'tesoura': 'papel',
'pedra': 'tesoura',
'papel': 'pedra'
}
if d[entrada1] == entrada2:
return (1, entrada1)
if d[entrada2] == entrada1:
return (2, entrada2)
return (0, 'empate')
if __name__ == "__main__":
import doctest
doctest.testmod()
| [
"[email protected]"
]
| |
f59b0e05422e2f0ed0e20fd76f2efe583c8387d0 | afd2087e80478010d9df66e78280f75e1ff17d45 | /test/dynamo/test_subclasses.py | 938215cb807c44ef22ce95b85305d0d6a3a17192 | [
"BSD-3-Clause",
"BSD-2-Clause",
"LicenseRef-scancode-secret-labs-2011",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0"
]
| permissive | pytorch/pytorch | 7521ac50c47d18b916ae47a6592c4646c2cb69b5 | a6f7dd4707ac116c0f5fb5f44f42429f38d23ab4 | refs/heads/main | 2023-08-03T05:05:02.822937 | 2023-08-03T00:40:33 | 2023-08-03T04:14:52 | 65,600,975 | 77,092 | 24,610 | NOASSERTION | 2023-09-14T21:58:39 | 2016-08-13T05:26:41 | Python | UTF-8 | Python | false | false | 3,096 | py | # Owner(s): ["module: dynamo"]
import contextlib
import torch
import torch._dynamo.test_case
import torch._dynamo.testing
import torch._functorch.config
import torch.utils.checkpoint
class MockSubclass(torch.Tensor):
@classmethod
def __torch_function__(cls, func, types, args=(), kwargs=None):
if kwargs is None:
kwargs = {}
return func(*args, **kwargs)
@contextlib.contextmanager
def preserve_subclass_config():
old_subclass_set = set(torch._dynamo.config.traceable_tensor_subclasses)
try:
torch._dynamo.config.traceable_tensor_subclasses.add(MockSubclass)
yield
finally:
torch._dynamo.config.traceable_tensor_subclasses.clear()
torch._dynamo.config.traceable_tensor_subclasses.update(old_subclass_set)
class SubclassTests(torch._dynamo.test_case.TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls._exit_stack.enter_context(preserve_subclass_config())
@classmethod
def tearDownClass(cls):
cls._exit_stack.close()
def test_torch_function_state_graph_break(self):
@torch.compile(backend="eager")
def fn(x):
with torch._C.DisableTorchFunctionSubclass():
torch._dynamo.graph_break()
return torch._C._is_torch_function_enabled(), torch.add(x, 1.0)
input = torch.ones(2, 2)
res, _ = fn(input)
self.assertFalse(res)
def test_torch_function_state_tracing(self):
@torch.compile(backend="eager", fullgraph=True)
def fn(x):
with torch._C.DisableTorchFunctionSubclass():
torch.add(x, 1.0)
input = torch.ones(2, 2)
res = fn(input)
def test_torch_function_state_guards(self):
cnt = torch._dynamo.testing.CompileCounter()
@torch.compile(backend=cnt, fullgraph=True)
def fn(x):
torch.add(x, 1.0)
input = torch.ones(2, 2)
with torch._C.DisableTorchFunctionSubclass():
res = fn(input)
res = fn(input)
self.assertEqual(cnt.frame_count, 2)
def test_return_subclass(self):
@torch.compile(backend="eager", fullgraph=True)
def fn(x):
return MockSubclass(torch.add(x, 1.0))
input = torch.ones(2, 2)
res = fn(input)
self.assertIsInstance(res, MockSubclass)
def test_return_local_subclass(self):
class LocalSubclass(torch.Tensor):
@classmethod
def __torch_function__(cls, func, types, args=(), kwargs=None):
if kwargs is None:
kwargs = {}
return func(*args, **kwargs)
torch._dynamo.config.traceable_tensor_subclasses.add(LocalSubclass)
@torch.compile(backend="eager", fullgraph=True)
def fn(x):
return LocalSubclass(torch.add(x, 1.0))
input = torch.ones(2, 2)
res = fn(input)
self.assertIsInstance(res, LocalSubclass)
if __name__ == "__main__":
from torch._dynamo.test_case import run_tests
run_tests()
| [
"[email protected]"
]
| |
c0546b68f5584ad3b7da2cf791a2c1c65b27dbfe | c4e2e1aded20c81fa9ab2a38620cfda71639c4c8 | /print_updates.py | c63e212818cd24e22e2d09f8972571f4a4a7c587 | [
"MIT"
]
| permissive | russss/pydsn | 118f341191f2ce6c702e9a81b3c0fd4da00f54b4 | 84e3b441effded7cfb4716cfa04e7b69d98d8ac1 | refs/heads/master | 2020-12-25T17:25:11.405678 | 2020-09-05T11:51:54 | 2020-09-05T11:51:54 | 21,252,624 | 7 | 5 | null | null | null | null | UTF-8 | Python | false | false | 1,098 | py | # coding=utf-8
from __future__ import division, absolute_import, print_function, unicode_literals
import logging
from dsn import DSN
def to_GHz(freq):
if freq is None:
return None
return str(round(float(freq) / 10 ** 9, 4))
def update_callback(antenna, old, new):
if len(new['down_signal']) == 0:
return
for i in range(0, len(new['down_signal'])):
signal = new['down_signal'][i]
if len(old['down_signal']) > i:
old_signal = old['down_signal'][i]
if (to_GHz(signal['frequency']) == to_GHz(old_signal['frequency']) and
signal['debug'] == old_signal['debug'] and
signal['spacecraft'] == old_signal['spacecraft']):
# No change, don't print anything
return
print("%s channel %s\ttracking %s\tstatus: %s\tinfo: %s\tfrequency: %sGHz" %
(antenna, i, signal['spacecraft'], signal['type'],
signal['debug'], to_GHz(signal['frequency'])))
logging.basicConfig()
dsn = DSN()
dsn.update_callback = update_callback
dsn.run()
| [
"[email protected]"
]
| |
b4c98948d06b56b3abe16f50d15b2211226c7ba5 | 9e9d1a5b711191f87a849f2ea34eb00e17587080 | /chalk_line/materials/rhythm/segment_03/rhythm_makers.py | f283ec2f8c528fd879e5d10bb8a44e8c6e0db308 | []
| no_license | GregoryREvans/chalk_line | c72e3bbdd383d6032e8afd8eba6f41d895f1c673 | e333343ccb039b83393690d46d06e4d5225d6327 | refs/heads/master | 2022-02-23T17:40:38.569781 | 2022-02-10T13:52:13 | 2022-02-10T13:52:13 | 241,491,131 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 815 | py | import evans
rmaker_one = evans.RTMMaker(
rtm=[
"(1 ((2 (1 1 1)) 1 -1))",
"(1 (1 2 3))",
"(1 (1 3))",
"(1 (1 1 2))",
"(1 (1 1))",
"(1 (1))",
"(1 (2 2 1 -1))",
"(1 (1))",
"(1 ((2 (1 1 1)) 1 -1))",
"(1 (1 2 3))",
"(1 (1 3))",
"(1 (1 1 2))",
"(1 (1 1))",
"(1 (1))",
"(1 (2 2 1 -1))",
"(1 (1))",
"(1 (2 1))",
"(1 (3 2 1))",
"(1 (1 2 3 4))",
"(1 (1 2 3 4 5 6))",
"(1 ((2 (1 1 1)) 1 -1))",
"(1 (1 2 3))",
"(1 (1 3))",
"(1 (1 1 2))",
"(1 (1 1))",
"(1 (1))",
"(1 (2 2 1 -1))",
"(1 (1))",
"(1 (2 1))",
"(1 (3 2 1))",
"(1 (1 2 3 4))",
"(1 (1 2 3 4 5 6))",
]
)
| [
"[email protected]"
]
| |
0cc8dc6447958f0d1ae2c4592706c40edc3f05ca | 4ef688b93866285bcc27e36add76dc8d4a968387 | /moto/support/__init__.py | 560832ad6cc4d47ff83106b5e3ee71520a925355 | [
"Apache-2.0"
]
| permissive | localstack/moto | cec77352df216cac99d5e0a82d7ada933950a0e6 | b0b2947e98e05d913d7ee2a0379c1bec73f7d0ff | refs/heads/localstack | 2023-09-01T05:18:16.680470 | 2023-07-10T09:00:26 | 2023-08-07T14:10:06 | 118,838,444 | 22 | 42 | Apache-2.0 | 2023-09-07T02:07:17 | 2018-01-25T00:10:03 | Python | UTF-8 | Python | false | false | 127 | py | from .models import support_backends
from ..core.models import base_decorator
mock_support = base_decorator(support_backends)
| [
"[email protected]"
]
| |
c5b24563692c7be59ada5e6c4bae377ad2ee98b4 | 7bf617f77a55d8ec23fa8156c1380b563a5ac7f6 | /CG/SciPy/circle_1.py | 58a991f780e853de703aa238f4858b7803f7d0fc | []
| no_license | anyatran/school | c06da0e08b148e3d93aec0e76329579bddaa85d5 | 24bcfd75f4a6fe9595d790808f8fca4f9bf6c7ec | refs/heads/master | 2021-06-17T10:45:47.648361 | 2017-05-26T12:57:23 | 2017-05-26T12:57:23 | 92,509,148 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 976 | py | """
Program name: circle_1.py
Objective: A circle is a special case of an oval.
Keywords: canvas, oval, circle
============================================================================79
Explanation: A circle is a special case of an oval and is defined by the
box it fits inside. The bounding box is specified the same as rectangles,
from bottom-left to top-right.
Author: Mike Ohlson de Fine
"""
# circle_1.py
#>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
from Tkinter import *
root = Tk()
root.title('A circle')
cw = 150 # canvas width
ch = 140 # canvas height
canvas_1 = Canvas(root, width=cw, height=ch, background="white")
canvas_1.grid(row=0, column=1)
# specify bottom-left and top-right as a set of four numbers named 'xy'
xy = 20, 20, 120, 120
canvas_1.create_oval(xy)
root.mainloop()
#>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
| [
"[email protected]"
]
| |
1a2c312399d2472dde204fc5e36017a06ffad7c6 | 6336828aeab3ea2ba3e1cf9452a8a3f3a084b327 | /fundooNotes-master/virtual-env/bin/nosetests-3.4 | 121dcd17ab6d074ffdd65beef38adefe3c734d61 | [
"MIT"
]
| permissive | kalereshma96/DjangoNewRepository | 85f2eaed6b689be273af48d328c0a388244bbe2b | 37fd232c2ac91eb6940300f20118f93d17926f9a | refs/heads/master | 2020-04-12T18:12:15.698279 | 2019-01-21T13:46:37 | 2019-01-21T13:46:37 | 162,672,682 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 288 | 4 | #!/home/admin1/PycharmProjects/mynewpythonproject/fundooNotes-master/virtual-env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from nose import run_exit
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(run_exit())
| [
"[email protected]"
]
| |
5c40de5392621022c043aa6b16b4466166b125b6 | 466912406272829982f75854cf0104c6ce8c9814 | /data/spider2/crawler/news/sspai_news.py | bb4925dc81d735b591cce05730a0c9da5dc5cc28 | []
| no_license | logonmy/Codes | 9631fa103fc499663361fa7eeccd7cedb9bb08e4 | 92723efdeccfc193f9ee5d0ab77203c254f34bc2 | refs/heads/master | 2021-09-21T18:07:22.985184 | 2018-08-30T05:53:26 | 2018-08-30T05:53:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,219 | py | # -*- coding: utf-8 -*-
import os, sys, re
import datetime, time
from lxml import html
from pyquery import PyQuery as pq
from pymongo import MongoClient
reload(sys)
sys.setdefaultencoding("utf-8")
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0], '..'))
import BaseCrawler
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0], '../../../../util'))
import loghelper, db, util, extract, url_helper, json,download
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0], '../../parser/util2'))
import parser_mysql_util
import parser_mongo_util
# logger
loghelper.init_logger("crawler_sspai_news", stream=True)
logger = loghelper.get_logger("crawler_sspai_news")
# mongo
# mongo = db.connect_mongo()
# collection_news = mongo.article.news
MAX_PAGE_ALL = 50
CURRENT_PAGE = 0
SOURCE = 13814
TYPE = 60001
class SspaiCrawler(BaseCrawler.BaseCrawler):
def __init__(self):
BaseCrawler.BaseCrawler.__init__(self, use_proxy=1) # todo!
    # implementation
def is_crawl_success(self, url, content):
try:
json.loads(content)
return True
except Exception, ex:
print Exception, ":", ex
return False
return False
class SspaiNewsCrawler(BaseCrawler.BaseCrawler):
def __init__(self):
BaseCrawler.BaseCrawler.__init__(self, use_proxy=1)
    # implementation
def is_crawl_success(self, url, content):
if content.find("</html>") == -1:
return False
d = pq(html.fromstring(content.decode("utf-8")))
title = d('head> title').text().strip()
if title.find("少数派") >= 0:
return True
return False
def has_news_content(content):
d = pq(html.fromstring(content.decode("utf-8")))
title = d('head> title').text().strip()
temp = title.split("-")
if title.find("页面找不到了") >= 0:
return False
return True
def process_news(content, news_key, url, news_posttime):
# if has_news_content(content):
if 1:
download_crawler = download.DownloadCrawler(use_proxy=False)
j=json.loads(content)
title = j['title']
news_time = datetime.datetime.strptime(news_posttime, '%Y-%m-%d %H:%M:%S')
key = news_key
tags=[i['title'] for i in j['tags']]
category = 60102
postraw = 'https://cdn.sspai.com/' + j['banner']
brief = j['summary']
logger.info("%s, %s, %s, %s, %s -> %s, %s", key, title, news_time, brief, ":".join(tags),
category, postraw)
article = j['body']
# logger.info(article)
contents = extract.extractContents(url, article)
mongo = db.connect_mongo()
collection_news = mongo.article.news
if collection_news.find_one({"source": SOURCE, "key_int": int(key)}) is not None:
return
# collection_news.delete_one({"source": SOURCE, "key_int": int(key)})
if collection_news.find_one({"title": title, "source": {"$ne": SOURCE}}) is not None:
return
# collection_news.delete_many({"title": title, "source": {"$ne": SOURCE}})
mongo.close()
flag, domain = url_helper.get_domain(url)
dnews = {
"date": news_time - datetime.timedelta(hours=8),
"title": title,
"link": url,
"createTime": datetime.datetime.now(),
"source": SOURCE,
"key": key,
"key_int": int(key),
"type": TYPE,
"original_tags": tags,
"processStatus": 0,
# "companyId": None,
"companyIds": [],
"category": category,
"domain": domain,
"categoryNames": []
}
dcontents = []
rank = 1
for c in contents:
if c["type"] == "text":
dc = {
"rank": rank,
"content": c["data"],
"image": "",
"image_src": "",
}
else:
if download_crawler is None:
dc = {
"rank": rank,
"content": "",
"image": "",
"image_src": c["data"],
}
else:
(imgurl, width, height) = parser_mysql_util.get_logo_id_new(c["data"], download_crawler, SOURCE, key, "news")
if imgurl is not None:
dc = {
"rank": rank,
"content": "",
"image": str(imgurl),
"image_src": "",
"height": int(height),
"width": int(width)
}
else:
continue
dcontents.append(dc)
rank += 1
dnews["contents"] = dcontents
if brief is None or brief.strip() == "":
brief = util.get_brief_from_news(dcontents)
dnews["brief"] = brief
# posturl = parser_mysql_util.get_logo_id(postraw, download_crawler, SOURCE, key, "news")
(posturl, width, height) = parser_mysql_util.get_logo_id_new(postraw, download_crawler, SOURCE, key, "news")
if posturl is not None:
post = str(posturl)
else:
post = None
if post is None or post.strip() == "":
post = util.get_posterId_from_news(dcontents)
if download_crawler is None:
dnews["post"] = post
else:
dnews["postId"] = post
if news_time > datetime.datetime.now():
logger.info("Time: %s is not correct with current time", news_time)
dnews["date"] = datetime.datetime.now() - datetime.timedelta(hours=8)
# collection_news.insert(dnews)
#
# logger.info("Done")
nid = parser_mongo_util.save_mongo_news(dnews)
logger.info("Done: %s", nid)
def process(content, page_crawler, flag):
j = json.loads(content.decode("utf-8"))
cnt = 0
# logger.info(lis)
for li in j['list']:
title = li['title']
href = 'https://sspai.com/post/' + str(li['id'])
news_key = href.split("/")[-1]
news_url = href
api_url='https://sspai.com/api/v1/articles/%s'%str(li['id'])
news_posttime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(li['released_at']))
logger.info("%s, %s, %s, %s", title, news_key, news_url, news_posttime)
mongo = db.connect_mongo()
collection_news = mongo.article.news
item = collection_news.find_one({"source": SOURCE, "key_int": int(news_key)})
newses = list(collection_news.find({"title": title, "source": {"$ne": SOURCE}}))
mongo.close()
if item is None or flag == "all":
craw = True
for news in newses:
if news.has_key("type") and news["type"] > 0:
craw = False
break
if craw:
while True:
result = page_crawler.crawl(api_url, agent=True)
if result['get'] == 'success':
# logger.info(result["content"])
try:
process_news(result['content'], news_key, news_url, news_posttime)
cnt += 1
except Exception, ex:
                            logger.exception(ex)
break
return cnt
def run(flag):
global CURRENT_PAGE
crawler = SspaiCrawler()
page_crawler = SspaiCrawler()
cnt = 1
while True:
key = CURRENT_PAGE * 10
# logger.info("key=%s", key)
if flag == "all":
if key > MAX_PAGE_ALL:
return
else:
if cnt == 0:
return
if key > MAX_PAGE_ALL:
return
url = 'https://sspai.com/api/v1/articles?offset=%s&limit=10&type=recommend_to_home&sort=recommend_to_home_at' % key
while True:
result = crawler.crawl(url, agent=True)
if result['get'] == 'success':
# logger.info(result["content"])
try:
cnt = process(result['content'], page_crawler, flag)
logger.info("%s has %s news", url, cnt)
except Exception, ex:
logger.exception(ex)
cnt = 0
break
CURRENT_PAGE += 1
def start_run(concurrent_num, flag):
global CURRENT_PAGE
while True:
logger.info("Sspai news %s start...", flag)
CURRENT_PAGE = 0
run(flag)
logger.info("Sspai news %s end.", flag)
if flag == "incr":
            time.sleep(60 * 15)  # 15 minutes
else:
return
# gevent.sleep(86400*3) #3 days
if __name__ == "__main__":
flag = "incr"
concurrent_num = 1
if len(sys.argv) > 1:
flag = sys.argv[1]
start_run(concurrent_num, flag)
| [
"[email protected]"
]
| |
b9e96932a14e41fe2293414f813539a41dac1547 | e60487a8f5aad5aab16e671dcd00f0e64379961b | /python_stack/Algos/Fundamentals/bracesValid.py | 1cf44dec62b3abd0f888db3f006ddf5ae829bf98 | []
| no_license | reenadangi/python | 4fde31737e5745bc5650d015e3fa4354ce9e87a9 | 568221ba417dda3be7f2ef1d2f393a7dea6ccb74 | refs/heads/master | 2021-08-18T08:25:40.774877 | 2021-03-27T22:20:17 | 2021-03-27T22:20:17 | 247,536,946 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 685 | py | # Given a string, determine whether the braces within it are valid. For example, given the input string
# "w(a{t}s[o(n{c}o)m]e)h[e{r}e]!" , return
# true . Given "d(i{a}l[t]o)n{e" , return
# false . Given "a(1)s[O(n]0{t)0}k" , return
# false .
def bracesValid(str):
mapping={"(":")","{":"}","[":"]"}
myStack=[]
for c in str:
if c in ('(','{','['):
myStack.append(c)
elif c in (')','}',']'):
if myStack:
top=myStack.pop()
if c!=mapping[top]:
return False
else:
return False
if myStack: return False
else: return True
print(bracesValid("w(a{t}s[o(n{c}o)m]e)h[e{r}e]!"))
| [
"[email protected]"
]
| |
091a9424420358b3ed9ba3e230a989f86525cafd | c3998b61e8644efc51aff860372b80247666232c | /backend/tracim_backend/tests/library/test_webdav.py | c7b6d9a2b2ee9c26c50c3ca240763664e43e8477 | [
"MIT"
]
| permissive | buxx/tracim | 55c60201d44ff87c73c0201ef881dde9af484578 | 3348e5c169859da9204068071d6a17cb1661841a | refs/heads/master | 2021-01-21T03:49:22.447651 | 2019-02-15T13:11:56 | 2019-02-15T13:11:56 | 45,355,659 | 0 | 0 | null | 2018-01-21T06:36:47 | 2015-11-01T20:10:51 | JavaScript | UTF-8 | Python | false | false | 21,186 | py | # -*- coding: utf-8 -*-
from unittest.mock import MagicMock
from wsgidav import util
from tracim_backend import WebdavAppFactory
from tracim_backend.fixtures.content import Content as ContentFixtures
from tracim_backend.fixtures.users_and_groups import Base as BaseFixture
from tracim_backend.lib.core.notifications import DummyNotifier
from tracim_backend.lib.core.user import UserApi
from tracim_backend.lib.webdav import TracimDomainController
from tracim_backend.lib.webdav.dav_provider import Provider
from tracim_backend.lib.webdav.dav_provider import WebdavTracimContext
from tracim_backend.lib.webdav.resources import RootResource
from tracim_backend.models.data import Content
from tracim_backend.models.data import ContentRevisionRO
from tracim_backend.tests import StandardTest
from tracim_backend.tests import eq_
class TestWebdavFactory(StandardTest):
config_section = 'webdav_test'
def test_unit__initConfig__ok__nominal_case(self):
"""
Check if config is correctly modify for wsgidav using mocked
wsgidav and tracim conf (as dict)
:return:
"""
tracim_settings = self.settings
mock = MagicMock()
mock._initConfig = WebdavAppFactory._initConfig
config = mock._initConfig(self, **tracim_settings)
assert config
assert config['acceptbasic'] is True
assert config['acceptdigest'] is False
assert config['defaultdigest'] is False
# TODO - G.M - 25-05-2018 - Better check for middleware stack config
assert 'middleware_stack' in config
assert len(config['middleware_stack']) == 6
assert 'provider_mapping' in config
assert '/' in config['provider_mapping']
assert isinstance(config['provider_mapping']['/'], Provider) # nopep8
assert 'domaincontroller' in config
assert isinstance(config['domaincontroller'], TracimDomainController)
class TestWebDav(StandardTest):
fixtures = [BaseFixture, ContentFixtures]
def _get_provider(self, config):
return Provider(
show_archived=False,
show_deleted=False,
show_history=False,
app_config=config,
)
def _get_environ(
self,
provider: Provider,
username: str,
) -> dict:
environ = {
'http_authenticator.username': username,
'http_authenticator.realm': '/',
'wsgidav.provider': provider,
'tracim_user': self._get_user(username),
}
tracim_context = WebdavTracimContext(
app_config=self.app_config,
session=self.session,
environ=environ,
)
environ['tracim_context'] = tracim_context
return environ
def _get_user(self, email):
return UserApi(None,
self.session,
self.app_config
).get_one_by_email(email)
def _put_new_text_file(
self,
provider,
environ,
file_path,
file_content,
):
        # This part is a reproduction of
        # wsgidav.request_server.RequestServer#doPUT
# Grab parent folder where create file
parentRes = provider.getResourceInst(
util.getUriParent(file_path),
environ,
)
        assert parentRes, 'we should have found a folder for {0}'.format(file_path)
new_resource = parentRes.createEmptyResource(
util.getUriName(file_path),
)
write_object = new_resource.beginWrite(
contentType='application/octet-stream',
)
write_object.write(file_content)
write_object.close()
new_resource.endWrite(withErrors=False)
# Now file should exist
return provider.getResourceInst(
file_path,
environ,
)
def test_unit__get_root__ok(self):
provider = self._get_provider(self.app_config)
root = provider.getResourceInst(
'/',
self._get_environ(
provider,
'[email protected]',
)
)
assert root, 'Path / should return a RootResource instance'
assert isinstance(root, RootResource)
def test_unit__list_workspaces_with_user__ok(self):
provider = self._get_provider(self.app_config)
root = provider.getResourceInst(
'/',
self._get_environ(
provider,
'[email protected]',
)
)
assert root, 'Path / should return a RootResource instance'
assert isinstance(root, RootResource), 'Path / should return a RootResource instance'
children = root.getMemberList()
eq_(
2,
len(children),
            msg='RootResource should return 2 workspaces, not {0}'.format(
len(children),
)
)
workspaces_names = [w.name for w in children]
assert 'Recipes' in workspaces_names, \
'Recipes should be in names ({0})'.format(
workspaces_names,
)
assert 'Others' in workspaces_names, 'Others should be in names ({0})'.format(
workspaces_names,
)
def test_unit__list_workspaces_with_admin__ok(self):
provider = self._get_provider(self.app_config)
root = provider.getResourceInst(
'/',
self._get_environ(
provider,
'[email protected]',
)
)
assert root, 'Path / should return a RootResource instance'
assert isinstance(root, RootResource), 'Path / should return a RootResource instance'
children = root.getMemberList()
eq_(
2,
len(children),
            msg='RootResource should return 2 workspaces, not {0}'.format(
len(children),
)
)
workspaces_names = [w.name for w in children]
assert 'Recipes' in workspaces_names, 'Recipes should be in names ({0})'.format(
workspaces_names,
)
assert 'Business' in workspaces_names, 'Business should be in names ({0})'.format(
workspaces_names,
)
def test_unit__list_workspace_folders__ok(self):
provider = self._get_provider(self.app_config)
Recipes = provider.getResourceInst(
'/Recipes/',
self._get_environ(
provider,
'[email protected]',
)
)
        assert Recipes, 'Path /Recipes should return a Workspace instance'
children = Recipes.getMemberList()
eq_(
2,
len(children),
            msg='Recipes should list 2 folders, not {0}'.format(
len(children),
),
)
folders_names = [f.name for f in children]
assert 'Salads' in folders_names, 'Salads should be in names ({0})'.format(
folders_names,
)
assert 'Desserts' in folders_names, 'Desserts should be in names ({0})'.format(
folders_names,
)
def test_unit__list_content__ok(self):
provider = self._get_provider(self.app_config)
Salads = provider.getResourceInst(
'/Recipes/Desserts',
self._get_environ(
provider,
'[email protected]',
)
)
        assert Salads, 'Path /Recipes/Desserts should return a Workspace instance'
children = Salads.getMemberList()
eq_(
5,
len(children),
            msg='Desserts should list 5 files, not {0}'.format(
len(children),
),
)
content_names = [c.name for c in children]
assert 'Brownie Recipe.html' in content_names, \
'Brownie Recipe.html should be in names ({0})'.format(
content_names,
)
assert 'Best Cakesʔ.thread.html' in content_names,\
'Best Cakesʔ.thread.html should be in names ({0})'.format(
content_names,
)
assert 'Apple_Pie.txt' in content_names,\
'Apple_Pie.txt should be in names ({0})'.format(content_names,)
assert 'Fruits Desserts' in content_names, \
'Fruits Desserts should be in names ({0})'.format(
content_names,
)
assert 'Tiramisu Recipe.document.html' in content_names,\
'Tiramisu Recipe.document.html should be in names ({0})'.format(
content_names,
)
def test_unit__get_content__ok(self):
provider = self._get_provider(self.app_config)
pie = provider.getResourceInst(
'/Recipes/Desserts/Apple_Pie.txt',
self._get_environ(
provider,
'[email protected]',
)
)
assert pie, 'Apple_Pie should be found'
eq_('Apple_Pie.txt', pie.name)
def test_unit__delete_content__ok(self):
provider = self._get_provider(self.app_config)
pie = provider.getResourceInst(
'/Recipes/Desserts/Apple_Pie.txt',
self._get_environ(
provider,
'[email protected]',
)
)
content_pie = self.session.query(ContentRevisionRO) \
.filter(Content.label == 'Apple_Pie') \
            .one()  # there must be exactly one revision, cf. fixtures
eq_(
False,
content_pie.is_deleted,
            msg='Content should not be deleted!'
)
content_pie_id = content_pie.content_id
pie.delete()
self.session.flush()
content_pie = self.session.query(ContentRevisionRO) \
.filter(Content.content_id == content_pie_id) \
.order_by(Content.revision_id.desc()) \
.first()
eq_(
True,
content_pie.is_deleted,
msg='Content should be deleted!'
)
result = provider.getResourceInst(
'/Recipes/Desserts/Apple_Pie.txt',
self._get_environ(
provider,
'[email protected]',
)
)
        eq_(None, result, msg='Result should be None, not {0}'.format(
result
))
def test_unit__create_content__ok(self):
provider = self._get_provider(self.app_config)
environ = self._get_environ(
provider,
'[email protected]',
)
result = provider.getResourceInst(
'/Recipes/Salads/greek_salad.txt',
environ,
)
        eq_(None, result, msg='Result should be None, not {0}'.format(
result
))
result = self._put_new_text_file(
provider,
environ,
'/Recipes/Salads/greek_salad.txt',
b'Greek Salad\n',
)
        assert result, 'Result should not be None, got {0}'.format(
result
)
eq_(
b'Greek Salad\n',
result.content.depot_file.file.read(),
msg='fiel content should be "Greek Salad\n" but it is {0}'.format(
result.content.depot_file.file.read()
)
)
def test_unit__create_delete_and_create_file__ok(self):
provider = self._get_provider(self.app_config)
environ = self._get_environ(
provider,
'[email protected]',
)
new_file = provider.getResourceInst(
'/Recipes/Salads/greek_salad.txt',
environ,
)
        eq_(None, new_file, msg='Result should be None, not {0}'.format(
new_file
))
# create it
new_file = self._put_new_text_file(
provider,
environ,
'/Recipes/Salads/greek_salad.txt',
b'Greek Salad\n',
)
        assert new_file, 'Result should not be None, got {0}'.format(
new_file
)
content_new_file = self.session.query(ContentRevisionRO) \
.filter(Content.label == 'greek_salad') \
            .one()  # there must be exactly one revision
eq_(
False,
content_new_file.is_deleted,
msg='Content should not be deleted!'
)
content_new_file_id = content_new_file.content_id
# Delete if
new_file.delete()
self.session.flush()
content_pie = self.session.query(ContentRevisionRO) \
.filter(Content.content_id == content_new_file_id) \
.order_by(Content.revision_id.desc()) \
.first()
eq_(
True,
content_pie.is_deleted,
msg='Content should be deleted!'
)
result = provider.getResourceInst(
'/Recipes/Salads/greek_salad.txt',
self._get_environ(
provider,
'[email protected]',
)
)
        eq_(None, result, msg='Result should be None, not {0}'.format(
result
))
# Then create it again
new_file = self._put_new_text_file(
provider,
environ,
'/Recipes/Salads/greek_salad.txt',
b'greek_salad\n',
)
        assert new_file, 'Result should not be None, got {0}'.format(
new_file
)
        # The previous file is still deleted
self.session.flush()
content_pie = self.session.query(ContentRevisionRO) \
.filter(Content.content_id == content_new_file_id) \
.order_by(Content.revision_id.desc()) \
.first()
eq_(
True,
content_pie.is_deleted,
msg='Content should be deleted!'
)
# And an other file exist for this name
content_new_new_file = self.session.query(ContentRevisionRO) \
.filter(Content.label == 'greek_salad') \
.order_by(Content.revision_id.desc()) \
.first()
        assert content_new_new_file.content_id != content_new_file_id,\
            'Content ids should not be the same!'
eq_(
False,
content_new_new_file.is_deleted,
msg='Content should not be deleted!'
)
def test_unit__rename_content__ok(self):
provider = self._get_provider(self.app_config)
environ = self._get_environ(
provider,
'[email protected]',
)
pie = provider.getResourceInst(
'/Recipes/Desserts/Apple_Pie.txt',
environ,
)
content_pie = self.session.query(ContentRevisionRO) \
.filter(Content.label == 'Apple_Pie') \
            .one()  # there must be exactly one revision, cf. fixtures
        assert content_pie, 'Apple_Pie should exist'
content_pie_id = content_pie.content_id
pie.moveRecursive('/Recipes/Desserts/Apple_Pie_RENAMED.txt')
# Database content is renamed
content_pie = self.session.query(ContentRevisionRO) \
.filter(ContentRevisionRO.content_id == content_pie_id) \
.order_by(ContentRevisionRO.revision_id.desc()) \
.first()
eq_(
'Apple_Pie_RENAMED',
content_pie.label,
msg='File should be labeled Apple_Pie_RENAMED, not {0}'.format(
content_pie.label
)
)
def test_unit__move_content__ok(self):
provider = self._get_provider(self.app_config)
environ = self._get_environ(
provider,
'[email protected]',
)
pie = provider.getResourceInst(
'/Recipes/Desserts/Apple_Pie.txt',
environ,
)
content_pie = self.session.query(ContentRevisionRO) \
.filter(Content.label == 'Apple_Pie') \
            .one()  # there must be exactly one revision, cf. fixtures
        assert content_pie, 'Apple_Pie should exist'
content_pie_id = content_pie.content_id
content_pie_parent = content_pie.parent
eq_(
content_pie_parent.label,
'Desserts',
            msg="the file's parent should be Desserts",
)
pie.moveRecursive('/Recipes/Salads/Apple_Pie.txt') # move in f2
# Database content is moved
content_pie = self.session.query(ContentRevisionRO) \
.filter(ContentRevisionRO.content_id == content_pie_id) \
.order_by(ContentRevisionRO.revision_id.desc()) \
.first()
assert content_pie.parent.label != content_pie_parent.label,\
            'file should have been moved into Salads but is in {0}'.format(
content_pie.parent.label
)
def test_unit__move_and_rename_content__ok(self):
provider = self._get_provider(self.app_config)
environ = self._get_environ(
provider,
'[email protected]',
)
pie = provider.getResourceInst(
'/Recipes/Desserts/Apple_Pie.txt',
environ,
)
content_pie = self.session.query(ContentRevisionRO) \
.filter(Content.label == 'Apple_Pie') \
            .one()  # there must be exactly one revision, cf. fixtures
        assert content_pie, 'Apple_Pie should exist'
content_pie_id = content_pie.content_id
content_pie_parent = content_pie.parent
eq_(
content_pie_parent.label,
'Desserts',
            msg="the file's parent should be Desserts",
)
pie.moveRecursive('/Business/Menus/Apple_Pie_RENAMED.txt')
content_pie = self.session.query(ContentRevisionRO) \
.filter(ContentRevisionRO.content_id == content_pie_id) \
.order_by(ContentRevisionRO.revision_id.desc()) \
.first()
assert content_pie.parent.label != content_pie_parent.label,\
            'file should have been moved into Menus but is in {0}'.format(
content_pie.parent.label
)
eq_(
'Apple_Pie_RENAMED',
content_pie.label,
msg='File should be labeled Apple_Pie_RENAMED, not {0}'.format(
content_pie.label
)
)
def test_unit__move_content__ok__another_workspace(self):
provider = self._get_provider(self.app_config)
environ = self._get_environ(
provider,
'[email protected]',
)
content_to_move_res = provider.getResourceInst(
'/Recipes/Desserts/Apple_Pie.txt',
environ,
)
content_to_move = self.session.query(ContentRevisionRO) \
.filter(Content.label == 'Apple_Pie') \
            .one()  # there must be exactly one revision, cf. fixtures
        assert content_to_move, 'Apple_Pie should exist'
content_to_move_id = content_to_move.content_id
content_to_move_parent = content_to_move.parent
eq_(
content_to_move_parent.label,
'Desserts',
            msg="the file's parent should be Desserts",
)
content_to_move_res.moveRecursive('/Business/Menus/Apple_Pie.txt') # move in Business, f1
# Database content is moved
content_to_move = self.session.query(ContentRevisionRO) \
.filter(ContentRevisionRO.content_id == content_to_move_id) \
.order_by(ContentRevisionRO.revision_id.desc()) \
.first()
assert content_to_move.parent, 'Content should have a parent'
assert content_to_move.parent.label == 'Menus',\
'file should be moved in Infos but is in {0}'.format(
content_to_move.parent.label
)
def test_unit__update_content__ok(self):
provider = self._get_provider(self.app_config)
environ = self._get_environ(
provider,
'[email protected]',
)
result = provider.getResourceInst(
'/Recipes/Salads/greek_salad.txt',
environ,
)
        eq_(None, result, msg='Result should be None, not {0}'.format(
result
))
result = self._put_new_text_file(
provider,
environ,
'/Recipes/Salads/greek_salad.txt',
b'hello\n',
)
        assert result, 'Result should not be None, got {0}'.format(
result
)
eq_(
b'hello\n',
result.content.depot_file.file.read(),
msg='fiel content should be "hello\n" but it is {0}'.format(
result.content.depot_file.file.read()
)
)
# ReInit DummyNotifier counter
DummyNotifier.send_count = 0
# Update file content
write_object = result.beginWrite(
contentType='application/octet-stream',
)
write_object.write(b'An other line')
write_object.close()
result.endWrite(withErrors=False)
eq_(
1,
DummyNotifier.send_count,
msg='DummyNotifier should send 1 mail, not {}'.format(
DummyNotifier.send_count
),
)
| [
"[email protected]"
]
| |
912e0ef322d0210628742b89e0e9105897dc42f6 | 1e14e73b66aa4e60a528addf6358d5c009705e9e | /scripts/visualize.py | 8f3dca025adabe11f2e73709dd4ea46d5d23a0ed | [
"MIT"
]
| permissive | cannin/covid-sicr-test | aeaa5935c5ddc99e616bdf1b9527a82eedcaa641 | f842946357428730265b7d0a6640172dc757ecae | refs/heads/master | 2022-10-08T04:46:31.415917 | 2020-06-11T21:06:30 | 2020-06-11T21:06:30 | 271,634,733 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,321 | py | #!/usr/bin/env python
# coding: utf-8
import argparse
import logging
from multiprocessing import Pool
import pandas as pd
import papermill as pm
from pathlib import Path
from tqdm import tqdm
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
for lib in ['blib2to3', 'papermill']:
logger = logging.getLogger(lib)
logger.setLevel(logging.WARNING)
from niddk_covid_sicr import get_data_prefix, get_ending, list_rois
notebook_path = Path(__file__).parent.parent / 'notebooks'
# Parse all the command-line arguments
parser = argparse.ArgumentParser(description=('Executes all of the analysis '
'notebooks'))
parser.add_argument('model_name',
help='Name of the Stan model file (without extension)')
parser.add_argument('-dp', '--data_path', default='./data',
help='Path to directory containing the data files')
parser.add_argument('-fp', '--fits_path', default='./fits',
help='Path to directory containing pickled fit files')
parser.add_argument('-rp', '--results_path', default='./results/vis-notebooks',
help=('Path to directory where resulting notebooks '
'will be stored'))
parser.add_argument('-mp', '--models_path', default='./models',
help='Path to directory containing .stan files')
parser.add_argument('-r', '--rois', default=[], nargs='+',
help='Space separated list of ROIs')
parser.add_argument('-n', '--n_threads', type=int, default=16,
help='Number of threads to use for analysis')
parser.add_argument('-f', '--fit_format', type=int, default=1,
help='Version of fit format')
parser.add_argument('-v', '--verbose', type=int, default=0,
help='Verbose error reporting')
args = parser.parse_args()
for key, value in args.__dict__.items():
if '_path' in key and 'results' not in key:
assert Path(value).is_dir(),\
"%s is not a directory" % Path(value).resolve()
# pathlibify some paths
data_path = Path(args.data_path)
fits_path = Path(args.fits_path)
models_path = Path(args.models_path)
results_path = Path(args.results_path)
results_path.mkdir(parents=True, exist_ok=True)
assert any([x.name.endswith('.csv') for x in data_path.iterdir()]),\
"No .csv files found in data_path %s" % (data_path.resolve())
assert any([x.name.endswith('.stan') for x in models_path.iterdir()]),\
"No .stan files found in models_path %s" % (models_path.resolve())
assert any([x.name.endswith('.pkl') or x.name.endswith('.csv')
for x in fits_path.iterdir()]),\
"No .pkl or .csv files found in fits_path %s" % (fits_path.resolve())
ending = get_ending(args.fit_format)
if not args.rois:
data_rois = list_rois(data_path, get_data_prefix(), '.csv')
fit_rois = list_rois(fits_path, args.model_name, ending)
args.rois = list(set(data_rois).intersection(fit_rois))
args.n_threads = min(args.n_threads, len(args.rois))
print("Running visualization notebook for %d rois on model '%s'" %
(len(args.rois), args.model_name))
# Make sure all ROI pickle files exist
for roi in args.rois:
file = fits_path / ('%s_%s%s' % (args.model_name, roi, ending))
assert file.is_file(), "No such %s file: %s" % (ending, file.resolve())
# Function to be execute on each ROI
def execute(model_name, roi, data_path, fits_path, models_path, notebook_path,
            results_path, fit_format, verbose=False):
try:
result = pm.execute_notebook(
str(notebook_path / 'visualize.ipynb'),
str(results_path / ('visualize_%s_%s.ipynb' % (model_name, roi))),
parameters={'model_name': model_name,
'roi': roi,
'data_path': str(data_path),
'fits_path': str(fits_path),
'models_path': str(models_path),
'fit_format': fit_format},
nest_asyncio=True)
except pm.PapermillExecutionError as e:
exception = '%s: %s' % (e.ename, e.evalue)
except Exception as e:
        exception = str(e).split('\n')[-1]
else:
# Possible exception that was raised
# (or `None` if notebook completed successfully)
exception = str(result['metadata']['papermill']['exception'])
if exception and verbose:
print(roi, exception)
return exception
# Top progress bar (how many ROIs have finished)
pbar = tqdm(total=len(args.rois), desc="All notebooks", leave=True)
def update(*a):
pbar.update()
# Execute up to 16 ROIs notebooks at once
pool = Pool(processes=args.n_threads)
jobs = {roi: pool.apply_async(execute,
[args.model_name, roi, data_path, fits_path,
models_path, notebook_path, results_path,
args.fit_format],
{'verbose': args.verbose},
callback=update)
for roi in args.rois}
pool.close()
pool.join()
print('\n')
error_table = pd.Series({roi: job.get() for roi, job in jobs.items()})
error_table = error_table[error_table != 'None']
if len(error_table):
print("Errors:")
print(error_table)
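# Example invocation (hypothetical model/ROI names):
#   python visualize.py SICRdiscrete -dp ./data -fp ./fits -r US NY -n 4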
| [
"[email protected]"
]
| |
809e15e8c97fec14f1187a7f5c9189c43e53ad04 | a1b8b807a389fd3971ac235e46032c0be4795ff1 | /testrepo/Zips/script.module.streamhublive/resources/modules/downloader.py | 97608f2e48e1c7d67e403ad3122dc6daeb436677 | []
| no_license | sClarkeIsBack/StreamHub | 0cd5da4b3229592a4e2cf7ce3e857294c172aaba | 110983579645313b8b60eac08613435c033eb92d | refs/heads/master | 2020-05-23T09:09:54.898715 | 2020-02-29T12:15:32 | 2020-02-29T12:15:32 | 80,440,827 | 9 | 20 | null | 2017-10-04T07:32:52 | 2017-01-30T16:43:46 | Python | UTF-8 | Python | false | false | 2,512 | py | import xbmcgui
import urllib
import time
from urllib import FancyURLopener
import sys
class MyOpener(FancyURLopener):
version = '[COLOR ffff0000][B]StreamHub[/B][/COLOR]'
myopener = MyOpener()
urlretrieve = MyOpener().retrieve
urlopen = MyOpener().open
def download(url, dest, dp = None):
start_time=time.time()
urlretrieve(url, dest, lambda nb, bs, fs: _pbhook(nb, bs, fs, dp, start_time))
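# Typical use from an addon (a sketch; creating and closing the dialog is up
# to the caller):
#   dp = xbmcgui.DialogProgress()
#   dp.create('[COLOR ffff0000][B]StreamHub[/B][/COLOR]', 'Downloading...')
#   download(url, dest, dp)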
def auto(url, dest, dp = None):
start_time=time.time()
urlretrieve(url, dest, lambda nb, bs, fs: _pbhookauto(nb, bs, fs, dp, start_time))
def _pbhookauto(numblocks, blocksize, filesize, dp, start_time):
    # silent progress hook used by auto(); intentionally reports nothing
    pass
def _pbhook(numblocks, blocksize, filesize, dp, start_time):
try:
percent = min(numblocks * blocksize * 100 / filesize, 100)
currently_downloaded = float(numblocks) * blocksize / (1024 * 1024)
kbps_speed = numblocks * blocksize / (time.time() - start_time)
if kbps_speed > 0:
eta = (filesize - numblocks * blocksize) / kbps_speed
else:
eta = 0
kbps_speed = kbps_speed / 1024
mbps_speed = kbps_speed / 1024
total = float(filesize) / (1024 * 1024)
mbs = '[COLOR white]%.02f MB[/COLOR] of %.02f MB' % (currently_downloaded, total)
e = 'Speed: [COLOR lime]%.02f Mb/s ' % mbps_speed + '[/COLOR]'
e += 'ETA: [COLOR yellow]%02d:%02d' % divmod(eta, 60) + '[/COLOR]'
    except:
        percent = 100
        mbs = e = ''
    # Push the progress to the Kodi dialog when one was supplied; a sketch
    # assuming the pre-Kodi-19 DialogProgress.update(percent, line1, line2)
    # signature.
    if dp:
        dp.update(int(percent), mbs, e)
def unzip(zip,dest):
import zipfile
zip_ref = zipfile.ZipFile(zip, 'r')
zip_ref.extractall(dest)
zip_ref.close()
def getmodules():
import os,re,xbmc
zip = 'https://github.com/sClarkeIsBack/StreamHub/raw/master/StreamHubLive/rootdownloads.zip'
root = xbmc.translatePath('special://home/addons/script.module.streamhublive/resources/root/')
udata = xbmc.translatePath('special://home/userdata/addon_data/script.module.streamhublive/downloads/')
dest = xbmc.translatePath(os.path.join('special://home/userdata/addon_data/script.module.streamhublive/downloads/', 'root.zip'))
if not os.path.exists(udata):
os.makedirs(udata)
try:
download(zip,dest)
unzip(dest,root)
except:
xbmcgui.Dialog().ok('[COLOR ffff0000][B]StreamHub[/B][/COLOR]','Oops..Something went wrong with our auto update feature, Please Inform us at','http://facebook.com/groups/streamh')
try:
os.remove(dest)
except:
pass | [
"[email protected]"
]
| |
8f91301ee92109eaebdec1ed72f4f25409581a1b | 9d69d37c930821f4ebf265f3c1f214c2cc558502 | /scripts/extra/csv_to_coco_json_result.py | 3fc14bfbe55bbd61d296159e00b5d157b88d6e32 | [
"Apache-2.0"
]
| permissive | mayanks888/mAP | 255b35e25384659dfaf97e6e3eec53bafb5bb3cc | 7e6a6c4b916223e737d30c76ebb11a75ed15d984 | refs/heads/master | 2023-01-29T11:05:21.099541 | 2020-12-13T20:54:44 | 2020-12-13T20:54:44 | 278,285,070 | 0 | 0 | Apache-2.0 | 2020-07-09T06:44:56 | 2020-07-09T06:44:55 | null | UTF-8 | Python | false | false | 1,600 | py | from collections import namedtuple
import os
import pandas as pd
import json
from utils import *
coco91class = coco80_to_coco91_class()
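# coco80_to_coco91_class() (assumed here to be the usual YOLO-style helper)
# returns a lookup list mapping the 80 contiguous training indices to COCO's
# sparse 91-category ids, e.g. coco91class[0] == 1 ('person').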
csv_path='yolo_txt_to_csv.csv'
# csv_path='yolo1.csv'
data = pd.read_csv(csv_path)
print(data.head())
def split(df, group):
data = namedtuple('data', ['filename', 'object'])
# filename='img_name'
# data = namedtuple('data', ['img_name', 'obj_class'])
gb = df.groupby(group)
    return [data(filename, gb.get_group(filename)) for filename in gb.groups]
grouped = split(data, 'filename')
jdict= []
for group in grouped:
# filename = group.filename.encode('utf8')
filename = group.filename
print(filename)
for index, row in group.object.iterrows():
        xmin = row['xmin']
        ymin = row['ymin']
        width = row['xmax'] - xmin
        height = row['ymax'] - ymin
# box_=[xmin,ymin,xmax,ymax]
# box2=xyxy2xywh(box_)
# obj_id = obj['category_id']
# print(obj_id)
        score = row['conf']
        obj_name = row["class"]
        obj_cat = row["obj_category"]
        #################
        # map the contiguous 80-class index to COCO's original 91-class category id
        obj_cat = coco91class[int(obj_cat)]
        #################
bbox = ((xmin), (ymin), (width), (height))
# bbox = box2
jdict.append({'image_id': int(filename), 'category_id': obj_cat, 'bbox': [round(x, 3) for x in bbox], 'score': round(score, 5)})
print('\nGenerating json detection for pycocotools...')
with open('results.json', 'w') as file:
json.dump(jdict, file)
| [
"[email protected]"
]
| |
e86947e81e355edde5f00faccb2b3b4b7adfe1b7 | 4de28b1f6d97640834e4a795e68ca9987f9e2cd5 | /check plugins 2.0/dell_powervault_me4/checks/agent_dellpowervault | 872a4202350c171f757ac3e0fb517852ad2a04de | []
| no_license | Yogibaer75/Check_MK-Things | affa0f7e6e772074c547f7b1df5c07a37dba80b4 | 029c546dc921c4157000d8ce58a878618e7bfa97 | refs/heads/master | 2023-09-01T15:52:28.610282 | 2023-08-29T06:18:52 | 2023-08-29T06:18:52 | 20,382,895 | 47 | 16 | null | 2023-07-30T15:52:22 | 2014-06-01T18:04:07 | Python | UTF-8 | Python | false | false | 1,216 | #!/usr/bin/env python3
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# (c) Andreas Doehler <[email protected]/[email protected]>
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# ails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
def agent_dellpowervault_arguments(params, hostname, ipaddress):
args = ''
if params["user"] != "":
args += " -u " + quote_shell_string(params["user"])
if params["password"] != "":
args += " -p " + quote_shell_string(params["password"])
args += " " + quote_shell_string(ipaddress)
return args
special_agent_info['dellpowervault'] = agent_dellpowervault_arguments
| [
"[email protected]"
]
| ||
906e43db8bb1001f90d120dced7b2b11273ffe1e | 35f7c36a55a98cd4150abe51c24bf6b2313ee9d5 | /pytestFrame_demon1/TestCase/testmy.py | 4d60e67571f998cd5a788db480a9d3cbf2532dcf | []
| no_license | jingshiyue/zhongkeyuan_workspace | 58b12e46223d398b184c48c4c6b799e5235e4470 | aa0749f4a237ee76a61579dc5984635a7127a631 | refs/heads/master | 2021-07-15T15:18:40.186561 | 2020-08-12T05:32:45 | 2020-08-12T05:34:20 | 197,749,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py | import sys
import os
path0 = os.path.realpath(__file__) #'D:\\workfile\\workspace\\pytestFrame_demon1\\TestCase\\testmy.py'
path1 = os.path.dirname(path0)
GRANDFA = os.path.dirname(path1)
sys.path.append(GRANDFA ) # 将祖父路径加入sys中
print("ok")
sys.path.append(sys.path.append(sys.path[0] + r"\..\.."))
sys.path.append(sys.path.append(sys.path[0] + r"\..")) | [
"[email protected]"
]
| |
7ba10f0c3964f4661b5d6b9a7ceeb672eab5cd06 | f8777c76ec7c8da686c72a2975c17bbd294edc0e | /eden/integration/hg/rebase_test.py | 760939f42d2236172ff58608b16ec5ab79b6e6be | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
]
| permissive | jmswen/eden | 3a8e96bf0fbbf6c987f4b17bbd79dcbe0964c033 | 5e0b051703fa946cc77fc43004435ae6b20599a1 | refs/heads/master | 2020-06-06T06:08:28.946268 | 2019-06-19T04:45:11 | 2019-06-19T04:45:11 | 192,659,804 | 0 | 0 | NOASSERTION | 2019-06-19T04:43:36 | 2019-06-19T04:43:36 | null | UTF-8 | Python | false | false | 11,213 | py | #!/usr/bin/env python3
#
# Copyright (c) 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
import os
import threading
from eden.integration.lib import eden_server_inspector, hgrepo
from .lib.hg_extension_test_base import EdenHgTestCase, hg_test
@hg_test
class RebaseTest(EdenHgTestCase):
_base_commit: str
_c11: str
_c12: str
_c13: str
_c14: str
_c15: str
_c21: str
_c22: str
_c23: str
_c24: str
_c25: str
def populate_backing_repo(self, repo: hgrepo.HgRepository) -> None:
repo.mkdir("numbers")
repo.write_file("numbers/README", "this will have two directories")
self._base_commit = repo.commit("commit")
repo.mkdir("numbers/1")
repo.write_file("numbers/1/11", "11\n")
self._c11 = repo.commit("c11")
repo.write_file("numbers/1/12", "12\n")
self._c12 = repo.commit("c12")
repo.write_file("numbers/1/13", "13\n")
self._c13 = repo.commit("c13")
repo.write_file("numbers/1/14", "14\n")
self._c14 = repo.commit("c14")
repo.write_file("numbers/1/15", "15\n")
self._c15 = repo.commit("c15")
repo.update(self._base_commit)
repo.mkdir("numbers/2")
repo.write_file("numbers/2/21", "21\n")
self._c21 = repo.commit("c21")
repo.write_file("numbers/2/22", "22\n")
self._c22 = repo.commit("c22")
repo.write_file("numbers/2/23", "23\n")
self._c23 = repo.commit("c23")
repo.write_file("numbers/2/24", "24\n")
self._c24 = repo.commit("c24")
repo.write_file("numbers/2/25", "25\n")
self._c25 = repo.commit("c25")
repo.update(self._base_commit)
def test_rebase_commit_with_independent_folder(self) -> None:
stdout = self.hg("--debug", "rebase", "-s", self._c11, "-d", self._c25)
self.assertIn(f'rebasing 1:{self._c11[:12]} "c11"\n', stdout)
self.assertIn(f'rebasing 2:{self._c12[:12]} "c12"\n', stdout)
self.assertIn(f'rebasing 3:{self._c13[:12]} "c13"\n', stdout)
self.assertIn(f'rebasing 4:{self._c14[:12]} "c14"\n', stdout)
self.assertIn(f'rebasing 5:{self._c15[:12]} "c15"\n', stdout)
# Note that these are empirical values, not desired values.
# We need to figure out why this hits the slow path and fix it!
self.assert_update_logic(stdout, num_fast_path=2, num_slow_path=5)
# Get the hash of the new head created as a result of the rebase.
new_head = self.repo.log(revset=f"successors({self._c15})-{self._c15}")[0]
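        # (The revset `successors(X) - X` selects the commit(s) that replaced
        # X after the rebase, excluding X itself.)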
# Record the pre-update inode count.
inspector = eden_server_inspector.EdenServerInspector(self.eden, self.repo.path)
inspector.unload_inode_for_path("numbers")
pre_update_count = inspector.get_inode_count("numbers")
print(f"loaded inode count before `hg update`: {pre_update_count}")
# Verify that updating to the new head that was created as a result of
# the rebase leaves Hg in the correct state.
self.assertEqual(
1,
len(self.repo.log()),
msg=("At the base commit, `hg log` should have only one entry."),
)
stdout = self.hg("--debug", "update", new_head)
self.assert_update_logic(stdout, num_fast_path=1)
self.assertEqual(
11,
len(self.repo.log()),
msg=("The new head should include all the commits."),
)
# Verify the post-update inode count.
post_update_count = inspector.get_inode_count("numbers")
print(f"loaded inode count after `hg update`: {post_update_count}")
self.assertGreaterEqual(
post_update_count,
pre_update_count,
msg=("The inode count should not decrease due to `hg update`."),
)
num_new_inodes = post_update_count - pre_update_count
self.assertLessEqual(
num_new_inodes,
2,
msg=(
"There should be no more than 2 new inodes as a result of the "
"update. At the time this test was created, num_new_inodes is 0, "
"but if we included unloaded inodes, there would be 2: one for "
"numbers/1 and one for numbers/2."
),
)
def test_rebasing_a_commit_that_removes_a_file(self) -> None:
# Rebase a commit that removes the numbers/README file.
self.hg("rm", "numbers/README")
removal_commit = self.repo.commit("removing README")
self.hg("rebase", "-s", removal_commit, "-d", self._c15)
# Verify we end up in the expected state.
self.assert_status_empty()
self.assertFalse(os.path.exists(self.get_path("numbers/README")))
self.assertEqual(7, len(self.repo.log()))
def test_rebase_stack_with_conflicts(self) -> None:
"""Create a stack of commits that has conflicts with the stack onto
which we rebase and verify that if we merge the expected conflicts along
the way, then we end up in the expected state."""
self.mkdir("numbers/1")
self.write_file("numbers/1/11", "new 11\n")
self.repo.add_file("numbers/1/11")
self.write_file("numbers/1/12", "new 12\n")
self.repo.add_file("numbers/1/12")
commit = self.repo.commit("Introduce 1/11 and 1/12.")
self.write_file("numbers/1/12", "change 12 again\n")
self.write_file("numbers/1/13", "new 13\n")
self.repo.add_file("numbers/1/13")
self.write_file("numbers/1/14", "new 14\n")
self.repo.add_file("numbers/1/14")
self.repo.commit("Introduce 1/13 and 1/14.")
with self.assertRaises(hgrepo.HgError) as context:
self.hg("rebase", "-s", commit, "-d", self._c15)
self.assertIn(
b"conflicts while merging numbers/1/11! "
b"(edit, then use 'hg resolve --mark')",
# pyre-fixme[16]: `_E` has no attribute `stderr`.
context.exception.stderr,
)
self.assert_unresolved(unresolved=["numbers/1/11", "numbers/1/12"])
self.assert_status({"numbers/1/11": "M", "numbers/1/12": "M"}, op="rebase")
self.assert_file_regex(
"numbers/1/11",
"""\
<<<<<<< dest: .*
11
=======
new 11
>>>>>>> source: .*
""",
)
self.assert_file_regex(
"numbers/1/12",
"""\
<<<<<<< dest: .*
12
=======
new 12
>>>>>>> source: .*
""",
)
self.write_file("numbers/1/11", "11 merged.\n")
self.write_file("numbers/1/12", "12 merged.\n")
self.hg("resolve", "--mark", "numbers/1/11", "numbers/1/12")
with self.assertRaises(hgrepo.HgError) as context:
self.hg("rebase", "--continue")
self.assertIn(
b"conflicts while merging numbers/1/12! "
b"(edit, then use 'hg resolve --mark')",
# pyre-fixme[16]: `_E` has no attribute `stderr`.
context.exception.stderr,
)
self.assert_unresolved(
unresolved=["numbers/1/12", "numbers/1/13", "numbers/1/14"]
)
self.assert_status(
{"numbers/1/12": "M", "numbers/1/13": "M", "numbers/1/14": "M"}, op="rebase"
)
self.assert_file_regex(
"numbers/1/12",
"""\
<<<<<<< dest: .*
12 merged.
=======
change 12 again
>>>>>>> source: .*
""",
)
self.assert_file_regex(
"numbers/1/13",
"""\
<<<<<<< dest: .*
13
=======
new 13
>>>>>>> source: .*
""",
)
self.assert_file_regex(
"numbers/1/14",
"""\
<<<<<<< dest: .*
14
=======
new 14
>>>>>>> source: .*
""",
)
self.write_file("numbers/1/12", "merged.\n")
self.write_file("numbers/1/13", "merged.\n")
self.write_file("numbers/1/14", "merged.\n")
self.hg("resolve", "--mark", "numbers/1/12", "numbers/1/13", "numbers/1/14")
self.hg("rebase", "--continue")
commits = self.repo.log()
self.assertEqual(8, len(commits))
self.assertEqual(
[self._base_commit, self._c11, self._c12, self._c13, self._c14, self._c15],
commits[0:6],
)
def assert_update_logic(
self, stdout: str, num_fast_path: int = 0, num_slow_path: int = 0
) -> None:
"""Helper function to examine the stdout of an `hg --debug update` call
and verify the number of times our Hg extension exercised the "fast
path" for Eden when doing an update versus the number of times it
exercised the "slow path."
"""
self.assertEqual(
num_fast_path,
stdout.count("using eden update code path\n"),
msg=(
"Number of times `hg update` should exercise the fast path: "
+ str(num_fast_path)
),
)
self.assertEqual(
num_slow_path,
stdout.count("falling back to non-eden update code path: "),
msg=(
"Number of times `hg update` should exercise the slow path: "
+ str(num_slow_path)
),
)
def test_rebase_with_concurrent_status(self) -> None:
"""
Test using `hg rebase` to rebase a stack while simultaneously running
`hg status`
"""
stop = threading.Event()
def status_thread():
while not stop.is_set():
self.repo.run_hg("status", stdout=None, stderr=None)
# Spawn several threads to run "hg status" in parallel with the rebase
num_threads = 6
threads = []
for _ in range(num_threads):
t = threading.Thread(target=status_thread)
threads.append(t)
t.start()
# Run the rebase. Explicitly disable inmemory rebase so that eden
        # will need to update the working directory state as the rebase progresses
self.repo.run_hg(
"--debug",
"--config",
"rebase.experimental.inmemory=False",
"rebase",
"-s",
self._c11,
"-d",
self._c25,
stdout=None,
stderr=None,
)
new_commit = self.hg("log", "-rtip", "-T{node}")
stop.set()
for t in threads:
t.join()
self.assert_status_empty()
# Verify that the new commit looks correct
self.repo.update(new_commit)
self.assert_status_empty()
self.assert_file_regex("numbers/1/15", "15\n")
self.assert_file_regex("numbers/2/25", "25\n")
| [
"[email protected]"
]
| |
f3e8686fcdc11a92cf10d7b6bd5f7a314cd2ce1b | b94bb6b6e2fac5fb8f02354a2d05374b8f00ff60 | /mandelbrot/numpy_vector_numexpr/numpy_vector_numexpr.py | 484ad304db72cf8701216640e58a2e7b8f1d35dc | []
| no_license | janus/EuroPython2011_HighPerformanceComputing | 17ff9e6d7d5634c424983103ad45442acfe2502e | 1a15b5e66a22bd11422a1bb9ad749c5d906e3f98 | refs/heads/master | 2021-01-16T22:41:04.249856 | 2011-06-28T22:32:50 | 2011-06-28T22:32:50 | 1,973,541 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,837 | py | import datetime
import sys
import numpy as np
import numexpr
# area of space to investigate
x1, x2, y1, y2 = -2.13, 0.77, -1.3, 1.3
# use numexpr library to vectorise (and maybe parallelise) the numpy expressions
def calculate_z_numpy(q, maxiter, z):
output = np.resize(np.array(0,), q.shape)
for iteration in range(maxiter):
#z = z*z + q
z = numexpr.evaluate("z*z+q")
#done = nm.greater(abs(z), 2.0)
done = numexpr.evaluate("abs(z).real>2.0")
#q = nm.where(done,0+0j, q)
q = numexpr.evaluate("where(done, 0+0j, q)")
#z = nm.where(done,0+0j, z)
z = numexpr.evaluate("where(done,0+0j, z)")
#output = nm.where(done, iteration, output)
output = numexpr.evaluate("where(done, iteration, output)")
return output
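# A minimal illustration of the numexpr pattern used above: evaluate() compiles
# the expression string and runs it over the arrays in parallel chunks, e.g.
#   a = np.arange(5)
#   numexpr.evaluate("a*a+1")   # -> array([ 1,  2,  5, 10, 17])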
def calculate(show_output):
# make a list of x and y values which will represent q
# xx and yy are the co-ordinates, for the default configuration they'll look like:
# if we have a 1000x1000 plot
# xx = [-2.13, -2.1242, -2.1184000000000003, ..., 0.7526000000000064, 0.7584000000000064, 0.7642000000000064]
# yy = [1.3, 1.2948, 1.2895999999999999, ..., -1.2844000000000058, -1.2896000000000059, -1.294800000000006]
x_step = (float(x2 - x1) / float(w)) * 2
y_step = (float(y1 - y2) / float(h)) * 2
x=[]
y=[]
ycoord = y2
while ycoord > y1:
y.append(ycoord)
ycoord += y_step
xcoord = x1
while xcoord < x2:
x.append(xcoord)
xcoord += x_step
x = np.array(x)
y = np.array(y) * 1j # make y a complex number
print "x and y have length:", len(x), len(y)
# create a square matrix using clever addressing
x_y_square_matrix = x+y[:, np.newaxis] # it is np.complex128
# convert square matrix to a flatted vector using ravel
q = np.ravel(x_y_square_matrix)
# create z as a 0+0j array of the same length as q
# note that it defaults to reals (float64) unless told otherwise
z = np.zeros(q.shape, np.complex128)
start_time = datetime.datetime.now()
print "Total elements:", len(q)
output = calculate_z_numpy(q, maxiter, z)
end_time = datetime.datetime.now()
secs = end_time - start_time
print "Main took", secs
validation_sum = sum(output)
print "Total sum of elements (for validation):", validation_sum
if show_output:
import Image
output = (output + (256*output) + (256**2)*output) * 8
im = Image.new("RGB", (w/2, h/2))
im.fromstring(output.tostring(), "raw", "RGBX", 0, -1)
im.show()
if __name__ == '__main__':
w = int(sys.argv[1]) # e.g. 100
h = int(sys.argv[1]) # e.g. 100
maxiter = int(sys.argv[2]) # e.g. 300
calculate(True)
| [
"[email protected]"
]
| |
dd4afdb6db252146efbf72714ce2914c07933fec | 20343e8a8435b3f839d5abd0c4063cf735f43341 | /Experiment/price_with_basic/JQ_Demo.py | 7f7010abf4084044ea5f6bd90bd0cf54ea8c9477 | []
| no_license | alading241/MoDeng | 948f2099e2f7e4548d6e477b6e06b833bdf4f9bb | 01819e58943d7d1a414714d64aa531c0e99dfe22 | refs/heads/master | 2021-05-23T11:39:41.326804 | 2020-04-05T06:06:01 | 2020-04-05T06:06:01 | 253,269,397 | 1 | 0 | null | 2020-04-05T15:38:33 | 2020-04-05T15:38:33 | null | UTF-8 | Python | false | false | 2,699 | py | # encoding=utf-8
# from JQData_Test.auth_info import *
import pandas as pd
from SDK.MyTimeOPT import convert_str_to_date
from matplotlib import pyplot as plt
import seaborn as sns
"""
Research using JQ (JoinQuant) data
"""
stk_code = normalize_code('000001')
# 查询300508的市值数据
q = query(valuation.pe_ratio,
valuation.pb_ratio,
indicator.eps,
indicator.roe,
indicator.operating_profit,
indicator.net_profit_margin,
indicator.inc_revenue_annual,
indicator.inc_operation_profit_year_on_year,
indicator.inc_operation_profit_annual,
indicator.inc_net_profit_year_on_year,
indicator.inc_net_profit_annual
).filter(valuation.code.in_([stk_code]))
panel = get_fundamentals_continuously(q, end_date='2019-05-12', count=1200)
df_basic = panel.minor_xs(stk_code)
df_basic['date_str'] = df_basic.index
df_basic['date'] = df_basic.apply(lambda x: convert_str_to_date(x['date_str']), axis=1)
df_basic = df_basic.set_index('date')
# Query closing prices
df_close = get_price(stk_code, start_date='2017-01-01', end_date='2019-05-12', frequency='daily', fields=None, skip_paused=False, fq='pre')
df_close = df_close.reset_index()
df_close['date'] = df_close.apply(lambda x: convert_str_to_date(str(x['index'])[:10]), axis=1)
df_close = df_close.set_index('date')
df_concat = pd.concat([df_basic, df_close], axis=1)\
.dropna(axis=0)\
.loc[:, [
'close',
'eps',
'pb_ratio',
'pe_ratio',
'roe',
'operating_profit',
'net_profit_margin',
'inc_revenue_annual',
'inc_operation_profit_year_on_year',
'inc_operation_profit_annual',
'inc_net_profit_year_on_year',
'inc_net_profit_annual']]
df_corr = df_concat.corr()
# sns.distplot(df_corr['close'])
df_corr['xlabel'] = df_corr.index
# Draw a bar chart
sns.barplot(y='close', x='xlabel', data=df_corr)
plt.xticks(rotation=90)
plt.show()
"""
df_concat.corr()
画图
.corr()
"""
s
"""
#DataFrame的corr和cov方法将以DataFrame的形式返回完整的相关系数或协方差矩阵:
data.corr()
data.cov()
"""
end = 0 | [
"[email protected]"
]
| |
793e4320d3064724680bb351c892080960854d23 | 5e9bda2d6082f62d889df1c28973436af905faaa | /demo/Joystick.py | e5ff4b5a7e6ec783bfa22dc96ae45c3d4bd74c90 | []
| no_license | FXCMAPI/Phoenix | cab74db4e8f141f4d27b4b2e3d5bab06994b2e52 | 040763282f04a5b98f89c054b254993d1a8ca618 | refs/heads/master | 2020-03-16T15:42:10.660845 | 2018-05-09T01:41:51 | 2018-05-09T01:45:33 | 132,755,174 | 1 | 1 | null | 2018-05-09T12:41:54 | 2018-05-09T12:41:53 | null | UTF-8 | Python | false | false | 35,148 | py | #!/usr/bin/env python
#----------------------------------------------------------------------------
# Name: Joystick.py
# Purpose: Demonstrate use of wx.Joystick
#
# Author: Jeff Grimmett ([email protected]), adapted from original
# .wdr-derived demo
#
# Created: 02-Jan-2004
# Copyright:
# Licence: wxWindows license
#----------------------------------------------------------------------------
import math
import wx
import wx.adv
haveJoystick = True
if wx.Platform == "__WXMAC__":
haveJoystick = False
#----------------------------------------------------------------------------
# Once all supported versions of Python support 32-bit integers on all
# platforms, this can go up to 32.
MAX_BUTTONS = 16
#----------------------------------------------------------------------------
class Label(wx.StaticText):
# A derived StaticText that always aligns right and renders
# in a bold font.
def __init__(self, parent, label):
wx.StaticText.__init__(self, parent, -1, label, style=wx.ALIGN_RIGHT)
f = parent.GetFont()
f.SetWeight(wx.FONTWEIGHT_BOLD)
self.SetFont(f)
#----------------------------------------------------------------------------
class JoyGauge(wx.Panel):
def __init__(self, parent, stick):
self.stick = stick
size = (100,100)
wx.Panel.__init__(self, parent, -1, size=size)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_ERASE_BACKGROUND, lambda e: None)
self.buffer = wx.Bitmap(*size)
dc = wx.BufferedDC(None, self.buffer)
self.DrawFace(dc)
self.DrawJoystick(dc)
def OnSize(self, event):
# The face Bitmap init is done here, to make sure the buffer is always
# the same size as the Window
w, h = self.GetClientSize()
self.buffer = wx.Bitmap(w,h)
dc = wx.BufferedDC(wx.ClientDC(self), self.buffer)
self.DrawFace(dc)
self.DrawJoystick(dc)
def DrawFace(self, dc):
dc.SetBackground(wx.Brush(self.GetBackgroundColour()))
dc.Clear()
def OnPaint(self, evt):
# When dc is destroyed it will blit self.buffer to the window,
# since no other drawing is needed we'll just return and let it
# do it's thing
dc = wx.BufferedPaintDC(self, self.buffer)
def DrawJoystick(self, dc):
        # draw the gauge as a maxed square in the center of this window.
w, h = self.GetClientSize()
edgeSize = min(w, h)
xorigin = (w - edgeSize) / 2
yorigin = (h - edgeSize) / 2
center = edgeSize / 2
# Restrict our drawing activities to the square defined
# above.
dc.SetClippingRegion(xorigin, yorigin, edgeSize, edgeSize)
dc.SetBrush(wx.Brush(wx.Colour(251, 252, 237)))
dc.DrawRectangle(xorigin, yorigin, edgeSize, edgeSize)
dc.SetPen(wx.Pen(wx.BLACK, 1, wx.PENSTYLE_DOT_DASH))
dc.DrawLine(xorigin, yorigin + center, xorigin + edgeSize, yorigin + center)
dc.DrawLine(xorigin + center, yorigin, xorigin + center, yorigin + edgeSize)
if self.stick:
# Get the joystick position as a float
joyx = float(self.stick.GetPosition().x)
joyy = float(self.stick.GetPosition().y)
# Get the joystick range of motion
xmin = self.stick.GetXMin()
xmax = self.stick.GetXMax()
if xmin < 0:
xmax += abs(xmin)
joyx += abs(xmin)
xmin = 0
xrange = max(xmax - xmin, 1)
ymin = self.stick.GetYMin()
ymax = self.stick.GetYMax()
if ymin < 0:
ymax += abs(ymin)
joyy += abs(ymin)
ymin = 0
yrange = max(ymax - ymin, 1)
# calc a ratio of our range versus the joystick range
xratio = float(edgeSize) / xrange
yratio = float(edgeSize) / yrange
# calc the displayable value based on position times ratio
xval = int(joyx * xratio)
yval = int(joyy * yratio)
# and normalize the value from our brush's origin
x = xval + xorigin
y = yval + yorigin
# Now to draw it.
dc.SetPen(wx.Pen(wx.RED, 2))
dc.CrossHair(x, y)
def Update(self):
dc = wx.BufferedDC(wx.ClientDC(self), self.buffer)
self.DrawFace(dc)
self.DrawJoystick(dc)
#----------------------------------------------------------------------------
class JoyPanel(wx.Panel):
def __init__(self, parent, stick):
self.stick = stick
wx.Panel.__init__(self, parent, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
fn = parent.GetFont()
fn.SetPointSize(fn.GetPointSize() + 3)
fn.SetWeight(wx.FONTWEIGHT_BOLD)
t = wx.StaticText(self, -1, "X - Y Axes", style = wx.ALIGN_CENTRE)
t.SetFont(fn)
sizer.Add(t, 0, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER | wx.ALIGN_CENTER_HORIZONTAL, 1)
self.control = JoyGauge(self, self.stick)
sizer.Add(self.control, 1, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER | wx.ALIGN_CENTER_HORIZONTAL, 1)
self.SetSizer(sizer)
sizer.Fit(self)
def Update(self):
self.control.Update()
#----------------------------------------------------------------------------
class POVGauge(wx.Panel):
#
    # Display the current position of the POV control
#
def __init__(self, parent, stick):
self.stick = stick
self.size = (100, 100)
self.avail = False
self.fourDir = False
self.cts = False
wx.Panel.__init__(self, parent, -1, size=self.size)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_ERASE_BACKGROUND, lambda e: None)
self.buffer = wx.Bitmap(*self.size)
dc = wx.BufferedDC(None, self.buffer)
self.DrawFace(dc)
self.DrawPOV(dc)
def OnSize(self, event):
# calculate the size of our display and make a buffer for it.
w, h = self.GetClientSize()
s = min(w, h)
self.size = (s, s)
self.buffer = wx.Bitmap(w,h)
dc = wx.BufferedDC(wx.ClientDC(self), self.buffer)
self.DrawFace(dc)
self.DrawPOV(dc)
def DrawFace(self, dc):
dc.SetBackground(wx.Brush(self.GetBackgroundColour()))
dc.Clear()
def OnPaint(self, evt):
# When dc is destroyed it will blit self.buffer to the window,
# since no other drawing is needed we'll just return and let it
# do it's thing
dc = wx.BufferedPaintDC(self, self.buffer)
def DrawPOV(self, dc):
        # draw the gauge as a maxed circle in the center of this window.
w, h = self.GetClientSize()
diameter = min(w, h)
xorigin = (w - diameter) / 2
yorigin = (h - diameter) / 2
xcenter = xorigin + diameter / 2
ycenter = yorigin + diameter / 2
# our 'raster'.
dc.SetBrush(wx.Brush(wx.WHITE))
dc.DrawCircle(xcenter, ycenter, diameter/2)
dc.SetBrush(wx.Brush(wx.BLACK))
dc.DrawCircle(xcenter, ycenter, 10)
# fancy decorations
dc.SetPen(wx.Pen(wx.BLACK, 1, wx.PENSTYLE_DOT_DASH))
dc.DrawLine(xorigin, ycenter, xorigin + diameter, ycenter)
dc.DrawLine(xcenter, yorigin, xcenter, yorigin + diameter)
if self.stick:
if self.avail:
pos = -1
# use the appropriate function to get the POV position
if self.fourDir:
pos = self.stick.GetPOVPosition()
if self.cts:
pos = self.stick.GetPOVCTSPosition()
# trap invalid values
if 0 <= pos <= 36000:
vector = 30
else:
vector = 0
# rotate CCW by 90 so that 0 is up.
pos = (pos / 100) - 90
# Normalize
if pos < 0:
pos = pos + 360
# Stolen from wx.lib.analogclock :-)
radiansPerDegree = math.pi / 180
pointX = int(round(vector * math.cos(pos * radiansPerDegree)))
pointY = int(round(vector * math.sin(pos * radiansPerDegree)))
# normalise value to match our actual center.
nx = pointX + xcenter
ny = pointY + ycenter
# Draw the line
dc.SetPen(wx.Pen(wx.BLUE, 2))
dc.DrawLine(xcenter, ycenter, nx, ny)
# And a little thing to show the endpoint
dc.SetBrush(wx.Brush(wx.BLUE))
dc.DrawCircle(nx, ny, 8)
def Update(self):
dc = wx.BufferedDC(wx.ClientDC(self), self.buffer)
self.DrawFace(dc)
self.DrawPOV(dc)
def Calibrate(self):
s = self.stick
self.avail = s.HasPOV()
self.fourDir = s.HasPOV4Dir()
self.cts = s.HasPOVCTS()
#----------------------------------------------------------------------------
class POVStatus(wx.Panel):
#
# Displays static info about the POV control
#
def __init__(self, parent, stick):
self.stick = stick
wx.Panel.__init__(self, parent, -1, size=(100, 100))
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add((20,20))
self.avail = wx.CheckBox(self, -1, "Available")
sizer.Add(self.avail, 0, wx.ALL | wx.EXPAND | wx.ALIGN_LEFT, 2)
self.fourDir = wx.CheckBox(self, -1, "4-Way Only")
sizer.Add(self.fourDir, 0, wx.ALL | wx.EXPAND | wx.ALIGN_LEFT, 2)
self.cts = wx.CheckBox(self, -1, "Continuous")
sizer.Add(self.cts, 0, wx.ALL | wx.EXPAND | wx.ALIGN_LEFT, 2)
self.SetSizer(sizer)
sizer.Fit(self)
# Effectively makes the checkboxes read-only.
self.Bind(wx.EVT_CHECKBOX, self.Calibrate)
def Calibrate(self, evt=None):
s = self.stick
self.avail.SetValue(s.HasPOV())
self.fourDir.SetValue(s.HasPOV4Dir())
self.cts.SetValue(s.HasPOVCTS())
#----------------------------------------------------------------------------
class POVPanel(wx.Panel):
def __init__(self, parent, stick):
self.stick = stick
wx.Panel.__init__(self, parent, -1, size=(100, 100))
sizer = wx.BoxSizer(wx.HORIZONTAL)
gsizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add((25,25))
fn = parent.GetFont()
fn.SetPointSize(fn.GetPointSize() + 3)
fn.SetWeight(wx.FONTWEIGHT_BOLD)
t = wx.StaticText(self, -1, "POV Control", style = wx.ALIGN_CENTER)
t.SetFont(fn)
gsizer.Add(t, 0, wx.ALL | wx.EXPAND, 1)
self.display = POVGauge(self, stick)
gsizer.Add(self.display, 1, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER, 1)
sizer.Add(gsizer, 1, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER, 1)
self.status = POVStatus(self, stick)
sizer.Add(self.status, 1, wx.ALL | wx.EXPAND | wx.ALIGN_CENTER, 1)
self.SetSizer(sizer)
sizer.Fit(self)
def Calibrate(self):
self.display.Calibrate()
self.status.Calibrate()
def Update(self):
self.display.Update()
#----------------------------------------------------------------------------
class LED(wx.Panel):
def __init__(self, parent, number):
self.state = -1
self.size = (20, 20)
self.number = number
fn = parent.GetFont()
fn.SetPointSize(fn.GetPointSize() - 1)
fn.SetWeight(wx.FONTWEIGHT_BOLD)
self.fn = fn
wx.Panel.__init__(self, parent, -1, size=self.size)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_ERASE_BACKGROUND, lambda e: None)
self.buffer = wx.Bitmap(*self.size)
dc = wx.BufferedDC(None, self.buffer)
self.DrawFace(dc)
self.DrawLED(dc)
def OnSize(self, event):
# calculate the size of our display.
w, h = self.GetClientSize()
s = min(w, h)
self.size = (s, s)
self.buffer = wx.Bitmap(*self.size)
dc = wx.BufferedDC(wx.ClientDC(self), self.buffer)
self.DrawFace(dc)
self.DrawLED(dc)
def DrawFace(self, dc):
dc.SetBackground(wx.Brush(self.GetBackgroundColour()))
dc.Clear()
def OnPaint(self, evt):
# When dc is destroyed it will blit self.buffer to the window,
# since no other drawing is needed we'll just return and let it
# do it's thing
dc = wx.BufferedPaintDC(self, self.buffer)
def DrawLED(self, dc):
        # bitmap size
        bw, bh = self.size
        # center of bitmap (integer division keeps the coordinates ints)
        center = bw // 2
        # pick the LED color based on the button state.
        if self.state == 0:
            dc.SetBrush(wx.Brush(wx.RED))
        elif self.state == 1:
            dc.SetBrush(wx.Brush(wx.GREEN))
        else:
            dc.SetBrush(wx.Brush(wx.BLACK))
        dc.DrawCircle(center, center, bw // 2)
        txt = str(self.number)
        # Set the font for the DC ...
        dc.SetFont(self.fn)
        # ... and calculate how much space our value
        # will take up.
        fw, fh = dc.GetTextExtent(txt)
        # Calc the center of the LED, and from that
        # derive the origin of our value.
        tx = center - (fw // 2)
        ty = center - (fh // 2)
        # Draw the button number once, in white, so it stands out
        # against the dark LED face whatever the LED's current color is.
        dc.SetTextForeground(wx.WHITE)
        dc.DrawText(txt, tx, ty)
def Update(self):
dc = wx.BufferedDC(wx.ClientDC(self), self.buffer)
self.DrawFace(dc)
self.DrawLED(dc)
#----------------------------------------------------------------------------
class JoyButtons(wx.Panel):
def __init__(self, parent, stick):
self.stick = stick
self.leds = {}
wx.Panel.__init__(self, parent, -1)
tsizer = wx.BoxSizer(wx.VERTICAL)
fn = parent.GetFont()
fn.SetPointSize(fn.GetPointSize() + 3)
fn.SetWeight(wx.FONTWEIGHT_BOLD)
t = wx.StaticText(self, -1, "Buttons", style = wx.ALIGN_LEFT)
t.SetFont(fn)
tsizer.Add(t, 0, wx.ALL | wx.EXPAND | wx.ALIGN_LEFT, 1)
sizer = wx.FlexGridSizer(4, 16, 2, 2)
fn.SetPointSize(parent.GetFont().GetPointSize() + 1)
for i in range(0, MAX_BUTTONS):
t = LED(self, i)
self.leds[i] = t
sizer.Add(t, 1, wx.ALL|wx.ALIGN_CENTER|wx.ALIGN_CENTER_VERTICAL, 1)
sizer.AddGrowableCol(i)
tsizer.Add(sizer, 1, wx.ALL | wx.EXPAND | wx.ALIGN_LEFT, 1)
self.SetSizer(tsizer)
tsizer.Fit(self)
def Calibrate(self):
for i in range(0, MAX_BUTTONS):
self.leds[i].state = -1
t = self.stick.GetNumberButtons()
for i in range(0, t):
self.leds[i].state = 0
def Update(self):
t = self.stick.GetButtonState()
for i in range(0, MAX_BUTTONS):
if self.leds[i].state == 1:
self.leds[i].state = 0
if (t & (1<<i)):
self.leds[i].state = 1
self.leds[i].Update()
#----------------------------------------------------------------------------
class InfoPanel(wx.Panel):
def __init__(self, parent, stick):
self.stick = stick
wx.Panel.__init__(self, parent, -1)
sizer = wx.GridBagSizer(1, 1)
sizer.Add(Label(self, 'Mfr ID: '), (0, 0), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_RIGHT, 2)
self.MfgID = wx.TextCtrl(self, -1, value='', size=(45, -1), style=wx.TE_READONLY)
sizer.Add(self.MfgID, (0, 1), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_LEFT, 2)
sizer.Add(Label(self, 'Prod Name: '), (0, 2), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_RIGHT, 2)
self.ProdName = wx.TextCtrl(self, -1, value='', style=wx.TE_READONLY)
sizer.Add(self.ProdName, (0, 3), (1, 3), wx.ALL | wx.GROW | wx.ALIGN_LEFT, 2)
sizer.Add(Label(self, 'Threshold: '), (0, 6), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_RIGHT, 2)
self.Threshold = wx.TextCtrl(self, -1, value='', size=(45, -1), style=wx.TE_READONLY)
sizer.Add(self.Threshold, (0, 7), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_LEFT, 2)
#----------------------------------------------------------------------------
b = wx.Button(self, -1, "Calibrate")
sizer.Add(b, (1, 0), (2, 2), wx.ALL | wx.ALIGN_CENTER, 2)
sizer.Add(Label(self, '# of Sticks: '), (1, 2), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_RIGHT, 2)
self.NumJoysticks = wx.TextCtrl(self, -1, value='', size=(45, -1), style=wx.TE_READONLY)
sizer.Add(self.NumJoysticks, (1, 3), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_LEFT, 2)
sizer.Add(Label(self, '# of Axes: '), (1, 4), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_RIGHT, 2)
self.NumAxis = wx.TextCtrl(self, -1, value='', size=(45, -1), style=wx.TE_READONLY)
sizer.Add(self.NumAxis, (1, 5), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_LEFT, 2)
sizer.Add(Label(self, 'Max # Axes: '), (1, 6), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_RIGHT, 2)
self.MaxAxis = wx.TextCtrl(self, -1, value='', size=(45, -1), style=wx.TE_READONLY)
sizer.Add(self.MaxAxis, (1, 7), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_LEFT, 2)
#----------------------------------------------------------------------------
sizer.Add(Label(self, 'Polling -- '), (2, 3), (1, 1), wx.ALL | wx.GROW, 2)
sizer.Add(Label(self, 'Min: '), (2, 4), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_RIGHT, 2)
self.PollMin = wx.TextCtrl(self, -1, value='', size=(45, -1), style=wx.TE_READONLY)
sizer.Add(self.PollMin, (2, 5), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_LEFT, 2)
sizer.Add(Label(self, 'Max: '), (2, 6), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_RIGHT, 2)
self.PollMax = wx.TextCtrl(self, -1, value='', size=(45, -1), style=wx.TE_READONLY)
sizer.Add(self.PollMax, (2, 7), (1, 1), wx.ALL | wx.GROW | wx.ALIGN_LEFT, 2)
#----------------------------------------------------------------------------
self.SetSizer(sizer)
sizer.Fit(self)
def Calibrate(self):
if not self.stick:
return
s = self.stick
self.MfgID.SetValue(str(s.GetManufacturerId()))
self.ProdName.SetValue(str(s.GetProductName()))
self.Threshold.SetValue(str(s.GetMovementThreshold()))
self.NumJoysticks.SetValue(str(s.GetNumberJoysticks()))
self.NumAxis.SetValue(str(s.GetNumberAxes()))
self.MaxAxis.SetValue(str(s.GetMaxAxes()))
self.PollMin.SetValue(str(s.GetPollingMin()))
self.PollMax.SetValue(str(s.GetPollingMax()))
#----------------------------------------------------------------------------
class AxisBar(wx.Gauge):
#
# This class allows us to use a wx.Gauge to display the axis value
    # with a fancy label overlaid onto the gauge itself. Two values are
    # used to do things: first of all, since the gauge is limited to
    # positive numbers, the scale is fixed at 0 to 1000. We will receive
    # an adjusted value to use to render the gauge itself. The other value
    # is a raw value that actually reflects the value from the joystick itself,
# which is then drawn over the gauge.
#
def __init__(self, parent):
wx.Gauge.__init__(self, parent, -1, 1000, size=(-1, 20), style = wx.GA_HORIZONTAL | wx.GA_SMOOTH )
# This is the value we will display.
self.rawvalue = 0
self.SetBackgroundColour('light blue')
self.SetForegroundColour('orange')
# Capture paint events for purpose of updating
# the displayed value.
self.Bind(wx.EVT_PAINT, self.onPaint)
def Update(self, value, rawvalue):
# Updates the gauge itself, sets the raw value for
# the next EVT_PAINT
self.SetValue(value)
self.rawvalue = rawvalue
def onPaint(self, evt):
# Must always create a PaintDC when capturing
# an EVT_PAINT event
self.ShowValue(wx.PaintDC(self), evt)
def ShowValue(self, dc, evt):
# This method handles actual painting of and drawing
# on the gauge.
# Clear out the gauge
dc.Clear()
# and then carry out business as usual
wx.Gauge.OnPaint(self, evt)
# This is the size available to us.
w, h = dc.GetSize()
# This is what we will overlay on the gauge.
# It reflects the actual value received from the
# wx.Joystick.
txt = str(self.rawvalue)
# Copy the default font, make it bold.
fn = self.GetParent().GetFont()
fn.SetWeight(wx.FONTWEIGHT_BOLD)
# Set the font for the DC ...
dc.SetFont(fn)
# ... and calculate how much space our value
# will take up.
fw, fh = dc.GetTextExtent(txt)
        # Calc the center of the gauge, and from that
        # derive the origin of our value (integer math keeps the
        # coordinates as ints).
        tx = (w - fw) // 2
        ty = (h - fh) // 2
# I draw the value twice so as to give it a pseudo-shadow.
# This is (mostly) because I'm too lazy to figure out how
# to blit my text onto the gauge using one of the logical
# functions. The pseudo-shadow gives the text contrast
# regardless of whether the bar is under it or not.
dc.SetTextForeground(wx.BLACK)
dc.DrawText(txt, tx, ty)
dc.SetTextForeground('white')
dc.DrawText(txt, tx-1, ty-1)
#----------------------------------------------------------------------------
class Axis(wx.Panel):
#
    # This class is a container for the min, max, and current
    # values of the joystick axis in question. It also contains
    # special features to render a 'dummy' if the axis
    # in question is not available.
#
def __init__(self, parent, token, stick):
self.stick = stick
#
# token represents the type of axis we're displaying.
#
self.token = token
#
        # Look up the bound 'Has*()' method on the stick.
        # X and Y are always there, so we tie the Has* method
        # to a hardwired True value.
        #
        if token not in ['X', 'Y']:
            self.HasFunc = getattr(stick, 'Has%s' % token)
        else:
            self.HasFunc = self.alwaysTrue
# Now init the panel.
wx.Panel.__init__(self, parent, -1)
sizer = wx.BoxSizer(wx.HORIZONTAL)
if self.HasFunc():
#
# Tie our calibration functions to the appropriate
# stick method. If we don't have the axis in question,
# we won't need them.
#
            self.GetMin = getattr(stick, 'Get%sMin' % token)
            self.GetMax = getattr(stick, 'Get%sMax' % token)
# Create our displays and set them up.
self.Min = wx.StaticText(self, -1, str(self.GetMin()), style=wx.ALIGN_RIGHT)
self.Max = wx.StaticText(self, -1, str(self.GetMax()), style=wx.ALIGN_LEFT)
self.bar = AxisBar(self)
sizer.Add(self.Min, 0, wx.ALL | wx.ALIGN_RIGHT | wx.ALIGN_CENTER_VERTICAL, 1)
sizer.Add(self.bar, 1, wx.ALL | wx.ALIGN_CENTER | wx.ALIGN_CENTER_VERTICAL, 1)
sizer.Add(self.Max, 0, wx.ALL | wx.ALIGN_LEFT | wx.ALIGN_CENTER_VERTICAL, 1)
else:
# We go here if the axis in question is not available.
self.control = wx.StaticText(self, -1, ' *** Not Present ***')
sizer.Add(self.control, 1, wx.ALL | wx.ALIGN_CENTER | wx.ALIGN_CENTER_VERTICAL, 1)
#----------------------------------------------------------------------------
self.SetSizer(sizer)
sizer.Fit(self)
wx.CallAfter(self.Update)
def Calibrate(self):
if not self.HasFunc():
return
self.Min.SetLabel(str(self.GetMin()))
self.Max.SetLabel(str(self.GetMax()))
def Update(self):
# Don't bother if the axis doesn't exist.
if not self.HasFunc():
return
        axis_min = int(self.Min.GetLabel())
        axis_max = int(self.Max.GetLabel())
        #
        # Not all values are available from a wx.JoystickEvent, so I've elected
        # not to use it at all. Therefore, we are getting our values directly
        # from the stick. These values also seem to be more stable and reliable
        # than those received from the event itself, so maybe it's a good idea
        # to use the stick directly for your program.
        #
        # Here we either select the appropriate member of stick.GetPosition() or
        # apply the appropriate Get*Position method call.
        #
        if self.token == 'X':
            val = self.stick.GetPosition().x
        elif self.token == 'Y':
            val = self.stick.GetPosition().y
        else:
            val = getattr(self.stick, 'Get%sPosition' % self.token)()
        #
        # While we might be able to rely on a range of 0-FFFFFF on Win, that
        # might not be true of all drivers on all platforms. Thus, calc the
        # actual full range first.
        #
        if axis_min < 0:
            axis_max += abs(axis_min)
            val += abs(axis_min)
            axis_min = 0
        axis_range = float(axis_max - axis_min)
        #
        # The relative value is used by the derived wx.Gauge since it is a
        # positive-only control.
        #
        relative = 0
        if axis_range:
            relative = int(val / axis_range * 1000)
#
# Pass both the raw and relative values to the derived Gauge
#
self.bar.Update(relative, val)
def alwaysTrue(self):
        # a dummy method used for the X and Y axes.
return True
#----------------------------------------------------------------------------
class AxisPanel(wx.Panel):
#
# Contained herein is a panel that offers a graphical display
# of the levels for all axes supported by wx.Joystick. If
# your system doesn't have a particular axis, it will be
# 'dummied' for transparent use.
#
def __init__(self, parent, stick):
self.stick = stick
        # Defines labels and 'tokens' to identify each
        # supported axis.
axesList = [
('X Axis ', 'X'), ('Y Axis ', 'Y'),
('Z Axis ', 'Z'), ('Rudder ', 'Rudder'),
('U Axis ', 'U'), ('V Axis ', 'V')
]
# Contains a list of all axis initialized.
self.axes = []
wx.Panel.__init__(self, parent, -1)
sizer = wx.FlexGridSizer(3, 4, 1, 1)
sizer.AddGrowableCol(1)
sizer.AddGrowableCol(3)
#----------------------------------------------------------------------------
# Go through the list of labels and tokens and add a label and
# axis display to the sizer for each.
for label, token in axesList:
sizer.Add(Label(self, label), 0, wx.ALL | wx.ALIGN_RIGHT, 2)
t = Axis(self, token, self.stick)
self.axes.append(t)
sizer.Add(t, 1, wx.ALL | wx.EXPAND | wx.ALIGN_LEFT, 2)
#----------------------------------------------------------------------------
self.SetSizer(sizer)
sizer.Fit(self)
wx.CallAfter(self.Update)
def Calibrate(self):
for i in self.axes:
i.Calibrate()
def Update(self):
for i in self.axes:
i.Update()
#----------------------------------------------------------------------------
class JoystickDemoPanel(wx.Panel):
def __init__(self, parent, log):
self.log = log
wx.Panel.__init__(self, parent, -1)
# Try to grab the control. If we get it, capture the stick.
# Otherwise, throw up an exception message and play stupid.
try:
self.stick = wx.adv.Joystick()
self.stick.SetCapture(self)
# Calibrate our controls
wx.CallAfter(self.Calibrate)
wx.CallAfter(self.OnJoystick)
except NotImplementedError as v:
wx.MessageBox(str(v), "Exception Message")
self.stick = None
# One Sizer to Rule Them All...
sizer = wx.GridBagSizer(2,2)
self.info = InfoPanel(self, self.stick)
sizer.Add(self.info, (0, 0), (1, 3), wx.ALL | wx.GROW, 2)
self.info.Bind(wx.EVT_BUTTON, self.Calibrate)
self.joy = JoyPanel(self, self.stick)
sizer.Add(self.joy, (1, 0), (1, 1), wx.ALL | wx.GROW, 2)
self.pov = POVPanel(self, self.stick)
sizer.Add(self.pov, (1, 1), (1, 2), wx.ALL | wx.GROW, 2)
self.axes = AxisPanel(self, self.stick)
sizer.Add(self.axes, (2, 0), (1, 3), wx.ALL | wx.GROW, 2)
self.buttons = JoyButtons(self, self.stick)
sizer.Add(self.buttons, (3, 0), (1, 3), wx.ALL | wx.EXPAND | wx.ALIGN_CENTER | wx.ALIGN_CENTER_VERTICAL, 1)
self.SetSizer(sizer)
sizer.Fit(self)
        # Capture Joystick events (if they happen)
        self.Bind(wx.EVT_JOYSTICK_EVENTS, self.OnJoystick)
        if self.stick:
            self.stick.SetMovementThreshold(10)
def Calibrate(self, evt=None):
# Do not try this without a stick
if not self.stick:
return
self.info.Calibrate()
self.axes.Calibrate()
self.pov.Calibrate()
self.buttons.Calibrate()
def OnJoystick(self, evt=None):
if not self.stick:
return
self.axes.Update()
self.joy.Update()
self.pov.Update()
if evt is not None and evt.IsButton():
self.buttons.Update()
def ShutdownDemo(self):
if self.stick:
self.stick.ReleaseCapture()
self.stick = None
#----------------------------------------------------------------------------
def runTest(frame, nb, log):
if haveJoystick:
win = JoystickDemoPanel(nb, log)
return win
else:
from wx.lib.msgpanel import MessagePanel
win = MessagePanel(nb, 'wx.Joystick is not available on this platform.',
'Sorry', wx.ICON_WARNING)
return win
#----------------------------------------------------------------------------
overview = """\
<html>
<body>
<h1>wx.Joystick</h1>
This demo illustrates the use of the wx.Joystick class, which is an interface to
one or more joysticks attached to your system.
<p>The data that can be retrieved from the joystick comes in four basic flavors.
All of these are illustrated in the demo. In fact, this demo illustrates everything
you <b>can</b> get from the wx.Joystick control.
<ul>
<li>Static information such as Manufacturer ID and model name,
<li>Analog input from up to six axes, including X and Y for the actual stick,
<li>Button input from the fire button and any other buttons that the stick has,
<li>and the POV control (a kind of mini-joystick on top of the joystick) that many sticks come with.
</ul>
<p>Getting data from the joystick can be event-driven thanks to four event types associated
with wx.JoystickEvent, or the joystick can be polled programmatically to get data on
a regular basis.
<h2>Data types</h2>
Data from the joystick comes in two flavors: that which defines the boundaries, and that
which defines the current state of the stick. Thus, we have Get*Max() and Get*Min()
methods for all axes, the max number of axes, the max number of buttons, and so on. In
general, this data can be read once and stored to speed computation up.
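<p>For instance, a minimal sketch of caching that boundary data once (here 'stick'
is assumed to be a captured wx.adv.Joystick, and the variable names are illustrative):
<pre>
# cache the static boundary data, e.g. at calibration time
x_bounds = (stick.GetXMin(), stick.GetXMax())
y_bounds = (stick.GetYMin(), stick.GetYMax())
num_buttons = stick.GetNumberButtons()
</pre>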
<h3>Analog Input</h3>
Analog input (the axes) is delivered as a whole, positive number. If you need to know
if the axis is at zero (centered) or not, you will first have to calculate that center
based on the max and min values. The demo shows a bar graph for each axis expressed
in native numerical format, plus a 'centered' X-Y axis compass showing the relationship
of that input to the calculated stick position.
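<p>A minimal sketch of that centering calculation (reusing the cached bounds from
above; the names are illustrative):
<pre>
x_min, x_max = x_bounds
x_center = (x_min + x_max) / 2.0
# positive offset means right of center, negative means left
x_offset = stick.GetPosition().x - x_center
</pre>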
<p>Analog input may be jumpy and spurious, so the control has a means of 'smoothing' the
analog data by setting a movement threshold. This demo sets the threshold to 10, but
you can set it at any valid value between the min and max.
<h3>Button Input</h3>
Button state is retrieved as one int that contains each button state mapped to a bit.
You get the state of a button by AND-ing its bit against the returned value, in the form
<pre>
# assume buttonState is what the stick returned, and buttonBit
# is the bit you want to examine
if (buttonState & ( 1 << buttonBit )) :
# button pressed, do something with it
</pre>
<p>The problem here is that some OSs return a 32-bit value for up to 32 buttons
(imagine <i>that</i> stick!). Python V2.3 will generate an exception for bit
values over 30. For that reason, this demo is limited to 16 buttons.
<p>Note that more than one button can be pressed at a time, so be sure to check all of them!
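<p>A sketch of collecting every pressed button at once (16 buttons assumed, as in
this demo):
<pre>
state = stick.GetButtonState()
pressed = [i for i in range(16) if state & (1 << i)]
</pre>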
<h3>POV Input</h3>
POV hats come in two flavors: four-way and continuous. Four-way POVs are restricted to
the cardinal points of the compass; continuous, or CTS, POV hats can deliver input in
0.01 degree increments, theoretically. The data is returned as a whole number; the last
two digits are considered to be to the right of the decimal point, so in order to
use this information, you need to divide by 100 right off the bat.
<p>Different methods are provided to retrieve the POV data for a CTS hat
versus a four-way hat.
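<p>A minimal sketch of reading a CTS hat (assumes the stick actually reports one;
the angle handling is illustrative):
<pre>
raw = stick.GetPOVCTSPosition()
degrees = raw / 100.0    # e.g. 9000 becomes 90.0 degrees
</pre>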
<h2>Caveats</h2>
The wx.Joystick control is in many ways incomplete at the C++ library level, but it is
not insurmountable. In short, while the joystick interface <i>can</i> be event-driven,
the wx.JoystickEvent class lacks event binders for all event types. Thus, you cannot
necessarily rely on wx.JoystickEvents to tell you when something has changed.
<ul>
<li>There are no events associated with the POV control.
<li>There are no events associated with the Rudder.
<li>There are no events associated with the U and V axes.
</ul>
<p>Fortunately, there is an easy workaround. In the top level frame, create a wx.Timer
that will poll the stick at a set interval. Of course, if you do this, you might as
well forgo catching wxEVT_JOYSTICK_* events at all and rely on the timer to do the
polling.
<p>Ideally, the timer should be a one-shot; after it fires, collect and process data as
needed, then re-start the timer, possibly using wx.CallAfter().
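<p>A minimal sketch of that pattern (the handler name and the 50 ms interval are
illustrative):
<pre>
# in the frame's __init__:
self.timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.OnPollStick, self.timer)
self.timer.StartOnce(50)    # one-shot timer

def OnPollStick(self, evt):
    # ... read and process the joystick here ...
    wx.CallAfter(self.timer.StartOnce, 50)    # re-arm the one-shot
</pre>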
</body>
</html>
"""
#----------------------------------------------------------------------------
if __name__ == '__main__':
import sys,os
import run
run.main(['', os.path.basename(sys.argv[0])] + sys.argv[1:])
| [
"[email protected]"
]
| |
4ede01c900ccfbb8f6ca47a02a125acfb6428bd3 | be70e130f53c7703f942057923577adf607687a6 | /src/biotite/file.pyi | a2139f3eef3966c7794d5746f092780f16322213 | [
"BSD-3-Clause"
]
| permissive | Dr-Moreb/biotite | 4043eadb607e9ede13ce049ade554546ce58afe0 | c34ccb7a7a7de923bf8a238944dfb7e1e635bb28 | refs/heads/master | 2020-04-01T19:02:08.086093 | 2018-10-10T16:01:45 | 2018-10-10T16:01:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 744 | pyi | # This source code is part of the Biotite package and is distributed
# under the 3-Clause BSD License. Please see 'LICENSE.rst' for further
# information.
from typing import Generic, TypeVar, Union, TextIO, BinaryIO
from .copyable import Copyable
_T_io = TypeVar("_T_io", TextIO, BinaryIO)
class File(Copyable, Generic[_T_io]):
def __init__(self) -> None: ...
def read(self, file: Union[str, _T_io]) -> None: ...
def write(self, file: Union[str, _T_io]) -> None: ...
class TextFile(File[TextIO]):
def __init__(self) -> None: ...
def read(self, file: Union[str, TextIO]) -> None: ...
def write(self, file: Union[str, TextIO]) -> None: ...
def __str__(self) -> str: ...
class InvalidFileError(Exception):
... | [
"[email protected]"
]
| |
2a1087e05acebcf01639fe47aff07188a44ebec8 | 9ae6ce54bf9a2a86201961fdbd5e7b0ec913ff56 | /google/ads/googleads/v9/services/services/carrier_constant_service/__init__.py | 80069d6c6385daed2583921ed17c9252426c9a92 | [
"Apache-2.0"
]
| permissive | GerhardusM/google-ads-python | 73b275a06e5401e6b951a6cd99af98c247e34aa3 | 676ac5fcb5bec0d9b5897f4c950049dac5647555 | refs/heads/master | 2022-07-06T19:05:50.932553 | 2022-06-17T20:41:17 | 2022-06-17T20:41:17 | 207,535,443 | 0 | 0 | Apache-2.0 | 2019-09-10T10:58:55 | 2019-09-10T10:58:55 | null | UTF-8 | Python | false | false | 694 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import CarrierConstantServiceClient
__all__ = ("CarrierConstantServiceClient",)
| [
"[email protected]"
]
| |
f692be9989677461e8cb5d3829593c3c761017f4 | bd72c02af0bbd8e3fc0d0b131e3fb9a2aaa93e75 | /Design/implement_stack_using_queues.py | 90c0e731a480e74cfb57531edf2b51ea640c72b6 | []
| no_license | harvi7/Leetcode-Problems-Python | d3a5e8898aceb11abc4cae12e1da50061c1d352c | 73adc00f6853e821592c68f5dddf0a823cce5d87 | refs/heads/master | 2023-05-11T09:03:03.181590 | 2023-04-29T22:03:41 | 2023-04-29T22:03:41 | 222,657,838 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 866 | py | from queue import Queue
class MyStack:
def __init__(self):
"""
Initialize your data structure here.
"""
self.stack = Queue()
def push(self, x: int) -> None:
"""
Push element x onto stack.
"""
        # Enqueue x, then rotate the rest of the queue behind it so the
        # newest element sits at the front, giving LIFO order on get().
        self.stack.put(x)
        i = 1
        while i < self.stack.qsize():
            i += 1
            self.stack.put(self.stack.get())
def pop(self) -> int:
"""
Removes the element on top of the stack and returns that element.
"""
return self.stack.get()
def top(self) -> int:
"""
Get the top element.
"""
top = self.stack.get()
self.push(top)
return top
def empty(self) -> bool:
"""
Returns whether the stack is empty.
"""
return self.stack.empty() | [
"[email protected]"
]
| |
9fb7ec8bf614b85657848553a6966bddee75bccb | 795df757ef84073c3adaf552d5f4b79fcb111bad | /r8lib/r83col_print_part.py | 457c5bdb95cdf27c187fd4287858d3c0b6468785 | []
| no_license | tnakaicode/jburkardt-python | 02cb2f9ba817abf158fc93203eb17bf1cb3a5008 | 1a63f7664e47d6b81c07f2261b44f472adc4274d | refs/heads/master | 2022-05-21T04:41:37.611658 | 2022-04-09T03:31:00 | 2022-04-09T03:31:00 | 243,854,197 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,243 | py | #! /usr/bin/env python
#
def r83col_print_part ( n, a, max_print, title ):
#*****************************************************************************80
#
## R83COL_PRINT_PART prints "part" of an R83COL.
#
# Discussion:
#
# An R83COL is a (3,N) array of R8's.
#
# The user specifies MAX_PRINT, the maximum number of lines to print.
#
# If N, the size of the vector, is no more than MAX_PRINT, then
# the entire vector is printed, one entry per line.
#
# Otherwise, if possible, the first MAX_PRINT-2 entries are printed,
# followed by a line of periods suggesting an omission,
# and the last entry.
#
# Licensing:
#
# This code is distributed under the GNU LGPL license.
#
# Modified:
#
# 11 April 2015
#
# Author:
#
# John Burkardt
#
# Parameters:
#
# Input, integer N, the number of entries of the vector.
#
# Input, real A(N,3), the vector to be printed.
#
# Input, integer MAX_PRINT, the maximum number of lines
# to print.
#
# Input, string TITLE, a title.
#
if ( 0 < max_print ):
if ( 0 < n ):
if ( 0 < len ( title ) ):
print ( '' )
print ( title )
print ( '' )
if ( n <= max_print ):
for i in range ( 0, n ):
print ( ' %4d %14g %14g %14g' % ( i, a[i,0], a[i,1], a[i,2] ) )
elif ( 3 <= max_print ):
for i in range ( 0, max_print - 2 ):
print ( ' %4d %14g %14g %14g' % ( i, a[i,0], a[i,1], a[i,2] ) )
print ( ' .... .............. .............. ..............' )
i = n - 1
print ( ' %4d %14g %14g %14g' % ( i, a[i,0], a[i,1], a[i,2] ) )
else:
for i in range ( 0, max_print - 1 ):
print ( ' %4d %14g %14g %14g' % ( i, a[i,0], a[i,1], a[i,2] ) )
i = max_print - 1
print ( ' %4d %14g %14g %14g ...more entries...' \
% ( i, a[i,0], a[i,1], a[i,2] ) )
return
def r83col_print_part_test ( ):
#*****************************************************************************80
#
## R83COL_PRINT_PART_TEST tests R83COL_PRINT_PART.
#
# Licensing:
#
# This code is distributed under the GNU LGPL license.
#
# Modified:
#
# 11 April 2015
#
# Author:
#
# John Burkardt
#
import numpy as np
import platform
print ( '' )
print ( 'R83COL_PRINT_PART_TEST' )
print ( ' Python version: %s' % ( platform.python_version ( ) ) )
print ( ' R83COL_PRINT_PART prints part of an R83COL.' )
n = 10
v = np.array ( [ \
[ 11, 12, 13 ], \
[ 21, 22, 23 ], \
[ 31, 32, 33 ], \
[ 41, 42, 43 ], \
[ 51, 52, 53 ], \
[ 61, 62, 63 ], \
[ 71, 72, 73 ], \
[ 81, 82, 83 ], \
[ 91, 92, 93 ], \
[ 101, 102, 103 ] ] )
max_print = 2
r83col_print_part ( n, v, max_print, ' Output with MAX_PRINT = 2' )
max_print = 5
r83col_print_part ( n, v, max_print, ' Output with MAX_PRINT = 5' )
max_print = 25
r83col_print_part ( n, v, max_print, ' Output with MAX_PRINT = 25' )
#
# Terminate.
#
print ( '' )
print ( 'R83COL_PRINT_PART_TEST:' )
print ( ' Normal end of execution.' )
return
if ( __name__ == '__main__' ):
from timestamp import timestamp
timestamp ( )
r83col_print_part_test ( )
timestamp ( )
| [
"[email protected]"
]
| |
7dda127c3c00949baafaf34b60be20df495fd4e2 | 2f4f6efd1963aa4e8e749f17b078720c437ae9ac | /time_process.py | b6100d34aff621e09ccb32022c48bbdd9b09ece0 | []
| no_license | FlashRepo/Flash-Storm | d248f4ed615096539048be1ec65f1fcb31f1d2ee | 2dd953350a5d690c409996512dec30196d8e9199 | refs/heads/master | 2021-07-15T02:58:36.421446 | 2017-10-23T07:03:47 | 2017-10-23T07:03:47 | 107,943,717 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | import pickle
import numpy as np

# timing results: {problem: {algorithm: [runtimes]}}
results = pickle.load(open('time.p', 'rb'))
problems = sorted(results.keys())
print(', '.join(['Problem', 'MOEAD', 'NSGAII', 'SPEA2']))
for problem in problems:
    print(problem, end=' ')
    for algorithm in sorted(results[problem].keys()):
        print(round(np.mean(results[problem][algorithm]), 3), end=' ')
    print()
"[email protected]"
]
| |
e329f90cf89024a973e4360bf56a43969742fe39 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5658571765186560_1/Python/jfguo/main.py | 5cdb0cbc380443d34ead8f2010a9a0172066645e | []
| no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 999 | py |
import os
import sys
import glob
import subprocess
import random
import fileinput
next_line = 0
lines = [line.strip() for line in fileinput.input()]
def get_line():
global next_line
i = next_line
next_line += 1
return lines[i]
def calc():
s = get_line().split(' ')
X = int(s[0])
R = int(s[1])
C = int(s[2])
if R > C:
R, C = C, R
if R*C % X != 0:
return 'RICHARD'
if R < (X + 1)/2:
return 'RICHARD'
if X == 1:
return 'GABRIEL'
if X == 2:
return 'GABRIEL'
if X == 3:
return 'GABRIEL'
if X == 4:
if R == 2:
return 'RICHARD'
if R == 3:
return 'GABRIEL'
if R == 4:
return 'GABRIEL'
if X >= 7:
return 'RICHARD'
if R >= (X + 1)/2 + 1:
return 'GABRIEL'
if R*C <= 2*X:
return 'RICHARD'
return 'GABRIEL'
T = int(get_line())
for i in range(1, T + 1):
print('Case #%d: %s' % (i, calc()))
| [
"[email protected]"
]
| |
82660f2d91527c4a4c020cc6bd64fd6a5a183eed | be6e6d8af85adf044bf79676b7276c252407e010 | /spec/python/test_params_call_extra_parens.py | 5035e7de4b9cff4c0b219268de38ae4498a8fa5d | [
"MIT"
]
| permissive | kaitai-io/kaitai_struct_tests | 516e864d29d1eccc5fe0360d1b111af7a5d3ad2b | 3d8a6c00c6bac81ac26cf1a87ca84ec54bf1078d | refs/heads/master | 2023-08-19T19:42:47.281953 | 2023-08-04T20:26:50 | 2023-08-04T20:26:50 | 52,155,797 | 12 | 41 | MIT | 2023-07-30T23:30:30 | 2016-02-20T13:55:39 | Ruby | UTF-8 | Python | false | false | 378 | py | # Autogenerated from KST: please remove this line if doing any edits by hand!
import unittest
from params_call_extra_parens import ParamsCallExtraParens
class TestParamsCallExtraParens(unittest.TestCase):
def test_params_call_extra_parens(self):
with ParamsCallExtraParens.from_file('src/term_strz.bin') as r:
self.assertEqual(r.buf1.body, u"foo|b")
| [
"[email protected]"
]
| |
9e4a72cfaa5d511cbc35bccab33d6d759a585c40 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2976/49361/276731.py | c934f824ceb2d2636866731a7c096aa5606facba | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 315 | py | import re
lines = []
while True:
try:
lines.append(input())
    except EOFError:  # input() raises EOFError when stdin is exhausted
break
outputStr = ""
for index in range(1, len(lines)):
    # escape the pattern so the first line is matched literally, not as a regex
    afterStr = re.sub(re.escape(lines[0].replace(" ", "")), "", lines[index].replace(" ", ""), flags=re.IGNORECASE)
outputStr += afterStr + "\n"
print(outputStr.strip("\n")) | [
"[email protected]"
]
| |
b22f31b6aadd19380527790ab5aa79ebdec743c4 | 633b695a03e789f6aa644c7bec7280367a9252a8 | /samplepy/3-21_nest_example.py | 127cdbb495205706506f0903ba1a7d345e90446b | []
| no_license | tnakaicode/PlotGallery | 3d831d3245a4a51e87f48bd2053b5ef82cf66b87 | 5c01e5d6e2425dbd17593cb5ecc973982f491732 | refs/heads/master | 2023-08-16T22:54:38.416509 | 2023-08-03T04:23:21 | 2023-08-03T04:23:21 | 238,610,688 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 94 | py | for a in range(1, 4):
print('a=', a)
for b in range(1, 4):
print(' b=', b)
| [
"[email protected]"
]
| |
3f04bfd5dc6f5a148c9f8957f359a211e6e91bd0 | 0e834094f5e4274b279939b81caedec7d8ef2c73 | /m2/d05/fork.py | 0f760582ce1b6de41ccd05f7672b9e7300500889 | []
| no_license | SpringSnowB/All-file | b74eaebe1d54e1410945eaca62c70277a01ef0bf | 03485c60e7c07352aee621df94455da3d466b872 | refs/heads/master | 2020-11-27T23:54:36.984555 | 2020-01-21T08:42:21 | 2020-01-21T08:42:21 | 229,651,737 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | import os
print("-----------------")
a = 1
pid = os.fork()  # the child process begins executing from here
if pid < 0:
    print("fail")
elif pid == 0:
    print("child a=", a)   # prints 1
    a = 10000
else:
    print("parent a=", a)  # prints 1
print("over a=", a)  # child prints a=10000, parent prints a=1
| [
"[email protected]"
]
| |
121d893f28582fff0641fac5f6b9adc78fb4fd4b | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/ALCATEL-IND1-VLAN-STACKING-MIB.py | e69667e7960c0ccd5dbafcc31d4d1f6bbefc2552 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 17,347 | py | #
# PySNMP MIB module ALCATEL-IND1-VLAN-STACKING-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ALCATEL-IND1-VLAN-STACKING-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:20:26 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
softentIND1VlanStackingMgt, = mibBuilder.importSymbols("ALCATEL-IND1-BASE", "softentIND1VlanStackingMgt")
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion", "SingleValueConstraint")
InterfaceIndex, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
Integer32, NotificationType, Unsigned32, TimeTicks, IpAddress, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, iso, Counter64, ModuleIdentity, Counter32, Gauge32, Bits, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "NotificationType", "Unsigned32", "TimeTicks", "IpAddress", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "Counter64", "ModuleIdentity", "Counter32", "Gauge32", "Bits", "MibIdentifier")
RowStatus, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "TextualConvention", "DisplayString")
alcatelIND1VLANStackingMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1))
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIB.setLastUpdated('200704030000Z')
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIB.setOrganization('Alcatel-Lucent')
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIB.setContactInfo('Please consult with Customer Service to ensure the most appropriate version of this document is used with the products in question: Alcatel-Lucent, Enterprise Solutions Division (Formerly Alcatel Internetworking, Incorporated) 26801 West Agoura Road Agoura Hills, CA 91301-5122 United States Of America Telephone: North America +1 800 995 2696 Latin America +1 877 919 9526 Europe +31 23 556 0100 Asia +65 394 7933 All Other +1 818 878 4507 Electronic Mail: [email protected] World Wide Web: http://alcatel-lucent.com/wps/portal/enterprise File Transfer Protocol: ftp://ftp.ind.alcatel.com/pub/products/mibs')
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIB.setDescription('The parameters for configuration of the VLAN Stacking feature, including the association between ports and svlans. The right to make changes in specification and other information contained in this document without prior notice is reserved. No liability shall be assumed for any incidental, indirect, special, or consequential damages whatsoever arising from or related to this document or the information contained herein. Vendors, end-users, and other interested parties are granted non-exclusive license to use this specification in connection with management of the products for which it is intended to be used. Copyright (C) 1995-2006 Alcatel-Lucent ALL RIGHTS RESERVED WORLDWIDE')
alcatelIND1VLANStackingMIBObjects = ObjectIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1))
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIBObjects.setStatus('current')
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIBObjects.setDescription('Branch For VLAN Stacking Managed Objects.')
alcatelIND1VLANStackingMIBConformance = ObjectIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 2))
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIBConformance.setStatus('current')
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIBConformance.setDescription('Branch For VLAN Stacking Conformance Information.')
alcatelIND1VLANStackingMIBGroups = ObjectIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 2, 1))
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIBGroups.setStatus('current')
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIBGroups.setDescription('Branch For VLAN Stacking Units Of Conformance.')
alcatelIND1VLANStackingMIBCompliances = ObjectIdentity((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 2, 2))
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIBCompliances.setStatus('current')
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIBCompliances.setDescription('Branch For VLAN Stacking Compliance Statements.')
alaVlanStackingPort = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 1))
alaVstkPortTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 1, 1), )
if mibBuilder.loadTexts: alaVstkPortTable.setStatus('current')
if mibBuilder.loadTexts: alaVstkPortTable.setDescription('A table that contains port-specific information for the VLAN Stacking feature. An entry in this table is created when a port is configured with VLAN stacking capability, OR when a port is configured with a specific vendor ethertype, a particular bridge protocol action.')
alaVstkPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 1, 1, 1), ).setIndexNames((0, "ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkPortNumber"))
if mibBuilder.loadTexts: alaVstkPortEntry.setStatus('current')
if mibBuilder.loadTexts: alaVstkPortEntry.setDescription('A VLAN Stacking port entry.')
alaVstkPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 1, 1, 1, 1), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alaVstkPortNumber.setStatus('current')
if mibBuilder.loadTexts: alaVstkPortNumber.setDescription('The port ifindex of the port for which this entry contains VLAN Stacking management information. ')
alaVstkPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("userCustomer", 1), ("userProvider", 2), ("network", 3))).clone('userCustomer')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alaVstkPortType.setStatus('current')
if mibBuilder.loadTexts: alaVstkPortType.setDescription('The type of this VLAN Stacking port. User-customer (1) is a VLAN Stacking user port connected to customer network. User-provider (2) is a VLAN Stacking user port used to run provider management traffic. Network (2) indicates a network facing port.')
alaVstkPortVendorTpid = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 1, 1, 1, 3), Integer32().clone(34984)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alaVstkPortVendorTpid.setStatus('current')
if mibBuilder.loadTexts: alaVstkPortVendorTpid.setDescription('The TPID for this port. It is used for the incoming data traffic parsing and it is substituted to the 802.1Q standard Tpid for the outgoing data traffic. This is used for compatibility with other vendor equipment. The default value is the standard value 0x88a8.')
alaVstkPortBpduTreatment = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("flooded", 1), ("dropped", 2))).clone('flooded')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alaVstkPortBpduTreatment.setStatus('current')
if mibBuilder.loadTexts: alaVstkPortBpduTreatment.setDescription("The customer bpdu treatment for this port. It defines the type of processing applied to the user's bridge protocol data unit. The bridge protocol treatment (flooded) floods any user's bridge protocol data unit to all user ports and network ports on the same SVLAN. The bridge protocol (dropped) drops any user's bridge protocol data unit.")
alaVstkPortAcceptFrameType = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("tagged", 1), ("untagged", 2), ("all", 3))).clone('all')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alaVstkPortAcceptFrameType.setStatus('current')
if mibBuilder.loadTexts: alaVstkPortAcceptFrameType.setDescription('The acceptable frame types on this port.')
alaVstkPortLookupMiss = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("drop", 1), ("default", 2))).clone('default')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alaVstkPortLookupMiss.setStatus('current')
if mibBuilder.loadTexts: alaVstkPortLookupMiss.setDescription('Treatment of tagged packets upon vlan lookup miss. Drop (1) means that on lookup miss the packets will be dropped. Default (2) means that on lookup miss the default SVLAN for that port will be used to tunnel the packets. This is significant only for user port.')
alaVstkPortDefaultSvlan = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alaVstkPortDefaultSvlan.setStatus('current')
if mibBuilder.loadTexts: alaVstkPortDefaultSvlan.setDescription('The default svlan of this port.')
alaVstkPortRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 1, 1, 1, 8), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alaVstkPortRowStatus.setStatus('current')
if mibBuilder.loadTexts: alaVstkPortRowStatus.setDescription('The status of this table entry. The supported value supported for set are createAndGo (4) and destroy(6), to create or remove a vlan-stacking port.')
alaVstkPortLegacyStpBpdu = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2))).clone('disable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alaVstkPortLegacyStpBpdu.setStatus('current')
if mibBuilder.loadTexts: alaVstkPortLegacyStpBpdu.setDescription('The legacy STP BPDU treatment for this port. It defines the type of processing applied to STP legacy BPDUs on network ports. Legacy BPDU refer to conventional/customer BPDUs with MAC address 01:80:c2:00:00:00 and its processing on network ports can be enabled/disabled by this object.By default the value is disabled i.e provider MAC BPDU with MAC address 01:80:c2:00:00:08 would be processed at network ports.')
alaVlanStackingSvlanPort = MibIdentifier((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 2))
alaVstkSvlanPortTable = MibTable((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 2, 1), )
if mibBuilder.loadTexts: alaVstkSvlanPortTable.setStatus('current')
if mibBuilder.loadTexts: alaVstkSvlanPortTable.setDescription('A table that contains svlan/ipmvlan-port association for the VLAN Stacking feature.')
alaVstkSvlanPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 2, 1, 1), ).setIndexNames((0, "ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkSvlanPortSvlanNumber"), (0, "ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkSvlanPortPortNumber"), (0, "ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkSvlanPortCvlanNumber"))
if mibBuilder.loadTexts: alaVstkSvlanPortEntry.setStatus('current')
if mibBuilder.loadTexts: alaVstkSvlanPortEntry.setDescription('The svlan/ipmvlan-port association.')
alaVstkSvlanPortSvlanNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: alaVstkSvlanPortSvlanNumber.setStatus('current')
if mibBuilder.loadTexts: alaVstkSvlanPortSvlanNumber.setDescription('Number identifying the svlan/ipmvlan.')
alaVstkSvlanPortPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 2, 1, 1, 2), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alaVstkSvlanPortPortNumber.setStatus('current')
if mibBuilder.loadTexts: alaVstkSvlanPortPortNumber.setDescription('The port ifindex of the port associated to the svlan/ipmvlan.')
alaVstkSvlanPortCvlanNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: alaVstkSvlanPortCvlanNumber.setStatus('current')
if mibBuilder.loadTexts: alaVstkSvlanPortCvlanNumber.setDescription('The customer vlan id associated to the svlan/ipmvlan.')
alaVstkSvlanPortMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("doubleTag", 1), ("translate", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alaVstkSvlanPortMode.setStatus('current')
if mibBuilder.loadTexts: alaVstkSvlanPortMode.setDescription('The vlan stacking mode: double tagging (1) or vlan translation/mapping (2). Only translation mode is valid in case of IPM Vlans')
alaVstkSvlanPortRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 1, 2, 1, 1, 5), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alaVstkSvlanPortRowStatus.setStatus('current')
if mibBuilder.loadTexts: alaVstkSvlanPortRowStatus.setDescription('The status of this table entry. The supported value for set are createAndGo (4) and destroy(6), to add or remove an svlan-port association.')
alcatelIND1VLANStackingMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 2, 2, 1)).setObjects(("ALCATEL-IND1-VLAN-STACKING-MIB", "vlanStackingPortGroup"), ("ALCATEL-IND1-VLAN-STACKING-MIB", "vlanStackingSvlanPortGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
alcatelIND1VLANStackingMIBCompliance = alcatelIND1VLANStackingMIBCompliance.setStatus('current')
if mibBuilder.loadTexts: alcatelIND1VLANStackingMIBCompliance.setDescription('Compliance statement for VLAN Stacking.')
vlanStackingPortGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 2, 1, 1)).setObjects(("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkPortNumber"), ("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkPortType"), ("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkPortVendorTpid"), ("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkPortBpduTreatment"), ("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkPortAcceptFrameType"), ("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkPortLookupMiss"), ("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkPortDefaultSvlan"), ("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkPortRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
vlanStackingPortGroup = vlanStackingPortGroup.setStatus('current')
if mibBuilder.loadTexts: vlanStackingPortGroup.setDescription('Collection of objects for management of VLAN Stacking Ports.')
vlanStackingSvlanPortGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 6486, 801, 1, 2, 1, 37, 1, 2, 1, 2)).setObjects(("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkSvlanPortSvlanNumber"), ("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkSvlanPortPortNumber"), ("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkSvlanPortCvlanNumber"), ("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkSvlanPortMode"), ("ALCATEL-IND1-VLAN-STACKING-MIB", "alaVstkSvlanPortRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
vlanStackingSvlanPortGroup = vlanStackingSvlanPortGroup.setStatus('current')
if mibBuilder.loadTexts: vlanStackingSvlanPortGroup.setDescription('Collection of objects for svlan-port associations.')
mibBuilder.exportSymbols("ALCATEL-IND1-VLAN-STACKING-MIB", vlanStackingPortGroup=vlanStackingPortGroup, alaVstkSvlanPortSvlanNumber=alaVstkSvlanPortSvlanNumber, alcatelIND1VLANStackingMIBGroups=alcatelIND1VLANStackingMIBGroups, alcatelIND1VLANStackingMIBCompliance=alcatelIND1VLANStackingMIBCompliance, PYSNMP_MODULE_ID=alcatelIND1VLANStackingMIB, alaVstkPortBpduTreatment=alaVstkPortBpduTreatment, alaVstkPortVendorTpid=alaVstkPortVendorTpid, alaVstkSvlanPortRowStatus=alaVstkSvlanPortRowStatus, vlanStackingSvlanPortGroup=vlanStackingSvlanPortGroup, alaVstkPortRowStatus=alaVstkPortRowStatus, alaVstkPortEntry=alaVstkPortEntry, alaVstkSvlanPortEntry=alaVstkSvlanPortEntry, alaVstkPortType=alaVstkPortType, alcatelIND1VLANStackingMIB=alcatelIND1VLANStackingMIB, alaVstkSvlanPortTable=alaVstkSvlanPortTable, alaVstkSvlanPortMode=alaVstkSvlanPortMode, alaVlanStackingPort=alaVlanStackingPort, alaVstkPortLookupMiss=alaVstkPortLookupMiss, alaVstkPortTable=alaVstkPortTable, alaVstkPortDefaultSvlan=alaVstkPortDefaultSvlan, alaVstkSvlanPortPortNumber=alaVstkSvlanPortPortNumber, alaVlanStackingSvlanPort=alaVlanStackingSvlanPort, alaVstkSvlanPortCvlanNumber=alaVstkSvlanPortCvlanNumber, alcatelIND1VLANStackingMIBConformance=alcatelIND1VLANStackingMIBConformance, alcatelIND1VLANStackingMIBCompliances=alcatelIND1VLANStackingMIBCompliances, alaVstkPortAcceptFrameType=alaVstkPortAcceptFrameType, alaVstkPortLegacyStpBpdu=alaVstkPortLegacyStpBpdu, alcatelIND1VLANStackingMIBObjects=alcatelIND1VLANStackingMIBObjects, alaVstkPortNumber=alaVstkPortNumber)
| [
"[email protected]"
]
| |
e36502d092eb4d4f11257c8b70b8846bfef0a973 | a46d135ba8fd7bd40f0b7d7a96c72be446025719 | /packages/python/plotly/plotly/validators/histogram/marker/colorbar/_yanchor.py | 5d929c6570d7a6ea82df360ef7b79e5066a0b291 | [
"MIT"
]
| permissive | hugovk/plotly.py | 5e763fe96f225d964c4fcd1dea79dbefa50b4692 | cfad7862594b35965c0e000813bd7805e8494a5b | refs/heads/master | 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 | MIT | 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null | UTF-8 | Python | false | false | 507 | py | import _plotly_utils.basevalidators
class YanchorValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self, plotly_name="yanchor", parent_name="histogram.marker.colorbar", **kwargs
):
super(YanchorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
values=kwargs.pop("values", ["top", "middle", "bottom"]),
**kwargs
)
| [
"[email protected]"
]
| |
a2b750917d590f2cad5dfbac283319542556e13c | fd717fe6ca74f6d77210cdd57a8c365d27c5bfc6 | /pychron/experiment/utilities/mass_spec_utilities.py | e439853d44c9878143faa78ab7e9391af8a64b9f | [
"Apache-2.0"
]
| permissive | stephen-e-cox/pychron | 1dea0467d904d24c8a3dd22e5b720fbccec5c0ed | 681d5bfe2c13e514859479369c2bb20bdf5c19cb | refs/heads/master | 2021-01-19T15:40:03.663863 | 2016-07-14T14:37:16 | 2016-07-14T14:37:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,294 | py | # ===============================================================================
# Copyright 2016 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
import os
# ============= standard library imports ========================
# ============= local library imports ==========================
os.environ['MassSpecDBVersion'] = '16'
from pychron.mass_spec.database.massspec_database_adapter import MassSpecDatabaseAdapter
from pychron.mass_spec.database.massspec_orm import AnalysesTable, IsotopeTable, DetectorTable
db = MassSpecDatabaseAdapter(bind=False)
db.host = '129.138.12.160'
db.name = 'massspecdata'
db.username = 'jross'
db.password = 'Jross40*39'
db.kind = 'mysql'
db.connect(test=False)
def fix_reference_detector(rd, aid):
with db.session_ctx() as sess:
q = sess.query(AnalysesTable)
q = q.filter(AnalysesTable.AnalysisID == aid)
record = q.one()
q = sess.query(DetectorTable)
q = q.join(IsotopeTable)
q = q.join(AnalysesTable)
q = q.filter(AnalysesTable.AnalysisID == aid)
for r in q.all():
if r.Label == rd:
                print('setting refid current={} new={}'.format(record.RefDetID, r.DetectorID))
record.RefDetID = r.DetectorID
def fix_reference_detectors(path):
with open(path) as rfile:
for line in rfile:
line = line.strip()
if line:
aid = int(line)
fix_reference_detector('H2', aid)
# break
path = '/Users/ross/Desktop/Untitled.csv'
fix_reference_detectors(path)
# ============= EOF =============================================
| [
"[email protected]"
]
| |
351ffee98beaac269739f12103d72257fded664c | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r9/Gen/DecFiles/options/12267141.py | fde8c5e02df48c536fc9078ec662b3e30932602c | []
| no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,212 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r9/Gen/DecFiles/options/12267141.py generated: Fri, 27 Mar 2015 16:10:10
#
# Event Type: 12267141
#
# ASCII decay Descriptor: [B+ -> (D~0 -> (KS0 -> pi+ pi-) K+ K-) K+ pi- pi+]cc
#
from Configurables import Generation
Generation().EventType = 12267141
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Bu_D0Kpipi,KSKK=addResTuned,TightCut,PHSP.dec"
Generation().SignalRepeatedHadronization.CutTool = "LoKi::GenCutTool/TightCut"
Generation().SignalRepeatedHadronization.SignalPIDList = [ 521,-521 ]
#
from Configurables import LoKi__GenCutTool
from Gauss.Configuration import *
Generation().SignalRepeatedHadronization.addTool ( LoKi__GenCutTool , 'TightCut' )
tightCut = Generation().SignalRepeatedHadronization.TightCut
tightCut.Decay = '^[B+ ==> ^(D~0 => ^(KS0 ==> ^pi+ ^pi-) ^K+ ^K-) ^K+ ^pi- ^pi+]CC'
tightCut.Preambulo += [
'GVZ = LoKi.GenVertices.PositionZ() ' ,
'from GaudiKernel.SystemOfUnits import millimeter',
'inAcc = (in_range (0.005, GTHETA, 0.400))',
'goodB = (GP > 55000 * MeV) & (GPT > 5000 * MeV) & (GTIME > 0.135 * millimeter)',
'goodD = (GP > 25000 * MeV) & (GPT > 2500 * MeV)',
'goodKS = (GFAEVX(abs(GVZ), 0) < 2500.0 * millimeter)',
'goodDDaugPi = (GNINTREE ((("K+" == GABSID) | ("pi+" == GABSID)) & (GP > 2000 * MeV) & inAcc, 4) > 3.5)',
'goodKsDaugPi = (GNINTREE (("pi+" == GABSID) & (GP > 2000 * MeV) & inAcc, 4) > 1.5)',
'goodBachKPia = (GNINTREE ((("K+" == GABSID) | ("pi+" == GABSID)) & (GP > 2000 * MeV) & (GPT > 100 * MeV) & inAcc, 4) > 4.5)',
'goodBachKPib = (GNINTREE ((("K+" == GABSID) | ("pi+" == GABSID)) & (GP > 2000 * MeV) & (GPT > 300 * MeV) & inAcc, 4) > 1.5)',
]
tightCut.Cuts = {
'[B+]cc' : 'goodB & goodBachKPia & goodBachKPib',
'[D0]cc' : 'goodD & goodDDaugPi',
'[KS0]cc' : 'goodKS & goodKsDaugPi',
'[pi+]cc' : 'inAcc'
}
# Ad-hoc particle gun code
from Configurables import ParticleGun
pgun = ParticleGun("ParticleGun")
pgun.SignalPdgCode = 521
pgun.DecayTool = "EvtGenDecay"
pgun.GenCutTool = "DaughtersInLHCb"
from Configurables import FlatNParticles
pgun.NumberOfParticlesTool = "FlatNParticles"
pgun.addTool( FlatNParticles , name = "FlatNParticles" )
from Configurables import MomentumSpectrum
pgun.ParticleGunTool = "MomentumSpectrum"
pgun.addTool( MomentumSpectrum , name = "MomentumSpectrum" )
pgun.MomentumSpectrum.PdgCodes = [ 521,-521 ]
pgun.MomentumSpectrum.InputFile = "$PGUNSDATAROOT/data/Ebeam4000GeV/MomentumSpectrum_521.root"
pgun.MomentumSpectrum.BinningVariables = "pteta"
pgun.MomentumSpectrum.HistogramPath = "h_pteta"
from Configurables import BeamSpotSmearVertex
pgun.addTool(BeamSpotSmearVertex, name="BeamSpotSmearVertex")
pgun.VertexSmearingTool = "BeamSpotSmearVertex"
pgun.EventType = 12267141
| [
"[email protected]"
]
| |
91f2f0326c39a0175aa8510fe1f285d347efdc54 | fb383c3550cdcb1514df17a2e5d87b453240e4a5 | /baekjoon/13706.py | ee0b9f933b5e253d683e4f1962ab3f59ba8fa62b | []
| no_license | mingxoxo/Algorithm | 5ba84afebe1b0125d43011403610619804c107be | dc352f591c6c8ed84f7dbbeb37a2df4178270605 | refs/heads/master | 2023-07-30T17:38:00.190966 | 2023-07-30T08:51:23 | 2023-07-30T08:51:23 | 205,862,682 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 381 | py | # 제곱근
# 23.05.11
# https://www.acmicpc.net/problem/13706
def square_root(n: int) -> int:
    # binary search over [1, n // 2] for the integer square root of n
    start, end = 1, n // 2
while start <= end:
mid = (start + end) // 2
if mid * mid == n:
return mid
elif mid * mid < n:
start = mid + 1
else:
end = mid - 1
    # reached only when n == 1 (end == 0, so the loop never runs);
    # the input is assumed to be a perfect square
    return 1
N = int(input())
print(square_root(N))
| [
"[email protected]"
]
| |
ea57c7126e4dac9e0f5ce7e56a0f128fa3a91ef5 | a0dfeb01fd15550961b7e15c504327ea37ce4dea | /home_and_login/migrations/0001_initial.py | 0436e1fa98f1ffce003621db14932c8e78e495f9 | []
| no_license | theparadoxer02/Colossus | 34c4d99f7d14caa0c464036d25b776dde31c4ec0 | 7d95024acea42b46b598923aef80080cd7890fa2 | refs/heads/master | 2021-01-19T22:59:25.057466 | 2017-09-25T18:03:37 | 2017-09-25T18:03:37 | 88,902,426 | 2 | 1 | null | 2020-07-11T12:45:14 | 2017-04-20T19:20:42 | CSS | UTF-8 | Python | false | false | 1,320 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-26 19:50
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='user_details',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('full_name', models.CharField(max_length=50)),
('profile_link', models.URLField(default='/welcome_user')),
('dob', models.DateField(null=True)),
('intro', models.CharField(max_length=200, null=True)),
('photo_link', models.URLField(default='/static/sitewide/anonymous-male.png')),
('followers_total', models.IntegerField(default=0)),
('following_total', models.IntegerField(default=0)),
('projects_total', models.IntegerField(default=0)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
]
| |
ffd724c24a3e8a69f6d6d092e453a4544c9d2d41 | 311b08b547e51907fe88e177817f10e5754dedbe | /tests/test_module.py | 6e800f1af7fe1dbdbb9dbd206ee1e3478bbd22a4 | [
"BSD-3-Clause"
]
| permissive | kernc/dill | a17187545ffd6fa3c410ae4763579744f8238326 | 11effa42b8c486b2139125d094988f6f68595b3c | refs/heads/master | 2020-07-05T14:11:30.681507 | 2016-11-16T12:27:42 | 2016-11-16T12:27:42 | 74,115,619 | 0 | 0 | null | 2016-11-18T09:34:14 | 2016-11-18T09:34:14 | null | UTF-8 | Python | false | false | 1,620 | py | #!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2008-2016 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/pathos/browser/dill/LICENSE
import sys
import dill
import test_mixins as module
try: from imp import reload
except ImportError: pass
dill.settings['recurse'] = True
cached = (module.__cached__ if hasattr(module, "__cached__")
else module.__file__.split(".", 1)[0] + ".pyc")
module.a = 1234
pik_mod = dill.dumps(module)
module.a = 0
# remove module
del sys.modules[module.__name__]
del module
module = dill.loads(pik_mod)
assert hasattr(module, "a") and module.a == 1234
assert module.double_add(1, 2, 3) == 2 * module.fx
# Restart, and test use_diff
reload(module)
try:
dill.use_diff()
module.a = 1234
pik_mod = dill.dumps(module)
module.a = 0
# remove module
del sys.modules[module.__name__]
del module
module = dill.loads(pik_mod)
assert hasattr(module, "a") and module.a == 1234
assert module.double_add(1, 2, 3) == 2 * module.fx
except AttributeError:
pass
# clean up
import os
os.remove(cached)
pycache = os.path.join(os.path.dirname(module.__file__), "__pycache__")
if os.path.exists(pycache) and not os.listdir(pycache):
os.removedirs(pycache)
# test when module is None
import math
def get_lambda(str, **kwarg):
return eval(str, kwarg, None)
obj = get_lambda('lambda x: math.exp(x)', math=math)
assert obj.__module__ is None
assert dill.copy(obj)(3) == obj(3)
# EOF
| [
"mmckerns@8bfda07e-5b16-0410-ab1d-fd04ec2748df"
]
| mmckerns@8bfda07e-5b16-0410-ab1d-fd04ec2748df |
236514cc0ec78312be28461c4e33da0ed917ada1 | b06bceb8fdc24e0c890fb2201c535cb660a94f86 | /onmt/legacy/old_models/memory_transformer.py | 0780b03908c7b58a6a7004d0cda272b70159d5a7 | [
"MIT"
]
| permissive | quanpn90/NMTGMinor | 7f294b40763b3f586d34ef4985799b851052f2ed | 5e1e424d0d9c2135a456e372a2ea9ee49de5bd2c | refs/heads/master | 2023-08-22T14:53:31.420276 | 2023-08-21T08:26:49 | 2023-08-21T08:26:49 | 116,663,163 | 92 | 39 | NOASSERTION | 2023-07-31T15:07:35 | 2018-01-08T10:33:56 | HTML | UTF-8 | Python | false | false | 32,849 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
from onmt.modules.relative_attention import RelPartialLearnableMultiHeadAttn
from onmt.models.transformer_layers import PositionalEncoding, PrePostProcessing
from onmt.models.transformer_layers import EncoderLayer, DecoderLayer
from onmt.models.transformers import TransformerEncoder, TransformerDecoder, TransformerDecodingState
import onmt
from onmt.modules.bottle import Bottle
from onmt.modules.dropout import embedded_dropout
from onmt.models.transformer_layers import XavierLinear, MultiHeadAttention, FeedForward, PrePostProcessing
from onmt.models.transformer_layers import EncoderLayer, DecoderLayer
from onmt.models.relative_transformer_layers import RelativeTransformerEncoderLayer, RelativeTransformerDecoderLayer
from onmt.legacy.old_models.unified_transformer import UnifiedTransformer
from onmt.models.relative_transformer import SinusoidalPositionalEmbedding, LearnablePostionEmbedding, \
StreamState, StreamDecodingState
from onmt.utils import flip, expected_length
from collections import defaultdict
import math
import numpy as np  # required by renew_buffer below
def seperate_tensor(input, lengths):
    """Split a packed time-first tensor into one segment per entry in lengths."""
    bsz, tgt_len = input.size(1), input.size(0)
    assert (bsz == 1), "streaming mode only supports batch size 1"
outputs = list()
# starting from the first position of the tensor
offset = 0
for length in lengths:
segment = input.narrow(0, offset, length)
offset += length
outputs.append(segment)
return outputs
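# Minimal usage sketch (hypothetical shapes): splitting a packed stream of two
# sentences of lengths 4 and 6 back into segments:
#   packed = torch.zeros(10, 1, 8)
#   first, second = seperate_tensor(packed, [4, 6])
#   assert first.size(0) == 4 and second.size(0) == 6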
class MemoryTransformerDecoderLayer(nn.Module):
def __init__(self, h, d_model, p, d_ff, attn_p=0.1, version=1.0, ignore_source=False,
variational=False, death_rate=0.0):
super(MemoryTransformerDecoderLayer, self).__init__()
self.version = version
self.ignore_source = ignore_source
self.variational = variational
self.death_rate = death_rate
self.preprocess_attn = PrePostProcessing(d_model, p, sequence='n')
self.postprocess_attn = PrePostProcessing(d_model, p, sequence='da', variational=self.variational)
self.preprocess_ffn = PrePostProcessing(d_model, p, sequence='n')
self.postprocess_ffn = PrePostProcessing(d_model, p, sequence='da', variational=self.variational)
d_head = d_model // h
self.multihead_tgt = RelPartialLearnableMultiHeadAttn(h, d_model, d_head, dropatt=attn_p)
if onmt.constants.activation_layer == 'linear_relu_linear':
ff_p = p
feedforward = FeedForward(d_model, d_ff, ff_p, variational=self.variational)
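        # NOTE: MaxOut and FeedForwardSwish in the branches below are assumed to
        # come from the surrounding onmt package; they are not imported at the
        # top of this file.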
elif onmt.constants.activation_layer == 'maxout':
k = int(math.ceil(d_ff / d_model))
feedforward = MaxOut(d_model, d_model, k)
elif onmt.constants.activation_layer == 'linear_swish_linear':
ff_p = p
feedforward = FeedForwardSwish(d_model, d_ff, ff_p)
else:
raise NotImplementedError
self.feedforward = Bottle(feedforward)
def forward(self, input_, context, pos_emb, mask_tgt, mask_src, mems=None,
incremental=False, incremental_cache=None):
# incremental=False, incremental_cache=None, reuse_source=True):
""" Self attention layer with memory
layernorm > attn > dropout > residual
"""
        assert context is None, "This model does not have a context encoder"
coin = True
if self.training and self.death_rate > 0:
coin = (torch.rand(1)[0].item() >= self.death_rate)
if coin:
# input and context should be time first ?
query = self.preprocess_attn(input_)
if mems is not None and mems.size(0) > 0:
mems = self.preprocess_attn(mems)
else:
mems = None
# out, _ = self.multihead_tgt(query, pos_emb, r_w_bias, r_r_bias, attn_mask=mask_tgt)
out, _, incremental_cache = self.multihead_tgt(query, pos_emb, attn_mask=mask_tgt,
incremental=incremental, incremental_cache=incremental_cache)
# rescaling before residual
if self.training and self.death_rate > 0:
out = out / (1 - self.death_rate)
input_ = self.postprocess_attn(out, input_)
""" Context Attention layer
layernorm > attn > dropout > residual
"""
coverage = None
""" Feed forward layer
layernorm > ffn > dropout > residual
"""
out = self.feedforward(self.preprocess_ffn(input_))
# rescaling before residual
if self.training and self.death_rate > 0:
out = out / (1 - self.death_rate)
input_ = self.postprocess_ffn(out, input_)
else:
coverage = None
if incremental:
return input_, coverage, incremental_cache
return input_, coverage
def step(self, input, context, pos_emb, mask_tgt, mask_src, buffer=None):
""" Self attention layer
layernorm > attn > dropout > residual
"""
query = self.preprocess_attn(input)
out, _, buffer = self.multihead_tgt(query, pos_emb, attn_mask=mask_tgt, buffer=buffer)
input = self.postprocess_attn(out, input)
""" Feed forward layer
layernorm > ffn > dropout > residual
"""
out = self.feedforward(self.preprocess_ffn(input))
input = self.postprocess_ffn(out, input)
        coverage = None  # this model has no encoder-decoder attention
        return input, coverage, buffer
class MemoryTransformer(UnifiedTransformer):
"""
This class combines the encoder and the decoder into one single sequence
Joined attention between encoder and decoder parts
"""
def __init__(self, opt, src_embedding, tgt_embedding, generator, positional_encoder,
language_embeddings=None, encoder_type='text', **kwargs):
self.death_rate = opt.death_rate
self.bidirectional = opt.bidirectional
self.layer_modules = []
self.learnable_position_encoding = opt.learnable_position_encoding
self.max_memory_size = opt.max_memory_size
self.mem_len = self.max_memory_size
self.dictionary = kwargs.get('dictionary', None)
# build_modules will be called from the inherited constructor
super(MemoryTransformer, self).__init__(opt, tgt_embedding, src_embedding,
generator, positional_encoder,
language_embeddings=language_embeddings,
encoder_type=encoder_type)
self.src_embedding = src_embedding
self.tgt_embedding = tgt_embedding
# self.language_embedding = nn.Embedding(3, self.model_size, padding_idx=0)
self.generator = generator
self.ignore_source = True
self.encoder_type = opt.encoder_type
# learnable position encoding
if self.learnable_position_encoding:
self.max_pos_length = opt.max_pos_length
# pos_emb = self.model_size // self.n_heads
pos_emb = self.model_size
self.positional_encoder = LearnablePostionEmbedding(self.max_pos_length, pos_emb)
print("* Learnable position encoding with max %d positions" % self.max_pos_length)
else:
# or using pre-set sinusoidal
self.positional_encoder = SinusoidalPositionalEmbedding(opt.model_size)
# self.positional_encoder = SinusoidalPositionalEmbedding(opt.model_size)
self.d_head = self.model_size // self.n_heads
def gen_mask(self, src, tgt):
# generate the mask for the mini-batch data
# both src and tgt are T x B
input_seq = torch.cat([src, tgt], dim=0)
seq_len = input_seq.size(0)
if self.bidirectional:
bsz, src_len = src.size(1), src.size(0)
tgt_len = tgt.size(0)
tgt_tgt_mask = torch.triu(src.new_ones(tgt_len, tgt_len), diagonal=1)
tgt_src_mask = src.new_zeros(tgt_len, src_len)
tgt_mask = torch.cat([tgt_src_mask, tgt_tgt_mask], dim=-1)
src_src_mask = src.new_zeros(src_len, src_len)
src_tgt_mask = src.new_ones(src_len, tgt_len)
src_mask = torch.cat([src_src_mask, src_tgt_mask], dim=-1)
attn_mask = torch.cat([src_mask, tgt_mask], dim=0)
attn_mask = attn_mask.bool().unsqueeze(-1)
pad_mask = input_seq.eq(onmt.constants.PAD).unsqueeze(0)
attn_mask = attn_mask | pad_mask
else:
attn_mask = torch.triu(src.new_ones(seq_len, seq_len), diagonal=1).bool().unsqueeze(-1) # T x T x -1
pad_mask = input_seq.eq(onmt.constants.PAD).unsqueeze(0) # 1 x T x B
# attn_mask = self.mask[:seq_len, :seq_len] + input_seq.eq(onmt.constants.PAD).byte().unsqueeze(1)
attn_mask = attn_mask | pad_mask
return attn_mask
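    # Illustration (hypothetical sizes): in the unidirectional case with
    # src_len=2 and tgt_len=3, attn_mask starts as the strict upper triangle of
    # a 5x5 matrix (True above the diagonal) OR-ed with the padding mask, so
    # each position may attend only to itself and earlier positions.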
def build_modules(self):
e_length = expected_length(self.layers, self.death_rate)
print("* Transformer Decoder with Relative Attention with %.2f expected layers" % e_length)
self.layer_modules = nn.ModuleList()
for l in range(self.layers):
# linearly decay the death rate
death_r = (l + 1.0) / self.layers * self.death_rate
block = MemoryTransformerDecoderLayer(self.n_heads, self.model_size,
self.dropout, self.inner_size, self.attn_dropout,
ignore_source=True,
variational=self.variational_dropout, death_rate=death_r)
self.layer_modules.append(block)
def create_mask_stream(self, src, tgt, src_lengths, tgt_lengths, mem_length=0):
if self.bidirectional:
mask = None
prev_length = 0
# go through the src and tgt lengths to create mask
for i, (src_len, tgt_len) in enumerate(zip(src_lengths, tgt_lengths)):
# print("Step ", i, src_len, tgt_len)
# first, the source sentence should have full bidirectional attention to the end of itself
src_mask = src.new_zeros(src_len, src_len + prev_length)
if prev_length == 0:
mask = src_mask
else:
# everything in the past doesn't look at the future
prev_mask = src.new_ones(prev_length, src_len)
if mask is not None:
mask = torch.cat([mask, prev_mask], dim=1) # prev_len x (src_len + prev_length)
else:
mask = prev_mask
mask = torch.cat([mask, src_mask], dim=0) # (src_len + prev_length) x (src_len + prev_length)
prev_length += src_len
# the target sentence
# everything in the past doesn't look at the future
prev_mask = tgt.new_ones(prev_length, tgt_len)
# the target has unidirectional attention towards everything in the past
mlen = prev_length
qlen = tgt_len
klen = qlen + mlen
tgt_mask = torch.triu(tgt.new_ones(qlen, klen), diagonal=1 + mlen)
mask = torch.cat([mask, prev_mask], dim=1) # prev_len x (prev_len + tgt_len)
mask = torch.cat([mask, tgt_mask], dim=0) #
prev_length += tgt_len
if mem_length > 0:
past_mask = src.new_zeros(prev_length, mem_length)
mask = torch.cat([past_mask, mask], dim=1)
attn_mask = mask.bool().unsqueeze(-1)
else:
seq_len = sum(src_lengths) + sum(tgt_lengths)
# mask = torch.triu(src.new_ones(seq_len, seq_len), diagonal=1)
# if mem_length > 0:
# past_mask = src.new_zeros(seq_len, mem_length)
# mask = torch.cat([past_mask, mask], dim=1)
mask = torch.triu(src.new_ones(seq_len, seq_len + mem_length), diagonal=1 + mem_length)
attn_mask = mask.bool().unsqueeze(-1)
return attn_mask
def forward_stream(self, batch, **kwargs):
streaming_state = kwargs.get('streaming_state', None)
mems = streaming_state.mems
src = batch.get('source') # src_len x batch_size
tgt = batch.get('target_input') # (len_tgt x batch_size) x 1
bsz = src.size(1)
assert bsz == 1
src_lang = batch.get('source_lang')
tgt_lang = batch.get('target_lang')
src_lengths = batch.src_lengths
tgt_lengths = batch.tgt_lengths
# First: separate the input tensor into segments
src_segments = seperate_tensor(src, src_lengths)
tgt_segments = seperate_tensor(tgt, tgt_lengths)
# if self.dictionary is not None:
# for src_, tgt_ in zip(src_segments, tgt_segments):
# src_ = src_.squeeze(1)
# tgt_ = tgt_.squeeze(1)
#
# src_words = " ".join(self.dictionary.convertToLabels(src_, onmt.constants.EOS))
# tgt_words = " ".join(self.dictionary.convertToLabels(tgt_, onmt.constants.EOS))
# print(src_words, tgt_words)
# input("Press any key to continue...")
# Embedding stage (and scale the embedding)
embed = self.src_embedding
if self.word_dropout > 0 and self.training:
mask = embed.weight.new().resize_((embed.weight.size(0), 1)). \
bernoulli_(1 - self.word_dropout).expand_as(embed.weight) / (1 - self.word_dropout)
masked_embed_weight = mask * embed.weight
else:
masked_embed_weight = embed.weight
padding_idx = embed.padding_idx
if padding_idx is None:
padding_idx = -1
# Second: Embedding
src_embeddings = []
for src_segment in src_segments:
src_emb = F.embedding(
src_segment, masked_embed_weight, padding_idx, embed.max_norm,
embed.norm_type, embed.scale_grad_by_freq, embed.sparse)
src_emb.mul_(math.sqrt(self.model_size))
if self.use_language_embedding:
if self.language_embedding_type in ["sum", "all_sum"]:
src_lang_emb = self.language_embeddings(src_lang)
src_emb += src_lang_emb
src_embeddings.append(src_emb)
tgt_embeddings = []
for tgt_segment in tgt_segments:
tgt_emb = F.embedding(
tgt_segment, masked_embed_weight, padding_idx, embed.max_norm,
embed.norm_type, embed.scale_grad_by_freq, embed.sparse)
tgt_emb.mul_(math.sqrt(self.model_size))
if self.use_language_embedding:
if self.language_embedding_type in ["sum", "all_sum"]:
tgt_lang_emb = self.language_embeddings(tgt_lang)
tgt_emb += tgt_lang_emb
tgt_embeddings.append(tgt_emb)
# add src1, tgt1, src2, tgt2 .... srcn, tgtn
all_embeddings = []
for (src_emb, tgt_emb) in zip(src_embeddings, tgt_embeddings):
all_embeddings.append(src_emb)
all_embeddings.append(tgt_emb)
emb = torch.cat(all_embeddings, dim=0)
# prepare attention mask
mem_length = streaming_state.mems[0].size(0) if mems is not None else 0
attn_mask = self.create_mask_stream(src, tgt, src_lengths, tgt_lengths, mem_length=mem_length)
qlen = emb.size(0)
klen = emb.size(0) + mem_length
if self.bidirectional:
pos = torch.arange(klen - 1, -klen, -1.0, device=emb.device, dtype=emb.dtype)
else:
pos = torch.arange(klen - 1, -1, -1.0, device=emb.device, dtype=emb.dtype)
pos_emb = self.positional_encoder(pos)
output = emb
# Applying dropout
output = self.preprocess_layer(output)
pos_emb = self.preprocess_layer(pos_emb)
hids = [output]
# FORWARD PASS
coverage = None
for i, layer in enumerate(self.layer_modules):
mems_i = None if mems is None else mems[i]
output, coverage = layer(output, None, pos_emb, attn_mask, None, mems=mems_i)
# context and context_mask are None
hids.append(output)
# final layer norm
output = self.postprocess_layer(output)
# update the memory and then prune
streaming_state.update_mems(hids, qlen)
# now we have to separate the target states from the "output" to generate translations
target_outputs = []
contexts = []
offset = 0
for (src_len, tgt_len) in zip(src_lengths, tgt_lengths):
source_output = output.narrow(0, offset, src_len)
offset += src_len
target_output = output.narrow(0, offset, tgt_len)
offset += tgt_len
target_outputs.append(target_output)
contexts.append(source_output)
context = torch.cat(contexts, dim=0)
output = torch.cat(target_outputs, dim=0)
output_dict = {'hidden': output, 'coverage': coverage, 'context': context, 'src': src,
'target_mask': None}
output_dict = defaultdict(lambda: None, output_dict)
# final layer: computing log probabilities
logprobs = self.generator[0](output_dict)
output_dict['logprobs'] = logprobs
output_dict['streaming_state'] = streaming_state
return output_dict
def forward(self, batch, target_mask=None, streaming=False, **kwargs):
if streaming:
return self.forward_stream(batch, **kwargs)
src = batch.get('source') # src_len x batch_size
tgt = batch.get('target_input') # len_tgt x batch_size
src_pos = batch.get('source_pos')
tgt_pos = batch.get('target_pos')
src_lang = batch.get('source_lang')
tgt_lang = batch.get('target_lang')
tgt_len = tgt.size(0)
src_len = src.size(0)
bsz = tgt.size(1)
# Embedding stage (and scale the embedding)
embed = self.src_embedding
if self.word_dropout > 0 and self.training:
mask = embed.weight.new().resize_((embed.weight.size(0), 1)). \
bernoulli_(1 - self.word_dropout).expand_as(embed.weight) / (1 - self.word_dropout)
masked_embed_weight = mask * embed.weight
else:
masked_embed_weight = embed.weight
padding_idx = embed.padding_idx
if padding_idx is None:
padding_idx = -1
src_emb = F.embedding(
src, masked_embed_weight, padding_idx, embed.max_norm,
embed.norm_type, embed.scale_grad_by_freq, embed.sparse)
src_emb.mul_(math.sqrt(self.model_size))
tgt_emb = F.embedding(
tgt, masked_embed_weight, padding_idx, embed.max_norm,
embed.norm_type, embed.scale_grad_by_freq, embed.sparse)
tgt_emb.mul_(math.sqrt(self.model_size))
if self.use_language_embedding:
if self.language_embedding_type in ["sum", "all_sum"]:
src_lang_emb = self.language_embeddings(src_lang)
src_emb += src_lang_emb
tgt_lang_emb = self.language_embeddings(tgt_lang)
tgt_emb += tgt_lang_emb
else:
raise NotImplementedError
# concatenate embedding
emb = torch.cat([src_emb, tgt_emb], dim=0) # L x batch_size x H
# prepare self-attention mask
attn_mask = self.gen_mask(src, tgt)
# pos = torch.arange(klen - 1, -1, -1.0, device=emb.device, dtype=emb.dtype)
klen = src_len + tgt_len
if self.bidirectional:
pos = torch.arange(klen - 1, -klen, -1.0, device=emb.device, dtype=emb.dtype)
else:
pos = torch.arange(klen - 1, -1, -1.0, device=emb.device, dtype=emb.dtype)
pos_emb = self.positional_encoder(pos)
output = emb
# Applying dropout
output = self.preprocess_layer(output)
pos_emb = self.preprocess_layer(pos_emb)
# FORWARD PASS
coverage = None
for i, layer in enumerate(self.layer_modules):
            # context and context_mask are None; with incremental=False the
            # layer returns an (output, coverage) pair
            output, coverage = layer(output, None, pos_emb, attn_mask, None)
# Final normalization
output = self.postprocess_layer(output)
# extract the "source" and "target" parts of the output
context = output[:src_len, :, :]
output = output[-tgt_len:, :, :]
output_dict = {'hidden': output, 'coverage': coverage, 'context': context, 'src': src,
'target_mask': target_mask}
# final layer: computing log probabilities
logprobs = self.generator[0](output_dict)
output_dict['logprobs'] = logprobs
return output_dict
def encode(self, input, decoder_state, input_pos=None, input_lang=None):
buffers = decoder_state.attention_buffers
src_lang = input_lang
input = input.transpose(0, 1)
# Embedding stage (and scale the embedding)
src_emb = embedded_dropout(self.src_embedding, input, dropout=self.word_dropout if self.training else 0) \
* math.sqrt(self.model_size)
if self.use_language_embedding:
if self.language_embedding_type in ["sum", "all_sum"]:
src_lang_emb = self.language_embeddings(src_lang)
src_emb += src_lang_emb
emb = src_emb
src_len = input.size(0)
bsz = input.size(1)
mask_src_src = input.eq(onmt.constants.PAD).expand(src_len, src_len, bsz)
buffer = buffers[0] if 0 in buffers else None
if buffer is not None:
mem_len = buffer['k'].size(0)
else:
mem_len = 0
if mem_len > 0:
# print(mask_src_src.size())
past_mask = input.new_zeros(src_len, mem_len).bool().unsqueeze(-1).expand(src_len, mem_len, bsz)
mask_src_src = torch.cat([past_mask, mask_src_src], dim=1)
mask_src = mask_src_src
attn_mask = mask_src.bool() # L x L x batch_size
output = emb
klen = src_len + mem_len
pos = torch.arange(klen - 1, -klen, -1.0, device=emb.device, dtype=emb.dtype)
pos_emb = self.positional_encoder(pos)
# FORWARD PASS
coverage = None
for i, layer in enumerate(self.layer_modules):
# context and context_mask are None
buffer = buffers[i] if i in buffers else None
# if i == 0 and buffer is not None:
# key = next(iter(buffer))
# print(buffer[key].size())
# output, coverage, buffer = layer.step(output, None, attn_mask, None, buffer)
output, coverage, buffer = layer(output, None, pos_emb, attn_mask, None,
incremental=True, incremental_cache=buffer)
decoder_state.update_attention_buffer(buffer, i)
# Final normalization
output = self.postprocess_layer(output)
return output, decoder_state
def decode(self, batch):
"""
:param batch: (onmt.Dataset.Batch) an object containing tensors needed for training
:return: gold_scores (torch.Tensor) log probs for each sentence
gold_words (Int) the total number of non-padded tokens
allgold_scores (list of Tensors) log probs for each word in the sentence
"""
# raise NotImplementedError
tgt_output = batch.get('target_output')
output_dict = self.forward(batch, target_mask=None)
context = output_dict['context']
logprobs = output_dict['logprobs']
batch_size = logprobs.size(1)
gold_scores = context.new(batch_size).zero_()
gold_words = 0
allgold_scores = list()
for gen_t, tgt_t in zip(logprobs, tgt_output):
tgt_t = tgt_t.unsqueeze(1)
scores = gen_t.gather(1, tgt_t)
scores.masked_fill_(tgt_t.eq(onmt.constants.PAD), 0)
gold_scores += scores.squeeze(1).type_as(gold_scores)
gold_words += tgt_t.ne(onmt.constants.PAD).sum().item()
allgold_scores.append(scores.squeeze(1).type_as(gold_scores))
return gold_words, gold_scores, allgold_scores
def renew_buffer(self, new_len):
# This model uses pre-allocated position encoding
self.positional_encoder.renew(new_len)
mask = torch.ByteTensor(np.triu(np.ones((new_len + 1, new_len + 1)), k=1).astype('uint8'))
self.register_buffer('mask', mask)
return
def reset_states(self):
return
def step(self, input, decoder_state, **kwargs):
src = decoder_state.src if decoder_state.src is not None else None
tgt = input.transpose(0, 1)
tgt_lang = decoder_state.tgt_lang
src_lang = decoder_state.src_lang
buffers = decoder_state.attention_buffers
tgt_len = tgt.size(0)
src_len = src.size(0)
bsz = tgt.size(1)
# Embedding stage (and scale the embedding)
# src_emb = embedded_dropout(self.src_embedding, src, dropout=self.word_dropout if self.training else 0) \
# * math.sqrt(self.model_size)
input_ = tgt[-1:]
tgt_emb = embedded_dropout(self.tgt_embedding, input_, dropout=self.word_dropout if self.training else 0) \
* math.sqrt(self.model_size)
if self.use_language_embedding:
if self.language_embedding_type in ["sum", "all_sum"]:
# src_lang_emb = self.language_embeddings(src_lang)
# src_emb += src_lang_emb
tgt_lang_emb = self.language_embeddings(tgt_lang)
tgt_emb += tgt_lang_emb
else:
raise NotImplementedError
# concatenate embedding
emb = tgt_emb
# prepare self-attention mask
# attn_mask = self.gen_mask(src, tgt)
buffer = buffers[0] if 0 in buffers else None
if buffer is not None:
mem_len = buffer['k'].size(0)
else:
mem_len = 0
qlen = tgt_len
klen = qlen + mem_len
attn_mask = torch.triu(emb.new_ones(qlen, klen), diagonal=1+mem_len).bool().unsqueeze(-1)
# last attn_mask step
attn_mask = attn_mask[-1:, :, :]
pos = torch.arange(klen - 1, -1, -1.0, device=emb.device, dtype=emb.dtype)
pos_emb = self.positional_encoder(pos)
output = emb
# Applying dropout
output = self.preprocess_layer(output)
# FORWARD PASS
coverage = None
for i, layer in enumerate(self.layer_modules):
buffer = buffers[i] if i in buffers else None
output, coverage, buffer = layer(output, None, pos_emb, attn_mask, None,
incremental=True,
incremental_cache=buffer) # context and context_mask are None
decoder_state.update_attention_buffer(buffer, i)
# Final normalization
output = self.postprocess_layer(output)
# output = output[-1:, :, :]
output_dict = defaultdict(lambda: None)
output_dict['hidden'] = output
logprobs = self.generator[0](output_dict).squeeze(0)
output_dict['src'] = decoder_state.src.transpose(0, 1)
output_dict['log_prob'] = logprobs
output_dict['coverage'] = logprobs.new(bsz, tgt_len, src_len).zero_()
# pruning
max_mem_size = self.max_memory_size + tgt_len + 1
for i in range(self.layers):
            buffer = buffers[i] if i in buffers else None
            if buffer is None:
                continue
            for k in buffer:
                v = buffer[k]
                buffer[k] = v[-max_mem_size:, :, :]
            decoder_state.update_attention_buffer(buffer, i)
return output_dict
def create_decoder_state(self, batch, beam_size=1, type=2, streaming=False, previous_decoding_state=None):
src = batch.get('source')
src_pos = batch.get('source_pos')
src_lang = batch.get('source_lang')
tgt_lang = batch.get('target_lang')
src_transposed = src.transpose(0, 1) # B x T
if previous_decoding_state is None:
decoder_state = TransformerDecodingState(src, tgt_lang, None, None,
beam_size=beam_size, model_size=self.model_size, type=type,
cloning=True)
else:
src = src.repeat(1, beam_size)
decoder_state = TransformerDecodingState(src, tgt_lang, None, None,
beam_size=beam_size, model_size=self.model_size,
type=type, cloning=False)
decoder_state.attention_buffers = previous_decoding_state.attention_buffers
# forward pass through the input to get the buffer
src_transposed = src_transposed.repeat(beam_size, 1)
encoder_output, decoder_state = self.encode(src_transposed, decoder_state, input_pos=src_pos,
input_lang=src_lang)
decoder_state.src_lang = src_lang
# buffers = decoder_state.attention_buffers
# bsz = src.size(1)
# new_order = torch.arange(bsz).view(-1, 1).repeat(1, beam_size).view(-1)
# new_order = new_order.to(src.device)
#
# for l in buffers:
# buffer_ = buffers[l]
# if buffer_ is not None:
# for k in buffer_.keys():
# t_, br_, d_ = buffer_[k].size()
# buffer_[k] = buffer_[k].index_select(1, new_order) # 1 for time first
return decoder_state
def tie_weights(self):
assert self.generator is not None, "The generator needs to be created before sharing weights"
self.generator[0].linear.weight = self.tgt_embedding.weight
def share_enc_dec_embedding(self):
self.src_embedding.weight = self.tgt_embedding.weight
def init_stream(self):
param = next(self.parameters())
layers = self.layers
streaming_state = MemoryState(layers, self.max_memory_size, param.device, param.dtype)
return streaming_state
def set_memory_size(self, src_memory_size, tgt_memory_size):
self.max_memory_size = src_memory_size + tgt_memory_size
class MemoryState(object):
def __init__(self, nlayers, mem_len, device, dtype):
self.mem_len = mem_len
self.mems = []
self.nlayers = nlayers
# n+1 memory slots (embeddings and n layers)
# but maybe we don't need to store the upper layer?
for i in range(self.nlayers + 1):
empty = torch.empty(0, dtype=dtype, device=device)
self.mems.append(empty)
def update_mems(self, hids, qlen):
# does not deal with None
if self.mems is None:
return None
mlen = self.mems[0].size(0) if self.mems is not None else 0
# mems is not None
assert len(hids) == len(self.mems), 'len(hids) != len(mems)'
# There are `mlen + qlen` steps that can be cached into mems
# For the next step, the last `ext_len` of the `qlen` tokens
# will be used as the extended context. Hence, we only cache
# the tokens from `mlen + qlen - self.ext_len - self.mem_len`
# to `mlen + qlen - self.ext_len`.
with torch.no_grad():
new_mems = []
end_idx = mlen + qlen
beg_idx = max(0, end_idx - self.mem_len)
for i in range(len(hids)):
cat = torch.cat([self.mems[i], hids[i]], dim=0)
new_mems.append(cat[beg_idx:end_idx].detach())
# Important:
self.mems = new_mems
# self.src_buffer = defaultdict(lambda: None)
# self.prev_src_mem_size = 0
# self.src_lengths = []
# self.tgt_buffer = defaultdict(lambda: None)
# self.prev_tgt_mem_size = 0
# self.tgt_lengths = []
#
# self.context_memory = None
# def init_mems(self):
# if self.mem_len > 0:
# mems = []
# param = next(self.parameters())
# for i in range(self.n_layer + 1):
# empty = torch.empty(0, dtype=param.dtype, device=param.device)
# mems.append(empty)
#
# return mems
# else:
# return None
| [
"[email protected]"
]
| |
9394c9c8fd1b27eac0dd7b4bc2d82db67266668f | 1d2bbeda56f8fede69cd9ebde6f5f2b8a50d4a41 | /easy/python3/c0009_28_implement-strstr/00_leetcode_0009.py | 3f3da727753e9087912c82010101009b9d93cff2 | []
| no_license | drunkwater/leetcode | 38b8e477eade68250d0bc8b2317542aa62431e03 | 8cc4a07763e71efbaedb523015f0c1eff2927f60 | refs/heads/master | 2020-04-06T07:09:43.798498 | 2018-06-20T02:06:40 | 2018-06-20T02:06:40 | 127,843,545 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 892 | py | # DRUNKWATER TEMPLATE(add description and prototypes)
# Question Title and Description on leetcode.com
# Function Declaration and Function Prototypes on leetcode.com
#28. Implement strStr()
#Implement strStr().
#Return the index of the first occurrence of needle in haystack, or -1 if needle is not part of haystack.
#Example 1:
#Input: haystack = "hello", needle = "ll"
#Output: 2
#Example 2:
#Input: haystack = "aaaaa", needle = "bba"
#Output: -1
#Clarification:
#What should we return when needle is an empty string? This is a great question to ask during an interview.
#For the purpose of this problem, we will return 0 when needle is an empty string. This is consistent to C's strstr() and Java's indexOf().
#class Solution:
# def strStr(self, haystack, needle):
# """
# :type haystack: str
# :type needle: str
# :rtype: int
# """
# Time Is Money
| [
"[email protected]"
]
| |
80a26aad0a1f115f682e53ed5d47c9cbfd137809 | 03f9666687a147bfd6bace2adfbab6de8879e207 | /plugins/action/device_credential.py | 2d88230b22e84c11fad07c6f85152d6c6fde77a6 | [
"MIT"
]
| permissive | robertcsapo/dnacenter-ansible | a221c8dc6ab68d6ccbc710e5e5f3061b90b0de59 | 33f776f8c0bc7113da73191c301dd1807e6b4a43 | refs/heads/main | 2023-07-17T08:27:59.902108 | 2021-09-06T15:58:05 | 2021-09-06T15:58:05 | 376,349,036 | 0 | 0 | MIT | 2021-06-12T17:32:27 | 2021-06-12T17:32:27 | null | UTF-8 | Python | false | false | 2,797 | py | from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
try:
from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
AnsibleArgSpecValidator,
)
except ImportError:
ANSIBLE_UTILS_IS_INSTALLED = False
else:
ANSIBLE_UTILS_IS_INSTALLED = True
from ansible.errors import AnsibleActionFail
from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
ModuleDefinition,
DNACModule,
dnac_argument_spec,
)
from ansible_collections.cisco.dnac.plugins.module_utils.definitions.device_credential import (
module_definition,
)
IDEMPOTENT = False
# Instantiate the module definition for this module
moddef = ModuleDefinition(module_definition)
# Get the argument spec for this module and add the 'state' param,
# which is common to all modules
argument_spec = moddef.get_argument_spec_dict()
argument_spec.update(dict(dnac_argument_spec(idempotent=IDEMPOTENT)))
# Get the schema conditionals, if applicable
required_if = moddef.get_required_if_list()
class ActionModule(ActionBase):
def __init__(self, *args, **kwargs):
if not ANSIBLE_UTILS_IS_INSTALLED:
raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
super(ActionModule, self).__init__(*args, **kwargs)
self._supports_async = False
self._result = None
# Checks the supplied parameters against the argument spec for this module
def _check_argspec(self):
aav = AnsibleArgSpecValidator(
data=self._task.args,
schema=dict(argument_spec=argument_spec),
schema_format="argspec",
schema_conditionals=dict(required_if=required_if),
name=self._task.action,
)
valid, errors, self._task.args = aav.validate()
if not valid:
raise AnsibleActionFail(errors)
def run(self, tmp=None, task_vars=None):
self._task.diff = False
self._result = super(ActionModule, self).run(tmp, task_vars)
self._result["changed"] = False
self._check_argspec()
dnac = DNACModule(
moddef=moddef,
params=self._task.args,
verbosity=self._play_context.verbosity,
)
state = self._task.args.get("state")
if state == "query":
dnac.exec("get")
elif state == "delete":
dnac.exec("delete")
elif state == "create":
dnac.disable_validation()
dnac.exec("post")
elif state == "update":
dnac.disable_validation()
dnac.exec("put")
self._result.update(dnac.exit_json())
return self._result
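# Hypothetical playbook usage of this action (argument names are illustrative;
# consult the collection's module documentation for the real argument spec):
#
#   - cisco.dnac.device_credential:
#       state: query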
| [
"[email protected]"
]
| |
9f5e950d9099755d4c2e22f43dda1ea777edf4d7 | 4a0ed9c079286428e44bf8bcfc82034dac041897 | /gallery/views.py | 6afa3b8cfd519dce4b362fd22970809cab865f38 | [
"MIT"
]
| permissive | lilianwaweru/Gallery | 3b57fed326e1d868c83944037fe203f35204a650 | de9b02e59dac22f45df8c7cbc0570fe7ac685d3f | refs/heads/master | 2020-05-21T07:06:38.468356 | 2019-05-14T13:03:09 | 2019-05-14T13:03:09 | 185,950,002 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,093 | py | from django.shortcuts import render
from django.http import Http404
from .models import Image, Category, Location
# Create your views here.
def welcome(request):
images = Image.objects.all()
return render(request,'welcome.html',{'images':images})
def search_category(request):
if 'category' in request.GET and request.GET["category"]:
search_term = (request.GET.get("category")).title()
searched_images = Image.search_by_category(search_term)
message = f"{search_term}"
return render(request, 'all-gallery/search.html',{"message":message,"images": searched_images})
else:
message = "You haven't searched for any category"
return render(request, 'all-gallery/search.html',{"message":message})
def display_location(request,location_id):
try:
locations = Location.objects.all()
location = Location.objects.get(id = location_id)
images = Image.objects.filter(image_location = location.id)
    except Location.DoesNotExist:
        raise Http404()
return render(request,'location.html',{'location':location,'images':images,'locations':locations})
| [
"[email protected]"
]
| |
eeaef235caf718137abbbe3d6139aac575687bbc | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/6/oan.py | af3f217dc52107525f28568b79685a2e8f0957c2 | []
| no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    # The opening and closing quotes of an oAN string literal arrive as
    # standalone tokens, e.g. (hypothetical program line): oAN " hello world "
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # drop the quote tokens and print the payload words
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            # empty string literal: print a blank line
            print
def main(fileName):
with open(fileName) as f:
for line in f:
            data = line.split()
            if not data:
                continue  # skip blank lines
            if data[0] == 'oAN':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
    main(sys.argv[1])
| [
"[email protected]"
]
| |
9855c9b0200842716863d96c8193f9f091dcf658 | 70bcdd97318a85acc5bc3f4d47afde696fb7a33b | /jqdata/gta_tables/MAC_INDUSTRY_EMPLOYWAGEQ.py | 2ebaaaaaf34a003becdbd973ddf86f715001b314 | []
| no_license | Inistlwq/tulipquant-code | f78fe3c4238e98014f6d4f36735fb65a8b88f60d | 5959bfe35b6ae2e0e2a204117bda66a13893c64c | refs/heads/master | 2020-03-31T11:25:12.145593 | 2018-04-22T02:16:16 | 2018-04-22T02:16:16 | 152,175,727 | 2 | 0 | null | 2018-10-09T02:31:34 | 2018-10-09T02:31:33 | null | UTF-8 | Python | false | false | 693 | py |
# coding: utf-8
from sqlalchemy import BigInteger, Column, DateTime, Integer, Numeric, SmallInteger, String, Table, Text, text
from sqlalchemy.dialects.mysql.base import LONGBLOB
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
metadata = Base.metadata
class MAC_INDUSTRY_EMPLOYWAGEQ(Base):
__tablename__ = "MAC_INDUSTRY_EMPLOYWAGEQ"
SGNQUARTER = Column(String(14, u'utf8_bin'), primary_key=True, nullable=False)
INDUSTRYID = Column(String(20, u'utf8_bin'), primary_key=True, nullable=False)
EMPLOY = Column(Numeric(18, 4))
STAFF = Column(Numeric(18, 4))
EMPLOYPAY = Column(Numeric(18, 4))
STAFFWAGE = Column(Numeric(18, 4))
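# Hypothetical read-only query (SQLAlchemy engine/session construction not shown;
# column and table names are exactly as declared above):
#
#   rows = (session.query(MAC_INDUSTRY_EMPLOYWAGEQ)
#           .filter(MAC_INDUSTRY_EMPLOYWAGEQ.INDUSTRYID == 'some_id')
#           .all())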
| [
"[email protected]"
]
| |
236b2156772d0785fdcae77e42fa711d3fe46373 | 9259591e4794aecd85c199b645ffb05ccbebd993 | /vendor/gevent/_sslgte279.py | 6941dcc0895fec259f5992087c7d35ce77bea1b7 | [
"Apache-2.0"
]
| permissive | swdotcom/swdc-sublime | 000e20b2d2d9fa214480d2a94fcb97a88b83f67f | eab2007ab408e44d38163a121bf95c5d2018d6e4 | refs/heads/main | 2022-03-10T05:57:44.701151 | 2022-03-09T23:53:14 | 2022-03-09T23:53:14 | 127,207,312 | 14 | 5 | Apache-2.0 | 2022-03-09T23:53:15 | 2018-03-28T22:47:16 | Python | UTF-8 | Python | false | false | 28,202 | py | # Wrapper module for _ssl. Written by Bill Janssen.
# Ported to gevent by Denis Bilenko.
"""SSL wrapper for socket objects on Python 2.7.9 and above.
For the documentation, refer to :mod:`ssl` module manual.
This module implements cooperative SSL socket wrappers.
"""
from __future__ import absolute_import
# Our import magic sadly makes this warning useless
# pylint: disable=undefined-variable
# pylint: disable=too-many-instance-attributes,too-many-locals,too-many-statements,too-many-branches
# pylint: disable=arguments-differ,too-many-public-methods
import ssl as __ssl__
_ssl = __ssl__._ssl # pylint:disable=no-member
import errno
from gevent._socket2 import socket
from gevent.socket import timeout_default
from gevent.socket import create_connection
from gevent.socket import error as socket_error
from gevent.socket import timeout as _socket_timeout
from gevent._compat import PYPY
from gevent._util import copy_globals
__implements__ = [
'SSLContext',
'SSLSocket',
'wrap_socket',
'get_server_certificate',
'create_default_context',
'_create_unverified_context',
'_create_default_https_context',
'_create_stdlib_context',
'_fileobject',
]
# Import all symbols from Python's ssl.py, except those that we are implementing
# and "private" symbols.
__imports__ = copy_globals(__ssl__, globals(),
# SSLSocket *must* subclass gevent.socket.socket; see issue 597 and 801
names_to_ignore=__implements__ + ['socket', 'create_connection'],
dunder_names_to_keep=())
try:
_delegate_methods
except NameError: # PyPy doesn't expose this detail
_delegate_methods = ('recv', 'recvfrom', 'recv_into', 'recvfrom_into', 'send', 'sendto')
__all__ = __implements__ + __imports__
if 'namedtuple' in __all__:
__all__.remove('namedtuple')
# See notes in _socket2.py. Python 3 returns much nicer
# `io` object wrapped around a SocketIO class.
if hasattr(__ssl__, '_fileobject'):
assert not hasattr(__ssl__._fileobject, '__enter__') # pylint:disable=no-member
class _fileobject(getattr(__ssl__, '_fileobject', object)): # pylint:disable=no-member
def __enter__(self):
return self
def __exit__(self, *args):
# pylint:disable=no-member
if not self.closed:
self.close()
orig_SSLContext = __ssl__.SSLContext # pylint: disable=no-member
class SSLContext(orig_SSLContext):
def wrap_socket(self, sock, server_side=False,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
server_hostname=None):
return SSLSocket(sock=sock, server_side=server_side,
do_handshake_on_connect=do_handshake_on_connect,
suppress_ragged_eofs=suppress_ragged_eofs,
server_hostname=server_hostname,
_context=self)
def create_default_context(purpose=Purpose.SERVER_AUTH, cafile=None,
capath=None, cadata=None):
"""Create a SSLContext object with default settings.
NOTE: The protocol and settings may change anytime without prior
deprecation. The values represent a fair balance between maximum
compatibility and security.
"""
if not isinstance(purpose, _ASN1Object):
raise TypeError(purpose)
context = SSLContext(PROTOCOL_SSLv23)
# SSLv2 considered harmful.
context.options |= OP_NO_SSLv2 # pylint:disable=no-member
# SSLv3 has problematic security and is only required for really old
# clients such as IE6 on Windows XP
context.options |= OP_NO_SSLv3 # pylint:disable=no-member
# disable compression to prevent CRIME attacks (OpenSSL 1.0+)
context.options |= getattr(_ssl, "OP_NO_COMPRESSION", 0) # pylint:disable=no-member
if purpose == Purpose.SERVER_AUTH:
# verify certs and host name in client mode
context.verify_mode = CERT_REQUIRED
context.check_hostname = True # pylint: disable=attribute-defined-outside-init
elif purpose == Purpose.CLIENT_AUTH:
# Prefer the server's ciphers by default so that we get stronger
# encryption
context.options |= getattr(_ssl, "OP_CIPHER_SERVER_PREFERENCE", 0) # pylint:disable=no-member
# Use single use keys in order to improve forward secrecy
context.options |= getattr(_ssl, "OP_SINGLE_DH_USE", 0) # pylint:disable=no-member
context.options |= getattr(_ssl, "OP_SINGLE_ECDH_USE", 0) # pylint:disable=no-member
# disallow ciphers with known vulnerabilities
context.set_ciphers(_RESTRICTED_SERVER_CIPHERS)
if cafile or capath or cadata:
context.load_verify_locations(cafile, capath, cadata)
elif context.verify_mode != CERT_NONE:
# no explicit cafile, capath or cadata but the verify mode is
# CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system
# root CA certificates for the given purpose. This may fail silently.
context.load_default_certs(purpose)
return context
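# Typical client-side use (sketch; the hostname is a placeholder):
#
#   context = create_default_context()
#   ssl_sock = context.wrap_socket(sock, server_hostname='example.com')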
def _create_unverified_context(protocol=PROTOCOL_SSLv23, cert_reqs=None,
check_hostname=False, purpose=Purpose.SERVER_AUTH,
certfile=None, keyfile=None,
cafile=None, capath=None, cadata=None):
"""Create a SSLContext object for Python stdlib modules
All Python stdlib modules shall use this function to create SSLContext
objects in order to keep common settings in one place. The configuration
is less restrict than create_default_context()'s to increase backward
compatibility.
"""
if not isinstance(purpose, _ASN1Object):
raise TypeError(purpose)
context = SSLContext(protocol)
# SSLv2 considered harmful.
context.options |= OP_NO_SSLv2 # pylint:disable=no-member
# SSLv3 has problematic security and is only required for really old
# clients such as IE6 on Windows XP
context.options |= OP_NO_SSLv3 # pylint:disable=no-member
if cert_reqs is not None:
context.verify_mode = cert_reqs
context.check_hostname = check_hostname # pylint: disable=attribute-defined-outside-init
if keyfile and not certfile:
raise ValueError("certfile must be specified")
if certfile or keyfile:
context.load_cert_chain(certfile, keyfile)
# load CA root certs
if cafile or capath or cadata:
context.load_verify_locations(cafile, capath, cadata)
elif context.verify_mode != CERT_NONE:
# no explicit cafile, capath or cadata but the verify mode is
# CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system
# root CA certificates for the given purpose. This may fail silently.
context.load_default_certs(purpose)
return context
# Used by http.client if no context is explicitly passed.
_create_default_https_context = create_default_context
# Backwards compatibility alias, even though it's not a public name.
_create_stdlib_context = _create_unverified_context
class SSLSocket(socket):
"""
gevent `ssl.SSLSocket <https://docs.python.org/2/library/ssl.html#ssl-sockets>`_
for Pythons >= 2.7.9 but less than 3.
"""
def __init__(self, sock=None, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_SSLv23, ca_certs=None,
do_handshake_on_connect=True,
family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None,
suppress_ragged_eofs=True, npn_protocols=None, ciphers=None,
server_hostname=None,
_context=None):
# fileno is ignored
# pylint: disable=unused-argument
if _context:
self._context = _context
else:
if server_side and not certfile:
raise ValueError("certfile must be specified for server-side "
"operations")
if keyfile and not certfile:
raise ValueError("certfile must be specified")
if certfile and not keyfile:
keyfile = certfile
self._context = SSLContext(ssl_version)
self._context.verify_mode = cert_reqs
if ca_certs:
self._context.load_verify_locations(ca_certs)
if certfile:
self._context.load_cert_chain(certfile, keyfile)
if npn_protocols:
self._context.set_npn_protocols(npn_protocols)
if ciphers:
self._context.set_ciphers(ciphers)
self.keyfile = keyfile
self.certfile = certfile
self.cert_reqs = cert_reqs
self.ssl_version = ssl_version
self.ca_certs = ca_certs
self.ciphers = ciphers
# Can't use sock.type as other flags (such as SOCK_NONBLOCK) get
# mixed in.
if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM:
raise NotImplementedError("only stream sockets are supported")
if PYPY:
socket.__init__(self, _sock=sock)
sock._drop()
else:
# CPython: XXX: Must pass the underlying socket, not our
# potential wrapper; test___example_servers fails the SSL test
# with a client-side EOF error. (Why?)
socket.__init__(self, _sock=sock._sock)
# The initializer for socket overrides the methods send(), recv(), etc.
# in the instance, which we don't need -- but we want to provide the
# methods defined in SSLSocket.
for attr in _delegate_methods:
try:
delattr(self, attr)
except AttributeError:
pass
if server_side and server_hostname:
raise ValueError("server_hostname can only be specified "
"in client mode")
if self._context.check_hostname and not server_hostname:
raise ValueError("check_hostname requires server_hostname")
self.server_side = server_side
self.server_hostname = server_hostname
self.do_handshake_on_connect = do_handshake_on_connect
self.suppress_ragged_eofs = suppress_ragged_eofs
self.settimeout(sock.gettimeout())
# See if we are connected
try:
self.getpeername()
except socket_error as e:
if e.errno != errno.ENOTCONN:
raise
connected = False
else:
connected = True
self._makefile_refs = 0
self._closed = False
self._sslobj = None
self._connected = connected
if connected:
# create the SSL object
try:
self._sslobj = self._context._wrap_socket(self._sock, server_side,
server_hostname, ssl_sock=self)
if do_handshake_on_connect:
timeout = self.gettimeout()
if timeout == 0.0:
# non-blocking
raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
self.do_handshake()
except socket_error as x:
self.close()
raise x
@property
def context(self):
return self._context
@context.setter
def context(self, ctx):
self._context = ctx
self._sslobj.context = ctx
def dup(self):
raise NotImplementedError("Can't dup() %s instances" %
self.__class__.__name__)
def _checkClosed(self, msg=None):
# raise an exception here if you wish to check for spurious closes
pass
def _check_connected(self):
if not self._connected:
# getpeername() will raise ENOTCONN if the socket is really
# not connected; note that we can be connected even without
# _connected being set, e.g. if connect() first returned
# EAGAIN.
self.getpeername()
def read(self, len=1024, buffer=None):
"""Read up to LEN bytes and return them.
Return zero-length string on EOF."""
self._checkClosed()
while 1:
if not self._sslobj:
raise ValueError("Read on closed or unwrapped SSL socket.")
if len == 0:
return b'' if buffer is None else 0
if len < 0 and buffer is None:
# This is handled natively in python 2.7.12+
raise ValueError("Negative read length")
try:
if buffer is not None:
return self._sslobj.read(len, buffer)
return self._sslobj.read(len or 1024)
except SSLWantReadError:
if self.timeout == 0.0:
raise
self._wait(self._read_event, timeout_exc=_SSLErrorReadTimeout)
except SSLWantWriteError:
if self.timeout == 0.0:
raise
# note: using _SSLErrorReadTimeout rather than _SSLErrorWriteTimeout below is intentional
self._wait(self._write_event, timeout_exc=_SSLErrorReadTimeout)
except SSLError as ex:
if ex.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs:
if buffer is not None:
return 0
return b''
raise
def write(self, data):
"""Write DATA to the underlying SSL channel. Returns
number of bytes of DATA actually transmitted."""
self._checkClosed()
while 1:
if not self._sslobj:
raise ValueError("Write on closed or unwrapped SSL socket.")
try:
return self._sslobj.write(data)
except SSLError as ex:
if ex.args[0] == SSL_ERROR_WANT_READ:
if self.timeout == 0.0:
raise
self._wait(self._read_event, timeout_exc=_SSLErrorWriteTimeout)
elif ex.args[0] == SSL_ERROR_WANT_WRITE:
if self.timeout == 0.0:
raise
self._wait(self._write_event, timeout_exc=_SSLErrorWriteTimeout)
else:
raise
def getpeercert(self, binary_form=False):
"""Returns a formatted version of the data in the
certificate provided by the other end of the SSL channel.
Return None if no certificate was provided, {} if a
certificate was provided, but not validated."""
self._checkClosed()
self._check_connected()
return self._sslobj.peer_certificate(binary_form)
def selected_npn_protocol(self):
self._checkClosed()
if not self._sslobj or not _ssl.HAS_NPN:
return None
return self._sslobj.selected_npn_protocol()
if hasattr(_ssl, 'HAS_ALPN'):
# 2.7.10+
def selected_alpn_protocol(self):
self._checkClosed()
if not self._sslobj or not _ssl.HAS_ALPN: # pylint:disable=no-member
return None
return self._sslobj.selected_alpn_protocol()
def cipher(self):
self._checkClosed()
if not self._sslobj:
return None
return self._sslobj.cipher()
def compression(self):
self._checkClosed()
if not self._sslobj:
return None
return self._sslobj.compression()
def __check_flags(self, meth, flags):
if flags != 0:
raise ValueError(
"non-zero flags not allowed in calls to %s on %s" %
(meth, self.__class__))
def send(self, data, flags=0, timeout=timeout_default):
self._checkClosed()
self.__check_flags('send', flags)
if timeout is timeout_default:
timeout = self.timeout
if not self._sslobj:
return socket.send(self, data, flags, timeout)
while True:
try:
return self._sslobj.write(data)
except SSLWantReadError:
if self.timeout == 0.0:
return 0
self._wait(self._read_event)
except SSLWantWriteError:
if self.timeout == 0.0:
return 0
self._wait(self._write_event)
def sendto(self, data, flags_or_addr, addr=None):
self._checkClosed()
if self._sslobj:
raise ValueError("sendto not allowed on instances of %s" %
self.__class__)
if addr is None:
return socket.sendto(self, data, flags_or_addr)
return socket.sendto(self, data, flags_or_addr, addr)
def sendmsg(self, *args, **kwargs):
# Ensure programs don't send data unencrypted if they try to
# use this method.
raise NotImplementedError("sendmsg not allowed on instances of %s" %
self.__class__)
def sendall(self, data, flags=0):
self._checkClosed()
self.__check_flags('sendall', flags)
try:
socket.sendall(self, data)
except _socket_timeout as ex:
if self.timeout == 0.0:
# Python 2 simply *hangs* in this case, which is bad, but
# Python 3 raises SSLWantWriteError. We do the same.
raise SSLWantWriteError("The operation did not complete (write)")
# Convert the socket.timeout back to the sslerror
raise SSLError(*ex.args)
def recv(self, buflen=1024, flags=0):
self._checkClosed()
if self._sslobj:
if flags != 0:
raise ValueError(
"non-zero flags not allowed in calls to recv() on %s" %
self.__class__)
if buflen == 0:
return b''
return self.read(buflen)
return socket.recv(self, buflen, flags)
def recv_into(self, buffer, nbytes=None, flags=0):
self._checkClosed()
if buffer is not None and (nbytes is None):
# Fix for python bug #23804: bool(bytearray()) is False,
# but we should read 0 bytes.
nbytes = len(buffer)
elif nbytes is None:
nbytes = 1024
if self._sslobj:
if flags != 0:
raise ValueError(
"non-zero flags not allowed in calls to recv_into() on %s" %
self.__class__)
return self.read(nbytes, buffer)
return socket.recv_into(self, buffer, nbytes, flags)
def recvfrom(self, buflen=1024, flags=0):
self._checkClosed()
if self._sslobj:
raise ValueError("recvfrom not allowed on instances of %s" %
self.__class__)
return socket.recvfrom(self, buflen, flags)
def recvfrom_into(self, buffer, nbytes=None, flags=0):
self._checkClosed()
if self._sslobj:
raise ValueError("recvfrom_into not allowed on instances of %s" %
self.__class__)
return socket.recvfrom_into(self, buffer, nbytes, flags)
def recvmsg(self, *args, **kwargs):
raise NotImplementedError("recvmsg not allowed on instances of %s" %
self.__class__)
def recvmsg_into(self, *args, **kwargs):
raise NotImplementedError("recvmsg_into not allowed on instances of "
"%s" % self.__class__)
def pending(self):
self._checkClosed()
if self._sslobj:
return self._sslobj.pending()
return 0
def shutdown(self, how):
self._checkClosed()
self._sslobj = None
socket.shutdown(self, how)
def close(self):
if self._makefile_refs < 1:
self._sslobj = None
socket.close(self)
else:
self._makefile_refs -= 1
if PYPY:
def _reuse(self):
self._makefile_refs += 1
def _drop(self):
if self._makefile_refs < 1:
self.close()
else:
self._makefile_refs -= 1
def _sslobj_shutdown(self):
while True:
try:
return self._sslobj.shutdown()
except SSLError as ex:
if ex.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs:
return ''
if ex.args[0] == SSL_ERROR_WANT_READ:
if self.timeout == 0.0:
raise
sys.exc_clear()
self._wait(self._read_event, timeout_exc=_SSLErrorReadTimeout)
elif ex.args[0] == SSL_ERROR_WANT_WRITE:
if self.timeout == 0.0:
raise
sys.exc_clear()
self._wait(self._write_event, timeout_exc=_SSLErrorWriteTimeout)
else:
raise
def unwrap(self):
if not self._sslobj:
raise ValueError("No SSL wrapper around " + str(self))
s = self._sslobj_shutdown()
self._sslobj = None
# match _ssl2; critical to drop/reuse here on PyPy
# XXX: _ssl3 returns an SSLSocket. Is that what the standard lib does on
# Python 2? Should we do that?
return socket(_sock=s)
def _real_close(self):
self._sslobj = None
socket._real_close(self) # pylint: disable=no-member
def do_handshake(self):
"""Perform a TLS/SSL handshake."""
self._check_connected()
while True:
try:
self._sslobj.do_handshake()
break
except SSLWantReadError:
if self.timeout == 0.0:
raise
self._wait(self._read_event, timeout_exc=_SSLErrorHandshakeTimeout)
except SSLWantWriteError:
if self.timeout == 0.0:
raise
self._wait(self._write_event, timeout_exc=_SSLErrorHandshakeTimeout)
if self._context.check_hostname:
if not self.server_hostname:
raise ValueError("check_hostname needs server_hostname "
"argument")
match_hostname(self.getpeercert(), self.server_hostname)
def _real_connect(self, addr, connect_ex):
if self.server_side:
raise ValueError("can't connect in server-side mode")
# Here we assume that the socket is client-side, and not
# connected at the time of the call. We connect it, then wrap it.
if self._connected:
raise ValueError("attempt to connect already-connected SSLSocket!")
self._sslobj = self._context._wrap_socket(self._sock, False, self.server_hostname, ssl_sock=self)
try:
if connect_ex:
rc = socket.connect_ex(self, addr)
else:
rc = None
socket.connect(self, addr)
if not rc:
self._connected = True
if self.do_handshake_on_connect:
self.do_handshake()
return rc
except socket_error:
self._sslobj = None
raise
def connect(self, addr):
"""Connects to remote ADDR, and then wraps the connection in
an SSL channel."""
self._real_connect(addr, False)
def connect_ex(self, addr):
"""Connects to remote ADDR, and then wraps the connection in
an SSL channel."""
return self._real_connect(addr, True)
def accept(self):
"""Accepts a new connection from a remote client, and returns
a tuple containing that new connection wrapped with a server-side
SSL channel, and the address of the remote client."""
newsock, addr = socket.accept(self)
newsock._drop_events_and_close(closefd=False) # Why, again?
newsock = self._context.wrap_socket(newsock,
do_handshake_on_connect=self.do_handshake_on_connect,
suppress_ragged_eofs=self.suppress_ragged_eofs,
server_side=True)
return newsock, addr
def makefile(self, mode='r', bufsize=-1):
"""Make and return a file-like object that
works with the SSL connection. Just use the code
from the socket module."""
if not PYPY:
self._makefile_refs += 1
# close=True so as to decrement the reference count when done with
# the file-like object.
return _fileobject(self, mode, bufsize, close=True)
def get_channel_binding(self, cb_type="tls-unique"):
"""Get channel binding data for current connection. Raise ValueError
if the requested `cb_type` is not supported. Return bytes of the data
or None if the data is not available (e.g. before the handshake).
"""
if cb_type not in CHANNEL_BINDING_TYPES:
raise ValueError("Unsupported channel binding type")
if cb_type != "tls-unique":
raise NotImplementedError(
"{0} channel binding type not implemented"
.format(cb_type))
if self._sslobj is None:
return None
return self._sslobj.tls_unique_cb()
def version(self):
"""
Return a string identifying the protocol version used by the
current SSL channel, or None if there is no established channel.
"""
if self._sslobj is None:
return None
return self._sslobj.version()
if PYPY or not hasattr(SSLSocket, 'timeout'):
# PyPy (and certain versions of CPython) doesn't have a direct
# 'timeout' property on raw sockets, because that's not part of
# the documented specification. We may wind up wrapping a raw
# socket (when ssl is used with PyWSGI) or a gevent socket, which
# does have a read/write timeout property as an alias for
# get/settimeout, so make sure that's always the case because
# pywsgi can depend on that.
SSLSocket.timeout = property(lambda self: self.gettimeout(),
lambda self, value: self.settimeout(value))
_SSLErrorReadTimeout = SSLError('The read operation timed out')
_SSLErrorWriteTimeout = SSLError('The write operation timed out')
_SSLErrorHandshakeTimeout = SSLError('The handshake operation timed out')
def wrap_socket(sock, keyfile=None, certfile=None,
server_side=False, cert_reqs=CERT_NONE,
ssl_version=PROTOCOL_SSLv23, ca_certs=None,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
ciphers=None):
return SSLSocket(sock=sock, keyfile=keyfile, certfile=certfile,
server_side=server_side, cert_reqs=cert_reqs,
ssl_version=ssl_version, ca_certs=ca_certs,
do_handshake_on_connect=do_handshake_on_connect,
suppress_ragged_eofs=suppress_ragged_eofs,
ciphers=ciphers)
def get_server_certificate(addr, ssl_version=PROTOCOL_SSLv23, ca_certs=None):
"""Retrieve the certificate from the server at the specified address,
and return it as a PEM-encoded string.
If 'ca_certs' is specified, validate the server cert against it.
If 'ssl_version' is specified, use it in the connection attempt."""
_, _ = addr
if ca_certs is not None:
cert_reqs = CERT_REQUIRED
else:
cert_reqs = CERT_NONE
context = _create_stdlib_context(ssl_version,
cert_reqs=cert_reqs,
cafile=ca_certs)
with closing(create_connection(addr)) as sock:
with closing(context.wrap_socket(sock)) as sslsock:
dercert = sslsock.getpeercert(True)
return DER_cert_to_PEM_cert(dercert)
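# Hedged usage sketch (not part of this module; assumes the host is
# reachable and this module's patched ssl machinery is active):
#   pem = get_server_certificate(('example.com', 443))
#   pem.splitlines()[0]   # '-----BEGIN CERTIFICATE-----'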
| [
"[email protected]"
]
| |
f35954923107394313f1f954760bca011b8ce868 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/CISCO-EVC-CAPABILITY.py | 454b6d6567630d64f7fd37b6bc76b1264ec7ba05 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 3,608 | py | #
# PySNMP MIB module CISCO-EVC-CAPABILITY (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-EVC-CAPABILITY
# Produced by pysmi-0.3.4 at Wed May 1 11:57:40 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
ciscoAgentCapability, = mibBuilder.importSymbols("CISCO-SMI", "ciscoAgentCapability")
AgentCapabilities, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "AgentCapabilities", "NotificationGroup", "ModuleCompliance")
iso, ModuleIdentity, Bits, Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, IpAddress, Integer32, NotificationType, MibIdentifier, TimeTicks, Unsigned32, ObjectIdentity, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "ModuleIdentity", "Bits", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "IpAddress", "Integer32", "NotificationType", "MibIdentifier", "TimeTicks", "Unsigned32", "ObjectIdentity", "Counter64")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
ciscoEvcCapability = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 7, 568))
ciscoEvcCapability.setRevisions(('2008-08-26 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: ciscoEvcCapability.setRevisionsDescriptions(('Initial version of this MIB module.',))
if mibBuilder.loadTexts: ciscoEvcCapability.setLastUpdated('200808260000Z')
if mibBuilder.loadTexts: ciscoEvcCapability.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoEvcCapability.setContactInfo('Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: [email protected]')
if mibBuilder.loadTexts: ciscoEvcCapability.setDescription('Agent capabilities for the CISCO-EVC-MIB.')
ciscoEvcCapabilityV12R02SR = AgentCapabilities((1, 3, 6, 1, 4, 1, 9, 7, 568, 1))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoEvcCapabilityV12R02SR = ciscoEvcCapabilityV12R02SR.setProductRelease('Cisco IOS 12.2 SR Release')
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoEvcCapabilityV12R02SR = ciscoEvcCapabilityV12R02SR.setStatus('current')
if mibBuilder.loadTexts: ciscoEvcCapabilityV12R02SR.setDescription('CISCO-EVC-MIB capabilities.')
ciscoEvcCapabilityV12R02XO = AgentCapabilities((1, 3, 6, 1, 4, 1, 9, 7, 568, 2))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoEvcCapabilityV12R02XO = ciscoEvcCapabilityV12R02XO.setProductRelease('Cisco IOS 12.2 XO Release.')
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoEvcCapabilityV12R02XO = ciscoEvcCapabilityV12R02XO.setStatus('current')
if mibBuilder.loadTexts: ciscoEvcCapabilityV12R02XO.setDescription('CISCO-EVC-MIB capabilities.')
mibBuilder.exportSymbols("CISCO-EVC-CAPABILITY", ciscoEvcCapability=ciscoEvcCapability, ciscoEvcCapabilityV12R02SR=ciscoEvcCapabilityV12R02SR, PYSNMP_MODULE_ID=ciscoEvcCapability, ciscoEvcCapabilityV12R02XO=ciscoEvcCapabilityV12R02XO)
| [
"[email protected]"
]
| |
eb09ae376760615f5b04d20e921dc431942061bc | c85b91bfdd7eb2fa5a7d6c6a9b722c8548c83105 | /vscode/extensions/ms-python.python-2020.3.69010/languageServer.0.5.31/Typeshed/stdlib/3/posix.pyi | d99a4584d6d9a6efe91e80408d29b4bf3a3e89ff | [
"MIT",
"Apache-2.0"
]
| permissive | ryangniadek/.dotfiles | ddf52cece49c33664b56f01b17d476cf0f1fafb1 | be272baf6fb7d7cd4f4db1f6812b710196511ffe | refs/heads/master | 2021-01-14T07:43:12.516127 | 2020-03-22T20:27:22 | 2020-03-22T20:27:22 | 242,632,623 | 0 | 0 | MIT | 2020-09-12T17:28:01 | 2020-02-24T02:50:06 | Python | UTF-8 | Python | false | false | 2,650 | pyi | # Stubs for posix
# NOTE: These are incomplete!
from typing import NamedTuple, Tuple
class stat_result:
# For backward compatibility, the return value of stat() is also
# accessible as a tuple of at least 10 integers giving the most important
# (and portable) members of the stat structure, in the order st_mode,
# st_ino, st_dev, st_nlink, st_uid, st_gid, st_size, st_atime, st_mtime,
# st_ctime. More items may be added at the end by some implementations.
st_mode: int # protection bits,
st_ino: int # inode number,
st_dev: int # device,
st_nlink: int # number of hard links,
st_uid: int # user id of owner,
st_gid: int # group id of owner,
st_size: int # size of file, in bytes,
st_atime: float # time of most recent access,
st_mtime: float # time of most recent content modification,
st_ctime: float # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows)
st_atime_ns: int # time of most recent access, in nanoseconds
st_mtime_ns: int # time of most recent content modification in nanoseconds
st_ctime_ns: int # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) in nanoseconds
# not documented
def __init__(self, tuple: Tuple[int, ...]) -> None: ...
# On some Unix systems (such as Linux), the following attributes may also
# be available:
st_blocks: int # number of blocks allocated for file
st_blksize: int # filesystem blocksize
st_rdev: int # type of device if an inode device
st_flags: int # user defined flags for file
# On other Unix systems (such as FreeBSD), the following attributes may be
# available (but may be only filled out if root tries to use them):
st_gen: int # file generation number
st_birthtime: int # time of file creation
# On Mac OS systems, the following attributes may also be available:
st_rsize: int
st_creator: int
st_type: int
uname_result = NamedTuple('uname_result', [('sysname', str), ('nodename', str),
('release', str), ('version', str), ('machine', str)])
times_result = NamedTuple('times_result', [
('user', float),
('system', float),
('children_user', float),
('children_system', float),
('elapsed', float),
])
waitid_result = NamedTuple('waitid_result', [
('si_pid', int),
('si_uid', int),
('si_signo', int),
('si_status', int),
('si_code', int),
])
sched_param = NamedTuple('sched_priority', [
('sched_priority', int),
])
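# Hedged illustration of the tuple compatibility documented in stat_result
# above (runtime behaviour of os.stat(); shown as a comment only, since
# this file is a type stub rather than executable code):
#   st = os.stat('.')
#   assert st[0] == st.st_mode and st[6] == st.st_size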
| [
"[email protected]"
]
| |
cb5a7ad60c72cc52d78ddfbdca5cecf634886a08 | 539815f896acbc88b72338992f1adcd55bd7700f | /demo/movie_svc/app_instance.py | d7fce6337abd89e14700e0110df3a57cb570f72d | [
"MIT"
]
| permissive | talkpython/responder-webframework-minicourse | dcb0f38ead081b75a536aca99c6f52fc172c1c0e | 321d52d8ddb434952f373a127b51ef3bbfbeb6af | refs/heads/master | 2021-06-16T13:39:19.149560 | 2021-03-11T20:29:24 | 2021-03-11T20:29:24 | 178,065,735 | 29 | 21 | MIT | 2021-03-11T20:29:25 | 2019-03-27T19:58:30 | null | UTF-8 | Python | false | false | 251 | py | import responder
# CORS wasn't demoed in the course, but is required to be used from
# external apps like movie exploder.
cors_params = {
'allow_origins': '*',
'allow_methods': '*',
}
api = responder.API(cors=True, cors_params=cors_params)
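# Hedged usage sketch (the route, handler and port are assumptions, and
# the import path depends on how the package is laid out): other modules
# import the shared `api` instance and attach routes to it, e.g.
#
#   from app_instance import api
#
#   @api.route("/api/movies")
#   def list_movies(req, resp):
#       resp.media = {"movies": []}
#
#   if __name__ == "__main__":
#       api.run(port=5000)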
| [
"[email protected]"
]
| |
64876e9ed6c56a785bda85f43297a2f5c6c1aaa3 | 5f8baed3acceaf7b3127f8fbe0ed417070c0e809 | /DiSAN/src/utils/logger.py | 81b83bca491c2b056252a64abcb03365dde710a0 | [
"MIT"
]
| permissive | satwik77/Transformer-Computation-Analysis | ead241d848af51fefd85fe365a3ff87b9251bac5 | 82341f5f2f9cd0831e390f44b338165e45cd6413 | refs/heads/main | 2022-12-29T01:32:12.081865 | 2020-10-10T07:04:27 | 2020-10-10T07:04:27 | 301,588,833 | 5 | 3 | null | null | null | null | UTF-8 | Python | false | false | 3,007 | py | import logging
import pdb
import pandas as pd
# Ignore warnings
import warnings
warnings.filterwarnings("ignore")
import json
'''Logging Modules'''
#log_format='%(asctime)s | %(levelname)s | %(filename)s:%(lineno)s - %(funcName)5s() ] | %(message)s'
def get_logger(name, log_file_path='./logs/temp.log', logging_level=logging.INFO, log_format='%(asctime)s | %(levelname)s | %(filename)s: %(lineno)s : %(funcName)s() ::\t %(message)s'):
logger = logging.getLogger(name)
logger.setLevel(logging_level)
formatter = logging.Formatter(log_format)
file_handler = logging.FileHandler(log_file_path, mode='w')
file_handler.setLevel(logging_level)
file_handler.setFormatter(formatter)
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging_level)
stream_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.addHandler(stream_handler)
# logger.addFilter(ContextFilter(expt_name))
return logger
def print_log(logger, dict):
string = ''
for key, value in dict.items():
string += '\n {}: {}\t'.format(key.replace('_', ' '), value)
# string = string.strip()
logger.info(string)
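# Hedged usage sketch (paths and values are illustrative; the ./logs
# directory must already exist for the FileHandler to open):
#   logger = get_logger('disan', './logs/demo.log')
#   logger.info('training started')
#   print_log(logger, {'epoch': 1, 'val_loss': 0.42})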
def store_results(config, bleu_score, error_score):
#pdb.set_trace()
try:
with open(config.result_path) as f:
res_data =json.load(f)
except:
res_data = {}
data= {'run_name' : str(config.run_name)
, 'best bleu score' : str(bleu_score)
, 'minimum error' : str(error_score)
, 'dataset' : config.dataset
, 'd_model' : config.d_model
, 'd_ff' : config.d_ff
, 'layers' : config.layers
, 'heads': config.heads
, 'dropout' : config.dropout
, 'lr' : config.lr
, 'batch_size' : config.batch_size
, 'epochs' : config.epochs
}
# res_data.update(data)
res_data[str(config.run_name)] = data
with open(config.result_path, 'w', encoding='utf-8') as f:
json.dump(res_data, f, ensure_ascii= False, indent= 4)
def store_val_results(config, acc_score):
#pdb.set_trace()
try:
with open(config.val_result_path) as f:
res_data = json.load(f)
except:
res_data = {}
try:
data= {'run_name' : str(config.run_name)
, 'acc score': str(acc_score)
, 'dataset' : config.dataset
, 'emb1_size': config.emb1_size
, 'emb2_size': config.emb2_size
, 'cell_type' : config.cell_type
, 'hidden_size' : config.hidden_size
, 'depth' : config.depth
, 'dropout' : config.dropout
, 'init_range' : config.init_range
, 'bidirectional' : config.bidirectional
, 'lr' : config.lr
, 'batch_size' : config.batch_size
, 'opt' : config.opt
, 'use_word2vec' :config.use_word2vec
}
# res_data.update(data)
res_data[str(config.run_name)] = data
with open(config.val_result_path, 'w', encoding='utf-8') as f:
json.dump(res_data, f, ensure_ascii= False, indent= 4)
except:
pdb.set_trace() | [
"[email protected]"
]
| |
4344dd113c53ec44e77b7beb867a74a0a9abcdd1 | 773f6abee91e5368e43b34d8ad179c4ab9056da1 | /gen/referencegenome.py | 5733374a02094277fbde4887efd4b26c7b446068 | []
| no_license | richstoner/aibs | 3dc9489ee6a1db836d58ec736b13d35a7cffc215 | bfc7e732b53b4dff55f7c3edccdd0703f4bab25f | refs/heads/master | 2021-01-10T05:11:09.484238 | 2013-03-03T06:19:34 | 2013-03-03T06:19:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 508 | py | # -*- coding: utf-8 -*-
# Rich Stoner, 2013
class ReferenceGenome(object):
    '''aibs.model.referencegenome (autogen)'''
    def __init__(self, initial_data=None):
        # Fields (``self`` only exists inside a method, so the defaults
        # are set here rather than in the class body)
        self.id = 0
        self.name = ''
        self.build = ''
        self.organism_id = 0
        # Associations
        self.organism = None # belongs_to Organism
        self.genome_locuses = [] # has_many GenomeLocus
        # overlay any supplied values (mutable default argument avoided)
        for k, v in (initial_data or {}).iteritems():
            setattr(self, k, v)
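    # Hedged usage sketch (field values are illustrative, not AIBS data):
    #   genome = ReferenceGenome({'id': 1, 'name': 'GRCm38', 'organism_id': 2})
    #   print genome.name   # -> GRCm38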
# add class methods and private methods here | [
"[email protected]"
]
| |
371d452b19b3f9165eca4b33766c6a00481cdab6 | 6ce8390b9d508d66da381c467852091116c2be33 | /.history/neutrinos_20210602142705.py | 013d2287e5d4170efcfeab0d5e8ebaf7530e108e | []
| no_license | MichaelCullen2011/QuantumAndNeutrinos | 0a269f44f35826f541b79b22d4b0f6927e0b10f8 | 9fd999e5e3b4248248b0e5803789d88015d5bd7d | refs/heads/master | 2023-05-07T20:43:14.766929 | 2021-06-02T16:27:34 | 2021-06-02T16:27:34 | 317,365,182 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,793 | py | import numpy as np
import matplotlib.pyplot as plt
'''
To Do - tau interactions
- clean up constant variables into dicts or lists
'''
'''
Variables and Constants
'''
# # Constants
# Mass difference squared between two flavour states
delta_m_sq = {'eu': 7.53e-5, 'tu': 2.44e-3, 'te': 2.44e-3, 'ue': 7.53e-5, 'ut': 2.44e-3, 'et': 2.44e-3} # eV**2
delta_m_e_mu_sq = 7.53e-5 # eV**2
delta_m_tau_mu_sq = 2.44e-3 # eV**2 # can be +ve or -ve
delta_m_tau_e_sq = 2.44e-3 # eV**2 # can be +ve or -ve
delta_m_mu_e_sq = delta_m_e_mu_sq # eV**2
delta_m_mu_tau_sq = delta_m_tau_mu_sq # can be +ve or -ve
delta_m_e_tau_sq = delta_m_tau_e_sq # eV**2
# Angle between flavour states
sin_q_theta = {'eu': 0.846, 'tu': 0.92, 'te': 0.093, 'ue': 0.846, 'ut': 0.92, 'et': 0.093}
sin_sq_theta_e_tau = 0.093
sin_sq_theta_e_mu = 0.846
sin_sq_theta_mu_tau = 0.92 # actually > 0.92 but it varies on atmospheric values
sin_sq_theta_mu_e = sin_sq_theta_e_mu
sin_sq_theta_tau_e = sin_sq_theta_e_tau
sin_sq_theta_tau_mu = sin_sq_theta_mu_tau
G_f = 1.1663787e-5 # GeV-2
sin_sq_theta_w = 0.22290 # Weinberg angle
m_e = 0.511 * 1e-3 # GeV c-2
m_u = 105 * 1e-3 # GeV c-2
sigma_naught = 1.72e-45 # m**2 / GeV
# # Variables
m_out = 10 # reactant mass out (electrons, muons etc)
m_in = 1000 # reactant mass in (electrons, muons etc)
E_v = 100 # Neutrino energy
L = 10 # distance oscillating (for oscillation)
E = E_v # Neutrino beam energy
d = 10 # distance travelled (for decoherence)
'''
Oscillation Probabilities - p_oscill = (np.sin(2 * theta))**2 * (np.sin((delta_m_sq * L) / (4 * E)))**2
'''
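# For reference, the standard two-flavour vacuum formula (L in km, E in
# GeV, delta_m_sq in eV**2) is
#
#   P(a -> b) = sin^2(2*theta) * sin^2(1.27 * delta_m_sq * L / E)
#
# Worked check (hand-computed): with sin^2(2*theta) = 0.846,
# delta_m_sq = 7.53e-5 eV**2 and L/E ~ 16500 km/GeV, the phase is
# 1.27 * 7.53e-5 * 16500 ~ 1.58 ~ pi/2, so sin^2(...) ~ 1.0 and
# P ~ 0.846 -- the first oscillation maximum.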
class Oscillations:
def __init__(self, distance):
self.prob_list = {
'eu': [], 'et': [],
'ue': [], 'ut': [],
'te': [], 'tu': [],
'ee': [], 'uu': [], # do these last as theyre calculated based on the previously calculated
'tt': [],
}
self.prob_reduced = {
'e': [],
'u': [],
't': []
}
self.prob_neut = []
self.max_points = 1000 # Number of points
E = 4 # GeV
self.L = distance # Km
self.x_range = np.linspace(0, self.L / E, self.max_points)
theta_range = np.linspace(0, 2 * np.pi, self.max_points)
def calculate(self):
n = 0
for x in self.x_range:
n += 1
print(f"Calculated Probability {n} of {self.max_points}")
            for change in self.prob_list.keys():
                if change not in ['ee', 'uu', 'tt']:
                    self.prob_list[change].append(Oscillations.prob(self, flavours=change, x=x))
            # the survival probabilities are appended once per x, after all
            # six transition probabilities exist (appending them inside the
            # loop above added three duplicate entries per step)
            self.prob_list['ee'].append(1 - (self.prob_list['eu'][-1] + self.prob_list['et'][-1]))
            self.prob_list['uu'].append(1 - (self.prob_list['ue'][-1] + self.prob_list['ut'][-1]))
            self.prob_list['tt'].append(1 - (self.prob_list['te'][-1] + self.prob_list['tu'][-1]))
self.prob_reduced['e'] = [self.prob_list['ee'], self.prob_list['eu'], self.prob_list['et']]
self.prob_reduced['u'] = [self.prob_list['uu'], self.prob_list['ue'], self.prob_list['ut']]
self.prob_reduced['t'] = [self.prob_list['tt'], self.prob_list['te'], self.prob_list['tu']]
self.prob_neut = [self.prob_reduced['e'], self.prob_reduced['u'], self.prob_reduced['t']]
# fig, axs = plt.subplots(3)
# fig.suptitle('Neutrino Oscillations')
# n = 0
# for initial in prob_neut:
# if n == 0:
# for p in initial:
# axs[n].plot(self.x_range, p)
# axs[n].legend(['e to e', 'e to mu', 'e to tau'], loc=1)
# if n == 1:
# for p in initial:
# axs[n].plot(self.x_range, p)
# axs[n].legend(['mu to mu', 'mu to e', 'mu to tau'], loc=1)
# if n == 2:
# for p in initial:
# axs[n].plot(self.x_range, p)
# axs[n].legend(['tau to tau', 'tau to e', 'tau to mu'], loc=1)
# n += 1
    def plot(self):
        # self.prob_neut is a nested list, not a dict, so plot from the
        # flat probability dict instead and label each curve
        fig, ax = plt.subplots()
        fig.suptitle('Neutrino Oscillations')
        for interaction, values in self.prob_list.items():
            ax.plot(self.x_range, values, '-', label=interaction)
        ax.legend(loc=1)
        plt.show()
def prob(self, flavours, x):
if flavours == 'eu':
prob = sin_sq_theta_e_mu * np.square(np.sin(1.27 * delta_m_e_mu_sq * x / 4))
elif flavours == 'et':
prob = sin_sq_theta_e_tau * np.square(np.sin(1.27 * delta_m_e_tau_sq * x / 4))
elif flavours == 'ue':
prob = sin_sq_theta_e_mu * np.square(np.sin(1.27 * delta_m_mu_e_sq * x / 4))
elif flavours == 'ut':
prob = sin_sq_theta_mu_tau * np.square(np.sin(1.27 * delta_m_mu_tau_sq * x / 4))
elif flavours == 'te':
prob = sin_sq_theta_tau_e * np.square(np.sin(1.27 * delta_m_tau_e_sq * x / 4))
elif flavours == 'tu':
prob = sin_sq_theta_tau_mu * np.square(np.sin(1.27 * delta_m_tau_mu_sq * x / 4))
return prob
'''
Cross Sections
'''
class CrossSections:
def __init__(self, energy, lepton):
E_v = energy
sigma_naught = 1.72e-45
s_e = sigma_naught * np.pi / G_f**2
s_u = s_e * (m_u / m_e)
sigma_naught_e = (2 * m_e * G_f**2 * E_v) / np.pi
sigma_naught_u = (2 * m_u * G_f**2 * E_v) / np.pi
if lepton == 'e':
sigma_naught = sigma_naught_e
self.cs_without_ms = {'e_e': [], 'E_E': [], 'E_U': [], 'u_e': [], 'u_u': [], 'U_U': []}
self.cs_with_ms = {'e_e': [], 'E_E': [], 'E_U': [], 'u_e': [], 'u_u': [], 'U_U': []}
elif lepton == 'u':
sigma_naught = sigma_naught_u
self.cs_without_ms = {'e_e': [], 'E_E': [], 'U_E': [], 'e_u': [], 'u_u': [], 'U_U': []}
self.cs_with_ms = {'e_e': [], 'E_E': [], 'U_E': [], 'e_u': [], 'u_u': [], 'U_U': []}
for flavour in self.cs_without_ms.keys():
cs = CrossSections.neutrino_and_electron(self, flavour=flavour) * energy * sigma_naught
self.cs_without_ms[flavour].append(cs)
            # cs already carries the energy * sigma_naught factor, so only
            # the mass-suppression factor is applied on top of it
            self.cs_with_ms[flavour].append(
                cs * CrossSections.mass_suppression(
                    self, flavour, energy=energy, reaction_lepton=lepton
                )
            )
def neutrino_and_electron(self, flavour):
if flavour == 'e_e':
cs = 0.25 + sin_sq_theta_w + (4 / 3) * np.square(sin_sq_theta_w)
elif flavour == 'E_E':
cs = (1 / 12) + 1 / 3 * (sin_sq_theta_w + 4 / 3 * np.square(sin_sq_theta_w))
elif flavour == 'E_U' or flavour == 'U_E':
cs = 1 / 3
elif flavour == 'u_e' or flavour == 'e_u':
cs = 1
elif flavour == 'u_u':
cs = 1 / 4 - sin_sq_theta_w + 4 / 3 * np.square(sin_sq_theta_w)
elif flavour == 'U_U':
cs = 1 / 12 - 1 / 3 * sin_sq_theta_w + 4 / 3 * np.square(sin_sq_theta_w)
return cs
def mass_suppression(self, flavour, energy, reaction_lepton='e'):
m_E = m_e
m_U = m_u
if reaction_lepton == 'e':
m_in = m_e
elif reaction_lepton == 'u':
m_in = m_u
if flavour == 'e_e':
zeta = 1 - ((m_e ** 2) / (m_in ** 2 + 2 * m_in * energy))
elif flavour == 'E_E':
zeta = 1 - ((m_e ** 2) / (m_in ** 2 + 2 * m_in * energy))
elif flavour == 'E_U':
zeta = 1 - ((m_u ** 2) / (m_in ** 2 + 2 * m_in * energy))
elif flavour == 'u_e':
zeta = 1 - ((m_u ** 2) / (m_in ** 2 + 2 * m_in * energy))
elif flavour == 'u_u':
zeta = 1 - ((m_e ** 2) / (m_in ** 2 + 2 * m_in * energy))
elif flavour == 'U_U':
zeta = 1 - ((m_e ** 2) / (m_in ** 2 + 2 * m_in * energy))
# neutrino-muon specific reactions
elif flavour == 'U_E':
zeta = 1 - ((m_e ** 2) / (m_in ** 2 + 2 * m_in * energy))
elif flavour == 'e_u':
zeta = 1 - ((m_e ** 2) / (m_in ** 2 + 2 * m_in * energy))
return zeta
'''
Wave Functions (for plotting)
'''
class WaveFunctions:
def __init__(self, accuracy):
phi = np.linspace(0, np.pi, accuracy)
theta = np.linspace(0, 2 * np.pi, 20)
prob_at_detector = {'e_e': [], 'e_mu': [], 'e_tau': [],
'mu_mu': [], 'mu_e': [], 'mu_tau': [],
'tau_tau': [], 'tau_e': [], 'tau_mu': []}
flavour_list = ['e_e', 'e_mu', 'e_tau', 'mu_mu', 'mu_e', 'mu_tau', 'tau_tau', 'tau_e', 'tau_mu']
detector_1 = {'e_e': [], 'e_mu': [], 'e_tau': [],
'mu_mu': [], 'mu_e': [], 'mu_tau': [],
'tau_tau': [], 'tau_e': [], 'tau_mu': []}
detector_2 = {'e_e': [], 'e_mu': [], 'e_tau': [],
'mu_mu': [], 'mu_e': [], 'mu_tau': [],
'tau_tau': [], 'tau_e': [], 'tau_mu': []}
for flavour_change in flavour_list:
for phi_value in phi:
prob_at_detector[flavour_change].append(WaveFunctions.prob(self, flavour=flavour_change, phi=phi_value))
            for pair in prob_at_detector[flavour_change]:
                detector_1[flavour_change].append(pair[0])
                detector_2[flavour_change].append(pair[1])  # second component, not a copy of the first
fig1 = plt.figure()
fig2 = plt.figure()
ax1 = fig1.add_subplot(111)
ax2 = fig2.add_subplot(111)
ax1.title.set_text('Detector 1 Probabilities')
ax2.title.set_text('Detector 2 Probabilities')
ax1.set_xticks(ticks=np.linspace(start=0, stop=2 * np.pi, num=int(accuracy)))
ax1.set_yticks(ticks=np.linspace(0, 1, num=5))
ax2.set_xticks(ticks=np.linspace(start=0, stop=2 * np.pi, num=int(accuracy)))
ax2.set_yticks(ticks=np.linspace(0, 1, num=5))
for prob_list in detector_1.values():
#print(prob_list)
ax1.plot(phi, prob_list, '--', linewidth=1)
for prob_list in detector_2.values():
#print(prob_list)
ax2.plot(phi, prob_list, '--', linewidth=1)
ax1.legend(flavour_list, loc=1)
ax2.legend(flavour_list, loc=1)
def prob(self, flavour, phi):
order_tau_d_same = 1
order_tau_d_change = 1 / 3
if flavour == 'e_e':
prob_f1 = 1
prob_f2 = 0
elif flavour == 'e_mu':
prob_f1 = 1 - (np.sqrt(sin_sq_theta_e_tau) / 2) * (1 - order_tau_d_change * np.cos(phi))
prob_f2 = (np.sqrt(sin_sq_theta_e_tau) / 2) * (1 - order_tau_d_change * np.cos(phi))
elif flavour == 'e_tau':
prob_f1 = 1 - (np.sqrt(sin_sq_theta_e_tau) / 2) * (1 - order_tau_d_change * np.cos(phi))
prob_f2 = (np.sqrt(sin_sq_theta_e_tau) / 2) * (1 - order_tau_d_change * np.cos(phi))
if flavour == 'mu_mu':
prob_f1 = 1
prob_f2 = 0
elif flavour == 'mu_e':
prob_f1 = 1 - (np.sqrt(sin_sq_theta_e_mu) / 2) * (1 - order_tau_d_change * np.cos(phi))
prob_f2 = (np.sqrt(sin_sq_theta_e_mu) / 2) * (1 - order_tau_d_change * np.cos(phi))
elif flavour == 'mu_tau':
prob_f1 = 1 - (np.sqrt(sin_sq_theta_mu_tau) / 2) * (1 - order_tau_d_change * np.cos(phi))
prob_f2 = (np.sqrt(sin_sq_theta_mu_tau) / 2) * (1 - order_tau_d_change * np.cos(phi))
if flavour == 'tau_tau':
prob_f1 = 1
prob_f2 = 0
elif flavour == 'tau_e':
prob_f1 = 1 - (np.sqrt(sin_sq_theta_tau_e) / 2) * (1 - order_tau_d_change * np.cos(phi))
prob_f2 = (np.sqrt(sin_sq_theta_tau_e) / 2) * (1 - order_tau_d_change * np.cos(phi))
elif flavour == 'tau_mu':
prob_f1 = 1 - (np.sqrt(sin_sq_theta_tau_mu) / 2) * (1 - order_tau_d_change * np.cos(phi))
prob_f2 = (np.sqrt(sin_sq_theta_tau_mu) / 2) * (1 - order_tau_d_change * np.cos(phi))
return prob_f1, prob_f2
'''
Gates
'''
class Gates:
def __init__(self, energy_list):
self.energy_list = energy_list
self.gate_energy_reactions = None
self.leptons = {'e': [], 'u': []} # currently only considering e and mu (no tau)
self.all_values = {}
def calculate(self):
for energy in self.energy_list:
for lepton in self.leptons.keys():
# print(f'\n Cross Sections for Reactions with {lepton} at {energy} GeV:')
self.leptons[lepton] = CrossSections(energy=energy, lepton=lepton) # Energy in GeV
gate_reactions = Gates.gate_cs(Gates.combine_cs(self))
if self.gate_energy_reactions is None: # checking if first time running this script
self.gate_energy_reactions = gate_reactions
else:
for flavour, value in gate_reactions.items():
self.gate_energy_reactions[flavour].append(value[0])
self.all_values[energy] = [values for values in gate_reactions.values()]
print(f"Average Prob for {energy} GeV: ", np.average(self.all_values[energy]))
if energy == self.energy_list[-1]:
print(f"Neutrino-Lepton Gate Probabilities for {energy}GeV: \n {gate_reactions}")
# plot
Gates.plot_energies(self)
plt.show()
def combine_cs(self):
all_cs = {'e_e': [], 'e_u': [], 'u_u': [], 'u_e': [], 'E_E': [], 'E_U': [], 'U_U': [], 'U_E': []}
e_class = self.leptons['e']
u_class = self.leptons['u']
for all_reaction in all_cs.keys():
for e_reaction, e_value in e_class.cs_without_ms.items():
if e_reaction == all_reaction:
all_cs[all_reaction].append(e_value[0])
for u_reaction, u_value in u_class.cs_without_ms.items():
if u_reaction == all_reaction:
all_cs[all_reaction].append(u_value[0])
for all_reaction, all_values in all_cs.items():
if len(all_values) > 1:
all_cs[all_reaction] = [(all_values[0] + all_values[1]) / 2]
return all_cs
@staticmethod
def gate_cs(all_cs):
# creates our final dict with the probabilities for all gate interactions
single_reactions = ['e_e', 'e_u', 'u_u', 'u_e']
single_reactions_anti = ['E_E', 'E_U', 'U_U', 'U_E']
whole_reaction = []
for first_reaction in single_reactions:
for second_reaction in single_reactions:
whole_reaction.append(first_reaction[0] + second_reaction[0] + '_' + first_reaction[2] + second_reaction[2])
for first_reaction in single_reactions_anti:
for second_reaction in single_reactions_anti:
whole_reaction.append(first_reaction[0] + second_reaction[0] + '_' + first_reaction[2] + second_reaction[2])
gate_reactions = {whole_keys: [] for whole_keys in whole_reaction}
# reaction 1 is reaction_combined[0] + '_' + reaction_combined[3]
# reaction 2 in reaction_combined[1] + '_' + reaction_combined[4]
for reaction_combined in gate_reactions.keys():
first_reaction = reaction_combined[0] + '_' + reaction_combined[3]
second_reaction = reaction_combined[1] + '_' + reaction_combined[4]
gate_reactions[reaction_combined] = [all_cs[first_reaction][0] * all_cs[second_reaction][0]]
# for key, value in gate_reactions.items():
# print(f"{key}: {value}")
return gate_reactions
def plot_energies(self):
for flavour, values in self.gate_energy_reactions.items():
self.gate_energy_reactions[flavour] = [values, list(self.energy_list)]
for flavour, values in self.gate_energy_reactions.items():
plt.plot(values[1], values[0], '-')
plt.title(f"Neutrino-Lepton Gate Cross-Sections for Energies: {int(min(self.energy_list))}GeV - {int(max(self.energy_list))}GeV")
plt.legend(self.gate_energy_reactions.keys())
'''
Running
'''
Oscillations(distance=1e6).calculate() # Prints oscillation probabilities for flavours given the distance it travels
# WaveFunctions(accuracy=20) # Currently Broken. Doesnt plot correctly # Accuracy is the number of points within the range
# Gates(energy_list=np.linspace(1, 100, 10)).calculate() # calculates and plots gate probabilities at various energies for different interactions
plt.show()
| [
"[email protected]"
]
| |
6dc7210e4f8cd00b9dae94dcc3d074d9cbffc1d3 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02686/s844095657.py | f1ab4403c5eb0d0f166d68bd4338ad075c24471f | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 767 | py | N = int(input())
S = [input() for _ in range(N)]
def solve() :
T = []
for s in S :
open = 0
close = 0
for c in s :
if c == ')' :
if open > 0 :
open -= 1
else :
close += 1
else :
open += 1
T.append((open, close))
if sum(op - cl for op, cl in T) != 0 :
return 'No'
inc = []
dec = []
for op, cl in T :
if op >= cl :
inc.append((cl, op))
else :
dec.append((op, cl))
inc.sort()
open = 0
for cl, op in inc :
if open >= cl :
open += op - cl
else :
return 'No'
close = 0
dec.sort()
for op, cl in dec :
if close >= op :
close += cl - op
else :
return 'No'
return 'Yes'
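# Hedged worked example (assumed stdin format: N, then N bracket strings):
#   2
#   )
#   (
# gives T = [(0, 1), (1, 0)]; the totals balance, "(" is consumed in the
# increasing pass and ")" in the decreasing pass, so solve() prints "Yes".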
print(solve()) | [
"[email protected]"
]
| |
fd06fd94704d0b738825aa9fc484c78bdf8ee26e | 933f2a9f155b2a4f9746bf2020d1b828bfe49e81 | /python基础/day1/if 语句.py | fb1eb4aef52c801e5390c18364af092d427d9f15 | []
| no_license | WuAlin0327/python3-notes | d65ffb2b87c8bb23d481ced100d17cda97aef698 | 1d0d66900f6c4b667b3b84b1063f24ee7823e1bb | refs/heads/master | 2020-03-26T04:49:34.937700 | 2018-12-31T11:12:58 | 2018-12-31T11:12:58 | 144,524,404 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 204 | py | monny = int(input("你有多少钱:"))
if monny > 5000:
    print("I want to buy a MacBook")
elif monny >= 3000:
    print("I want to buy an iPad")
elif monny >= 2000:
    print("I want to buy a phone")
else:
    print("no money") | [
"[email protected]"
]
| |
b3d6b5d093ad2b6d91232e7018a503b54c028c46 | e1faf332197eef7c57d63d562b42d2cb227f0f1a | /zolo/base.py | ca4421ff76cf2e12a3e8bb966fcf15848765e781 | [
"MIT"
]
| permissive | firefirer1983/zolo | 29ae6cfa5568a28d2a0b53a51054edf784a03ce0 | 889409b491363eb54c2997e01333b77bc81e0c89 | refs/heads/main | 2023-07-15T04:52:56.039301 | 2021-08-31T14:17:48 | 2021-08-31T14:17:48 | 347,134,578 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,002 | py | from abc import ABC, abstractmethod
from datetime import datetime
from typing import List, Iterable, ItemsView, Union, Type, Callable, Dict
import logging
from zolo.consts import BLOCKING_ORDER_TIMEOUT
from .dtypes import Timer, Message
from .hub import evt_hub
from .utils import create_timer, create_filter
from .dtypes import Bar, OrderBook
from .adapters import Adapter
from .benchmarks import BenchmarkType
from .dtypes import Fill, Trade, Tick, Order, Credential, Qty, \
Position, Margin, InstrumentInfo, Lot
from .indicators import IndicatorType
from .posts import OrderPostType
log = logging.getLogger(__name__)
class Sink(ABC):
def on_tick(self, tick: Tick):
raise NotImplementedError
def on_bar(self, bar: Bar):
raise NotImplementedError
def on_fill(self, fill: Fill):
raise NotImplementedError
def on_trade(self, trade: Trade):
raise NotImplementedError
def on_book(self, book: OrderBook):
raise NotImplementedError
def on_order(self, order: Order):
raise NotImplementedError
    def on_timer(self, ts: datetime):
        raise NotImplementedError
    def on_message(self, msg: Message):
        # required by Strategy.register_cmd at the bottom of this module
        raise NotImplementedError
class TickSink(Sink):
@abstractmethod
def on_tick(self, tick: Tick):
pass
class BarSink(Sink):
@abstractmethod
def on_bar(self, tick: Tick):
pass
class FillSink(Sink):
@abstractmethod
def on_fill(self, fill: Fill):
pass
class TradeSink(Sink):
@abstractmethod
def on_trade(self, trade: Trade):
pass
class OrderSink(Sink):
@abstractmethod
def on_order(self, order: Order):
pass
class TimerSink(Sink):
@abstractmethod
def on_timer(self, ts: datetime):
pass
class CommissionScheme(ABC):
@abstractmethod
def calc_commission(self, price: float, qty: float) -> float:
pass
class PnlScheme(ABC):
@abstractmethod
def calc_pnl(
self, avg_entry_price: float, price: float, qty: float
) -> float:
pass
class BrokerContext(ABC):
@abstractmethod
def refresh(self, ts: datetime):
pass
@abstractmethod
def post_order(
self, order: OrderPostType, timeout: float = 0, **kwargs
) -> Order:
pass
@abstractmethod
def get_pending_orders(self) -> List[str]:
pass
@abstractmethod
def get_order(self, client_oid: str) -> Order:
pass
@abstractmethod
def indicator(
self, ind: Union[str, IndicatorType], **kwargs
) -> IndicatorType:
pass
@abstractmethod
def register_on_bar(self, granularity: int, on_bar: Callable):
pass
@abstractmethod
def register_on_tick(self, on_tick: Callable):
pass
@abstractmethod
def register_on_book(self, on_book: Callable):
pass
@abstractmethod
def register_on_trade(self, api_key: str, on_trade: Callable):
pass
@abstractmethod
def benchmark(
self, bch: Union[str, Type[BenchmarkType]], api_key: str, **kwargs
) -> BenchmarkType:
pass
@property
@abstractmethod
def indicators(self) -> ItemsView[int, IndicatorType]:
pass
@property
@abstractmethod
def benchmarks(self) -> ItemsView[int, BenchmarkType]:
pass
@property
@abstractmethod
def instrument_info(self) -> InstrumentInfo:
pass
@property
@abstractmethod
def adapter(self) -> Adapter:
pass
@property
@abstractmethod
def credential(self) -> Credential:
pass
@property
@abstractmethod
def exchange(self) -> str:
pass
@property
@abstractmethod
def instrument_id(self) -> str:
pass
@property
@abstractmethod
def market(self) -> str:
pass
@property
@abstractmethod
def pos_side(self) -> str:
pass
@abstractmethod
def on_order(self, order: Order):
pass
@abstractmethod
def get_trade(self) -> Iterable[Trade]:
pass
# Backtest/Dryrun support only!
def deposit(self, amount: float):
raise NotImplementedError
class Broker(ABC):
@property
@abstractmethod
def adapter(self):
pass
@property
@abstractmethod
def context(self) -> "BrokerContext":
pass
@abstractmethod
def clone(
self, unique_id: str, instrument_id: str, pos_side: str = "",
credential: Credential = None, trade_registry_scheme: str = "",
) -> "Broker":
pass
@property
@abstractmethod
def max_optimal_depth(self) -> int:
pass
@abstractmethod
def get_all_instruments(self) -> Dict[str, InstrumentInfo]:
pass
@abstractmethod
def get_trade(self) -> Iterable[Trade]:
pass
@abstractmethod
def get_tick(self, instrument_id: str = "") -> Tick:
pass
@abstractmethod
def get_ticks(self, *instruments, pricing: str = "avg") -> List[Tick]:
pass
@abstractmethod
def get_available_balance(self, instrument_id: str = "") -> float:
pass
@abstractmethod
def info_instrument(self, instrument_id: str) -> InstrumentInfo:
pass
@abstractmethod
def transfer_asset_to_future_margin(
self, instrument_id: str, amount: float
) -> float:
pass
@abstractmethod
def transfer_asset_to_swap_margin(
self, instrument_id: str, amount: float
) -> float:
pass
@abstractmethod
def transfer_margin_to_asset(
self, instrument_id: str, amount: float
) -> float:
pass
@abstractmethod
def get_book(self, instrument_id: str, depth: int) -> OrderBook:
pass
@abstractmethod
def on_timer(self, ts: datetime):
pass
@abstractmethod
def on_order(self, order: Order):
pass
@abstractmethod
def on_fill(self, fill: Fill):
pass
@abstractmethod
def register_context(
self,
unique_id: str,
instrument_id: str,
pos_side: str,
trade_registry: str,
credential: Credential,
) -> "BrokerContext":
pass
@abstractmethod
def register_indicator(self, ind: IndicatorType):
pass
@abstractmethod
def register_benchmark(self, sink: BenchmarkType):
pass
@abstractmethod
def register_on_bar(self, granularity: int, on_bar: Callable):
pass
@abstractmethod
def register_on_tick(self, on_tick: Callable):
pass
@abstractmethod
def register_on_book(self, on_book: Callable):
pass
@abstractmethod
def register_on_trade(self, api_key: str, on_trade: Callable):
pass
@abstractmethod
def register_on_order(self, api_key: str, on_order: Callable):
pass
@abstractmethod
def register_on_fill(self, api_key: str, on_fill: Callable):
pass
@abstractmethod
def indicator(
self, ind: Union[str, IndicatorType], **kwargs
) -> IndicatorType:
pass
@abstractmethod
def benchmark(
self, bch: Union[str, BenchmarkType], api_key: str, **kwargs
) -> BenchmarkType:
pass
@abstractmethod
def list_active_indicators(self) -> ItemsView[int, IndicatorType]:
pass
@abstractmethod
def list_active_benchmarks(self) -> ItemsView[int, BenchmarkType]:
pass
@abstractmethod
def estimate_lot(self, size: float, price: float = 0) -> Lot:
pass
@abstractmethod
def buy_market(
self,
qty: Qty,
timeout: float = BLOCKING_ORDER_TIMEOUT,
slippage: float = 0,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def sell_market(
self,
qty: Qty,
timeout: float = BLOCKING_ORDER_TIMEOUT,
slippage: float = 0,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def buy_limit(
self,
price: float,
qty: Qty,
timeout: float
) -> Order:
pass
@abstractmethod
def sell_limit(
self,
price: float,
qty: Qty,
timeout: float
) -> str:
pass
@abstractmethod
def buy_limit_ioc(
self,
price: float,
qty: Qty,
timeout: float,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def sell_limit_ioc(
self,
price: float,
qty: Qty,
timeout: float,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def buy_limit_fok(
self,
price: float,
qty: Qty,
timeout: float,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def sell_limit_fok(
self,
price: float,
qty: Qty,
timeout: float,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def buy_opponent_ioc(
self,
qty: Qty,
timeout: float = BLOCKING_ORDER_TIMEOUT,
slippage: float = 0,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def sell_opponent_ioc(
self,
qty: Qty,
timeout: float = BLOCKING_ORDER_TIMEOUT,
slippage: float = 0,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def buy_opponent_fok(
self,
qty: Qty,
timeout: float = BLOCKING_ORDER_TIMEOUT,
slippage: float = 0,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def sell_opponent_fok(
self,
qty: Qty,
timeout: float = BLOCKING_ORDER_TIMEOUT,
slippage: float = 0,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def buy_optimal_ioc(
self,
depth: int,
qty: Qty,
timeout: float = BLOCKING_ORDER_TIMEOUT,
slippage: float = 0,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def sell_optimal_ioc(
self,
depth: int,
qty: Qty,
timeout: float = BLOCKING_ORDER_TIMEOUT,
slippage: float = 0,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def buy_optimal_fok(
self,
depth: int,
qty: Qty,
timeout: float = BLOCKING_ORDER_TIMEOUT,
slippage: float = 0,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@abstractmethod
def sell_optimal_fok(
self,
depth: int,
qty: Qty,
timeout: float = BLOCKING_ORDER_TIMEOUT,
slippage: float = 0,
step: Qty = 0,
period: float = 0,
) -> Order:
pass
@property
@abstractmethod
def exchange(self) -> str:
pass
@property
@abstractmethod
def market(self) -> str:
pass
def deposit(self, amount: float):
raise RuntimeError
@abstractmethod
def post_order(
self, order: OrderPostType, timeout: float = 0, **kwargs
) -> Order:
pass
@abstractmethod
def get_pending_orders(self) -> List[str]:
pass
@abstractmethod
def get_order_by_id(self, client_oid: str) -> Order:
pass
@abstractmethod
def cancel_order(self, client_oid: str):
pass
@abstractmethod
def cancel_all_orders(self):
pass
@abstractmethod
def get_position(self) -> Position:
pass
@abstractmethod
def get_margin(self) -> Margin:
pass
@abstractmethod
def set_leverage(self, lv: float):
pass
@abstractmethod
def get_leverage(self):
pass
class Strategy(ABC):
def __init__(self):
self._brokers: List[Broker] = list()
@abstractmethod
def on_start(self):
pass
@abstractmethod
def on_stop(self):
pass
def register_broker(self, *brokers: Broker):
for brk in brokers:
self._brokers.append(brk)
brk.register_on_order(brk.context.credential.api_key, brk.on_order)
brk.register_on_fill(brk.context.credential.api_key, brk.on_fill)
            # the broker needs to keep refreshing order state.
evt_hub.attach_sink(Timer, create_timer(timeout=1), brk.on_timer)
@staticmethod
def register_timer(sink: Sink, timeout: int):
assert timeout
evt_hub.attach_sink(Timer, create_timer(timeout=timeout), sink.on_timer)
@staticmethod
def register_cmd(sink: Sink, cmd: str):
assert cmd
evt_hub.attach_sink(Message, create_filter(cmd=cmd), sink.on_message)
@property
def brokers(self) -> List[Broker]:
return self._brokers
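# Hedged sketch of a concrete strategy (a minimal illustration; Broker
# instances and their wiring come from the surrounding framework):
#
#   class EchoStrategy(Strategy, TickSink):
#       def on_start(self):
#           log.info("strategy started")
#       def on_stop(self):
#           log.info("strategy stopped")
#       def on_tick(self, tick: Tick):
#           log.info("tick received: %s", tick)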
| [
"[email protected]"
]
| |
cf37da3f5b81520ea9ba19cc258a0363291042d6 | 89bcfc45d70a3ca3f0f1878bebd71aa76d9dc5e2 | /scrapy_demo/sina_news/sina_news/middlewares.py | 819f2c24c10afaf9fbaced6ef0f1b0f49ec5c423 | []
| no_license | lichao20000/python_spider | dfa95311ab375804e0de4a31ad1e4cb29b60c45b | 81f3377ad6df57ca877463192387933c99d4aff0 | refs/heads/master | 2022-02-16T20:59:40.711810 | 2019-09-10T03:13:07 | 2019-09-10T03:13:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,601 | py | # -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class SinaNewsSpiderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the spider middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_spider_input(self, response, spider):
# Called for each response that goes through the spider
# middleware and into the spider.
# Should return None or raise an exception.
return None
def process_spider_output(self, response, result, spider):
# Called with the results returned from the Spider, after
# it has processed the response.
# Must return an iterable of Request, dict or Item objects.
for i in result:
yield i
def process_spider_exception(self, response, exception, spider):
# Called when a spider or process_spider_input() method
# (from other spider middleware) raises an exception.
# Should return either None or an iterable of Response, dict
# or Item objects.
pass
def process_start_requests(self, start_requests, spider):
# Called with the start requests of the spider, and works
# similarly to the process_spider_output() method, except
# that it doesn’t have a response associated.
# Must return only requests (not items).
for r in start_requests:
yield r
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
class SinaNewsDownloaderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the downloader middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_request(self, request, spider):
# Called for each request that goes through the downloader
# middleware.
# Must either:
# - return None: continue processing this request
# - or return a Response object
# - or return a Request object
# - or raise IgnoreRequest: process_exception() methods of
# installed downloader middleware will be called
return None
def process_response(self, request, response, spider):
# Called with the response returned from the downloader.
# Must either;
# - return a Response object
# - return a Request object
# - or raise IgnoreRequest
return response
def process_exception(self, request, exception, spider):
# Called when a download handler or a process_request()
# (from other downloader middleware) raises an exception.
# Must either:
# - return None: continue processing this exception
# - return a Response object: stops process_exception() chain
# - return a Request object: stops process_exception() chain
pass
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
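# Hedged usage note (standard Scrapy wiring; the priority values are the
# scrapy template defaults, not taken from this project's settings.py):
#
#   SPIDER_MIDDLEWARES = {
#       'sina_news.middlewares.SinaNewsSpiderMiddleware': 543,
#   }
#   DOWNLOADER_MIDDLEWARES = {
#       'sina_news.middlewares.SinaNewsDownloaderMiddleware': 543,
#   }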
| [
"[email protected]"
]
| |
e1ee11936044cee591fa34caea14fe7c48692724 | a990bd26d3a69d1ea6699c85efa2cea99452c3df | /pytriplets/pythagoreanTriplets.py | 9800357dc2720a809abc7bcffed191203f31baa3 | []
| no_license | abecus/DS-and-Algorithms | 5f1a948a085465ae165090ec957a9d5307ce729d | 3259e8183382265a27cf8c91e37d0086175a5703 | refs/heads/master | 2022-05-05T07:07:08.194243 | 2022-04-05T16:23:39 | 2022-04-05T16:23:39 | 193,111,610 | 11 | 6 | null | 2020-11-18T16:19:18 | 2019-06-21T14:27:25 | Python | UTF-8 | Python | false | false | 2,315 | py | from math import ceil, sqrt
def EratosthenesSieve(N:int)-> list:
'''
Calculating SPF (Smallest Prime Factor)
for every number till N.
Time Complexity : O(NloglogN)
'''
N+=1
# stores smallest prime factor for every number
spf = [*range(N)]
# separately marking spf for every even number as 2
for i in range(4, N, 2):
spf[i] = 2
for i in range(3, ceil(sqrt(N))):
# checking if i is prime
if (spf[i] == i):
# marking SPF for all numbers divisible by i
for j in range(i * i, N, i):
# marking spf[j] if it is not previously marked
if (spf[j] == j):
spf[j] = i
return spf
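# Hedged sanity check of the sieve (values verified by hand):
#   spf = EratosthenesSieve(12)
#   spf[12] == 2, spf[9] == 3, spf[7] == 7   # smallest prime factors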
def getReducedFactorization(N:int, spf:list)-> int:
"""
counts repetition of each prime from prime factorisation of N
using trial method upon spf list, and calculating the ceil of
half of all prime's powers (pow(p, ceil(a/2))) and multiplying
them together.
"""
gamma = 1
while (N!=1):
# keep a prime in prev variable
prev=spf[N]
# for counting the power
c=0
# counts power of a prime
while spf[N]==prev:
c+=1
N//=spf[N]
# multiplies the half ceil of power on primes
gamma*=pow(prev, ceil(c/2))
prev=spf[N]
return gamma
def pythagoreanTriplets(n):
# calculate spf array
spf=EratosthenesSieve((n - int(sqrt((n<<1) -1)))<<1)
# keeps the triplet count
tripletCount=0
	# loop over every value of 2*b
for b2 in range(4, (n - int(sqrt((n<<1) -1)))<<1, 2):
# calculates reduced factor of 2*b
gamma=getReducedFactorization(b2, spf)
		# find all triplets arising from this 2*b
for i in range(1, int(sqrt(b2*((b2>>1)-1)))//gamma+1):
i*=gamma
sqVal = i*i
q=sqVal//b2
			# if z = q+i+(b2>>1) > n, stop scanning; otherwise record the triplet
			if q+i+(b2>>1)>n:
				break
			else:
				# comment out the print below to count silently
				x=q+i
				print((x, (b2>>1)+i, x+(b2>>1)))
				tripletCount+=1
return tripletCount
if __name__ == "__main__":
n=100
print(pythagoreanTriplets(n)) | [
"[email protected]"
]
| |
4ce7e9375fb540a78e89c6052c9ac31834889e7a | 90f2cbe1c940a20dcc893837b6033a51d3233931 | /python 进阶/面向对象5.py | e0cdc5256497aaf75db084c7e20d655c6faec438 | []
| no_license | MaxNcu/Learn_Python | 71501f38f6442f3ff2a1de1ff685b8975e50af20 | 5a1c6edf353ed7447b2ffd4126ad7668d8c5a407 | refs/heads/master | 2022-01-15T18:56:04.814476 | 2019-07-20T03:02:02 | 2019-07-20T03:02:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,178 | py | # -*- coding: utf-8 -*-
# @Time : 2018/5/3 0003 17:27
# @Author : Langzi
# @Blog : www.langzi.fun
# @File : 面向对象5.py
# @Software: PyCharm
import sys
import requests
reload(sys)
sys.setdefaultencoding('utf-8')
class gg:
url = 0
stat = 0
    # the class attributes are defined up front because the classmethod
    # alternative constructor below builds new instances from this class
    def __init__(self,url=0,stat=0):
        # an ordinary constructor definition
        self.url=url
        self.stat=stat
    # the decorator turns the function below into a class method
    @classmethod
    def split(cls,info):
        # this method takes two parameters: cls is the class itself (so
        # cls(...) invokes __init__), and info is the value passed in by
        # the caller
        url,stat=map(str,info.split('-'))
        # parse the string into a structured form
        data = cls(url,stat)
        # the constructor needs two arguments, so both are passed in
        return data
        # the new instance is returned directly
    def outer(self):
        print self.url
        print self.stat
r = gg.split('langzi-200')
r.outer()
# calling the class method looks just like calling an instance method
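# Expected output of the sample above:
#   langzi
#   200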
| [
"[email protected]"
]
| |
9d3d24b465ffb8dc9148555c52358627c3f4e05b | 3551f1150dee2772b1949a199250e4960a71989e | /focusgrouplogs/web.py | 0391b7ec78d6461332a0c6ec9d81af8e275f140c | [
"MIT"
]
| permissive | ccpgames/focusgrouplogs-frontend | 868f4398fb5e965f3a27f66bbba46086dc6906c6 | 42bd2bac04bbdc49d87ed9218f6b32a1d239c1ee | refs/heads/master | 2021-01-17T06:38:04.869762 | 2018-05-08T18:02:28 | 2018-05-08T18:02:28 | 50,437,131 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,456 | py | """Web routes for focusgrouplogs."""
import os
import sys
import traceback
from flask import render_template
from flask import Response
from focusgrouplogs import app
from focusgrouplogs import cache
from focusgrouplogs import FOCUS_GROUPS
from focusgrouplogs.datastore import all_content
from focusgrouplogs.datastore import log_content
from focusgrouplogs.datastore import log_metadata
@cache.cached(timeout=None, key_prefix="inline-css")
def get_style():
"""Reads and returns the inline css styling."""
style = os.path.join(os.path.dirname(__file__), "templates", "style.css")
with open(style, "r") as opencss:
return opencss.read().strip()
@app.route("/<regex('({})'):group>/<date>/".format("|".join(FOCUS_GROUPS)),
methods=["GET"])
@cache.memoize(timeout=60)
def group_get(group, date):
"""Displays the most recent day for a group (or specific)."""
if date is None:
group_logs = all_content(group)
else:
group_logs = [log_content(group, date)]
return render_template(
"logs.html",
focus_group=group,
log_days=group_logs,
css=get_style(),
)
@app.route("/", methods=["GET"])
@cache.cached(timeout=3600)
def main_index():
"""Displays links to the focus groups, fairly static."""
return render_template(
"index.html",
groups=[{"name": f, "logs": log_metadata(f)} for f in FOCUS_GROUPS],
css=get_style(),
)
@app.route("/ping", methods=["GET"])
def ping_response():
"""Return a static 200 OK response."""
return Response("ok", status=200)
def traceback_formatter(excpt, value, tback):
"""Catches all exceptions and re-formats the traceback raised."""
sys.stdout.write("".join(traceback.format_exception(excpt, value, tback)))
def hook_exceptions():
"""Hooks into the sys module to set our formatter."""
if hasattr(sys.stdout, "fileno"): # when testing, sys.stdout is StringIO
# reopen stdout in non buffered mode
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
# set the hook
sys.excepthook = traceback_formatter
def paste(*_, **settings):
"""For paste, start and return the Flask app."""
hook_exceptions()
return app
def main():
"""Debug/cmdline entry point."""
paste().run(
host="0.0.0.0",
port=8080,
debug=True,
use_reloader=False,
)
if __name__ == "__main__":
main()
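# Hedged note: the "regex" URL converter used in group_get() is not built
# into Flask; the package presumably registers one on `app`, along these
# lines (a sketch, not the project's actual code):
#
#   from werkzeug.routing import BaseConverter
#
#   class RegexConverter(BaseConverter):
#       def __init__(self, url_map, *items):
#           super(RegexConverter, self).__init__(url_map)
#           self.regex = items[0]
#
#   app.url_map.converters["regex"] = RegexConverter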
| [
"[email protected]"
]
| |
39fd5781c172d7c39966c2f8e8ac762b9ae943b6 | 6b64338c3453d896310a381929fdf61cd846bbb7 | /biaobei-pretrain/tacotron/utils/symbols.py | f1e84a10e8d9e07c6bc1ba5b035ec7a4a17c205e | []
| no_license | Tubbz-alt/Taco_Collection | b0e9234ca8309300783b6a258adb0255d3119f93 | fb30bab5231c5c22ff03184f428aa43a0700d47d | refs/heads/master | 2022-02-28T21:41:15.275047 | 2019-09-23T14:45:54 | 2019-09-23T14:45:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 892 | py | '''
Defines the set of symbols used in text input to the model.
The default is a set of ASCII characters that works well for English or text that has been run
through Unidecode. For other data, you can modify _characters. See TRAINING_DATA.md for details.
'''
import os
import glob
AUTO_DETECT_SYMBOLS=True
train_text_files = glob.glob(os.path.join("../../female_golden_v2","*.corpus"))
if train_text_files and AUTO_DETECT_SYMBOLS:
_characters = set()
for file in train_text_files:
with open(file,"rb") as fin:
for line in fin:
line = line.decode().split("|")[1]
_characters = _characters.union(line)
else:
_characters = "12345abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ,。!? #*$%"
print(_characters)
_pad = "_"
_eos = "~"
symbols = [_pad, _eos] + sorted(_characters)  # sorted so the symbol-to-id mapping is stable across runs
print("all symbols is {}".format(symbols))
| [
"[email protected]"
]
| |
165877fef4819cb2279ba767229c479c1f17b7e2 | a26c8bbd67c614354c72c3eade71981adea28eea | /src/main/resources/devops-as-code/add_ci_to_env.py | 9f51d350a015afba8071c5eea1e0e26b41597529 | [
"MIT"
]
| permissive | xebialabs-community/xld-ansible-step-plugin | 481e25350728f50cebcb5a15c64e8d388b2d16ed | c05eec5767214ed91f6e42819212bf0bc5d164b6 | refs/heads/master | 2021-06-16T01:56:12.030162 | 2021-02-08T12:19:36 | 2021-02-08T12:40:12 | 140,391,653 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,061 | py | #
# Copyright 2021 XEBIALABS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import com.xebialabs.deployit.plugin.api.reflect.Type as Type
def query_all_containers(ci_id, results):
# print("query {0}".format(ci_id))
result = repositoryService.query(Type.valueOf('udm.Container'), ci_id, None, '', None, None, 0, -1)
sub_result = []
for sub_ci in result:
results.append(sub_ci)
query_all_containers(sub_ci.id, sub_result)
results.extend(sub_result)
print("environment {0}".format(environment))
print("provisioned_host {0}".format(provisioned_host))
list_of_ci = []
query_all_containers(provisioned_host.id, list_of_ci)
members = environment.members
boundConfigurationItems = deployed.boundConfigurationItems
for ci in list_of_ci:
print("Found {0}".format(ci))
read_ci = repositoryService.read(ci.id)
members.add(read_ci)
boundConfigurationItems.add(read_ci)
environment.members = members
deployed.boundConfigurationItems = boundConfigurationItems
print(environment.members)
repositoryService.update([environment])
| [
"[email protected]"
]
| |
9d5c116670e57e518c30bc5967961c6a87ecc804 | 60a831fb3c92a9d2a2b52ff7f5a0f665d4692a24 | /IronPythonStubs/release/stubs.min/System/__init___parts/PlatformNotSupportedException.py | 01352985b0163c59e7d7ded23ed6bdf1af466c25 | [
"MIT"
]
| permissive | shnlmn/Rhino-Grasshopper-Scripts | a9411098c5d1bbc55feb782def565d535b27b709 | 0e43c3c1d09fb12cdbd86a3c4e2ba49982e0f823 | refs/heads/master | 2020-04-10T18:59:43.518140 | 2020-04-08T02:49:07 | 2020-04-08T02:49:07 | 161,219,695 | 11 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,210 | py | class PlatformNotSupportedException(NotSupportedException,ISerializable,_Exception):
"""
The exception that is thrown when a feature does not run on a particular platform.
PlatformNotSupportedException()
PlatformNotSupportedException(message: str)
PlatformNotSupportedException(message: str,inner: Exception)
"""
def add_SerializeObjectState(self,*args):
""" add_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def remove_SerializeObjectState(self,*args):
""" remove_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,message=None,inner=None):
"""
__new__(cls: type)
__new__(cls: type,message: str)
__new__(cls: type,message: str,inner: Exception)
__new__(cls: type,info: SerializationInfo,context: StreamingContext)
"""
pass
def __reduce_ex__(self,*args):
pass
def __str__(self,*args):
pass
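# Usage sketch (illustrative; the stub above only documents signatures):
#   raise PlatformNotSupportedException("This feature is unavailable on this platform")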
4287da8a49bf1158ca40e7e1ea59381b3a4f26dd | 58afefdde86346760bea40690b1675c6639c8b84 | /leetcode/magical-string/286608581.py | a40ef5d904e5cc169a0d044c1fefbec7c63a3043 | []
| no_license | ausaki/data_structures_and_algorithms | aaa563f713cbab3c34a9465039d52b853f95548e | 4f5f5124534bd4423356a5f5572b8a39b7828d80 | refs/heads/master | 2021-06-21T10:44:44.549601 | 2021-04-06T11:30:21 | 2021-04-06T11:30:21 | 201,942,771 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 858 | py | # title: magical-string
# detail: https://leetcode.com/submissions/detail/286608581/
# datetime: Tue Dec 17 18:06:50 2019
# runtime: 108 ms
# memory: 27.3 MB
class Solution:
    # Cache shared across calls: each entry is [value, count_of_ones_up_to_index]
    magical_string = [[1, 1], [2, 1], [2, 1]]
index = 2
def magicalString(self, n: int) -> int:
i = self.index
magical_string = self.magical_string
while i < n:
j = magical_string[i][0]
k = 3 - magical_string[-1][0]
magical_string.append([k, 0])
if j == 2: magical_string.append([k, 0])
if magical_string[i][0] == 1:
magical_string[i][1] = magical_string[i - 1][1] + 1
else:
magical_string[i][1] = magical_string[i - 1][1]
i += 1
self.__class__.index = i
# print(magical_string)
        return magical_string[n - 1][1]
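# Usage sketch (illustrative; not part of the submitted solution).
# The class-level cache means later calls extend the string already built.
if __name__ == '__main__':
    s = Solution()
    print(s.magicalString(6))  # 3 -- "122112" contains three 1s
    print(s.magicalString(1))  # 1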
7c62641e69e5474f174e7f451b799644b70e2c93 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/intermitt.py | 85759ce65aff2cf6852ec5b6f5d92a234f43ab13 | []
| no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 295 | py | ii = [('ProuWCM.py', 1), ('LeakWTI3.py', 2), ('PeckJNG.py', 7), ('ClarGE2.py', 4), ('KiddJAE.py', 1), ('LyelCPG.py', 4), ('SoutRD2.py', 1), ('BackGNE.py', 1), ('LeakWTI4.py', 2), ('LeakWTI.py', 2), ('WheeJPT.py', 1), ('FitzRNS4.py', 1), ('AinsWRR2.py', 1), ('MereHHB2.py', 2), ('ClarGE3.py', 2)]
7bafee4b562b3ea3a9bc7a61db78cfd37ada3ea3 | 7b74696ff2ab729396cba6c203984fce5cd0ff83 | /tradeaccounts/migrations/0049_auto_20200607_1333.py | 46a52b8fab8cd24ab327a01dd31af3e81a8c975e | [
"MIT"
]
| permissive | webclinic017/investtrack | e9e9a7a8caeecaceebcd79111c32b334c4e1c1d0 | 4aa204b608e99dfec3dd575e72b64a6002def3be | refs/heads/master | 2023-06-18T12:57:32.417414 | 2021-07-10T14:26:53 | 2021-07-10T14:26:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 519 | py | # Generated by Django 3.0.2 on 2020-06-07 05:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tradeaccounts', '0048_auto_20200607_1302'),
]
operations = [
migrations.AlterField(
model_name='tradeaccountsnapshot',
name='applied_period',
field=models.CharField(blank=True, choices=[('m', '月'), ('d', '日'), ('w', '周')], default='d', max_length=1, verbose_name='收益周期'),
),
]
f852fac9e6a96838dd00315f50b067ab55750dbd | 3147716595f28ebb81516ec15e4f454358967b1e | /blogit/models.py | 07e206423ef39238ea32b9a8c1632946afbe1bd0 | [
"BSD-3-Clause"
]
| permissive | selectnull/djangocms-blogit | 04bef2b4ab3ad4c70f4ac39a4d88bc7ad4aa796b | 599da6acd756c611d74219c7378e0a0771090bcd | refs/heads/master | 2021-01-17T17:22:31.414023 | 2019-04-18T10:58:41 | 2019-04-18T10:58:41 | 39,001,497 | 0 | 0 | null | 2015-07-13T08:52:59 | 2015-07-13T08:52:59 | null | UTF-8 | Python | false | false | 9,133 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from cms.models.fields import PlaceholderField
from cms.utils.i18n import get_current_language
from django.conf import settings
from django.db import models
from django.urls import reverse
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.translation import ugettext_lazy as _
from filer.fields.image import FilerImageField
from mptt.models import MPTTModel, TreeForeignKey
from parler.managers import TranslatableManager
from parler.models import TranslatableModel, TranslatedFields
from parler.utils.context import switch_language
from blogit import settings as bs
from blogit.managers import PostManager
from blogit.utils import get_text_from_placeholder
try:
from django.utils.encoding import force_unicode
except ImportError:
from django.utils.encoding import force_text as force_unicode
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
@python_2_unicode_compatible
class Category(MPTTModel, TranslatableModel):
"""
Category
"""
active = models.BooleanField(_('Active'), default=True, help_text=bs.ACTIVE_FIELD_HELP_TEXT)
date_added = models.DateTimeField(_('Date added'), auto_now_add=True)
last_modified = models.DateTimeField(_('Last modified'), auto_now=True)
parent = TreeForeignKey(
'self',
models.SET_NULL,
blank=True,
null=True,
related_name='children',
verbose_name=_('Parent'),
)
translations = TranslatedFields(
name=models.CharField(_('Name'), max_length=255),
slug=models.SlugField(_('Slug'), db_index=True),
description=models.TextField(_('description'), blank=True),
meta={'unique_together': [('slug', 'language_code')]},
)
objects = TranslatableManager()
class Meta:
db_table = 'blogit_categories'
verbose_name = _('Category')
verbose_name_plural = _('Categories')
def __str__(self):
return self.safe_translation_getter('name', any_language=True)
def get_absolute_url(self, language=None):
if not language:
language = get_current_language()
with switch_language(self, language):
return reverse('blogit_category_detail', args=[self.get_path()])
def get_path(self):
"""
        Returns full url path for category.
"""
path = []
for obj in self.get_ancestors(include_self=True):
path.append(obj.safe_translation_getter('slug', ''))
return '/'.join(path)
@python_2_unicode_compatible
class Tag(TranslatableModel):
"""
Tag
"""
active = models.BooleanField(_('Active'), default=True, help_text=bs.ACTIVE_FIELD_HELP_TEXT)
date_added = models.DateTimeField(_('Date added'), auto_now_add=True)
last_modified = models.DateTimeField(_('Last modified'), auto_now=True)
translations = TranslatedFields(
name=models.CharField(_('Name'), max_length=255),
slug=models.SlugField(_('Slug'), db_index=True),
description=models.TextField(_('description'), blank=True),
meta={'unique_together': [('slug', 'language_code')]},
)
objects = TranslatableManager()
class Meta:
db_table = 'blogit_tags'
verbose_name = _('Tag')
verbose_name_plural = _('Tags')
def __str__(self):
return self.safe_translation_getter('name', any_language=True)
def get_absolute_url(self, language=None):
if not language:
language = get_current_language()
with switch_language(self, language):
return reverse('blogit_tag_detail', kwargs={'slug': self.safe_translation_getter('slug')})
@python_2_unicode_compatible
class Post(TranslatableModel):
"""
Post
"""
DRAFT = 0 # Post is visible to staff
PRIVATE = 1 # Post is visible to author only
PUBLIC = 2 # Post is public
HIDDEN = 3 # Post is hidden from everybody
STATUS_CODES = (
(DRAFT, _('Draft')),
(PRIVATE, _('Private')),
(PUBLIC, _('Public')),
(HIDDEN, _('Hidden')),
)
date_added = models.DateTimeField(_('Date added'), auto_now_add=True)
last_modified = models.DateTimeField(_('Last modified'), auto_now=True)
status = models.IntegerField(_('Status'), choices=STATUS_CODES, default=DRAFT, help_text=_(
'When draft post is visible to staff only, when private to author only, and when public to everyone.'))
date_published = models.DateTimeField(_('Published on'), default=timezone.now)
category = TreeForeignKey(Category, models.SET_NULL, blank=True, null=True, verbose_name=_('Category'))
tags = models.ManyToManyField(Tag, blank=True, related_name='tagged_posts', verbose_name=_('Tags'))
author = models.ForeignKey(USER_MODEL, models.SET_NULL, blank=True, null=True, verbose_name=_('Author'))
featured_image = FilerImageField(
on_delete=models.SET_NULL,
blank=True,
null=True,
verbose_name=_('Featured Image'),
)
translations = TranslatedFields(
title=models.CharField(_('Title'), max_length=255),
slug=models.SlugField(_('Slug'), db_index=True),
description=models.TextField(_('Description'), blank=True),
meta_title=models.CharField(_('Meta title'), max_length=255, blank=True),
meta_description=models.TextField(_('Meta description'), max_length=155, blank=True, help_text=_(
'The text displayed in search engines.')),
meta={'unique_together': [('slug', 'language_code')]},
)
body = PlaceholderField('blogit_post_body', related_name='post_body_set')
objects = PostManager()
class Meta:
db_table = 'blogit_posts'
verbose_name = _('Post')
verbose_name_plural = _('Posts')
ordering = ('-date_published', )
get_latest_by = 'date_published'
def __str__(self):
return self.name
def get_absolute_url(self, language=None):
if not language:
language = get_current_language()
with switch_language(self, language):
if bs.POST_DETAIL_DATE_URL:
return reverse('blogit_post_detail_date', kwargs={
'year': self.date_published.year,
'month': self.date_published.month,
'day': self.date_published.day,
'slug': self.safe_translation_getter('slug'),
})
return reverse('blogit_post_detail', kwargs={
'slug': self.safe_translation_getter('slug')})
def get_search_data(self, language=None, request=None):
"""
Returns search text data for current object
"""
if not self.pk:
return ''
bits = [self.name]
description = self.safe_translation_getter('description')
if description:
bits.append(force_unicode(strip_tags(description)))
if self.category:
bits.append(self.category.safe_translation_getter('name'))
description = self.category.safe_translation_getter('description')
if description:
bits.append(force_unicode(strip_tags(description)))
for tag in self.tags.all():
bits.append(tag.safe_translation_getter('name'))
description = tag.safe_translation_getter('description', '')
if description:
bits.append(force_unicode(strip_tags(description)))
bits.append(get_text_from_placeholder(self.body, language, request))
return ' '.join(bits).strip()
def get_meta_title(self):
return self.safe_translation_getter('meta_title') or self.name
def get_meta_description(self):
return self.safe_translation_getter('meta_description') or self.safe_translation_getter('description')
@property
def name(self):
return self.safe_translation_getter('title', any_language=True)
@property
def is_published(self):
return self.status == self.PUBLIC and self.date_published <= timezone.now()
@property
def previous_post(self):
return self.previous_next_posts[0]
@property
def next_post(self):
return self.previous_next_posts[1]
@property
def previous_next_posts(self):
previous_next = getattr(self, 'previous_next', None)
if previous_next is None:
if not self.is_published:
previous_next = (None, None)
setattr(self, 'previous_next', previous_next)
return previous_next
posts = list(Post.objects.public().published())
index = posts.index(self)
try:
previous = posts[index + 1]
except IndexError:
previous = None
if index:
next = posts[index - 1]
else:
next = None
previous_next = (previous, next)
setattr(self, 'previous_next', previous_next)
return previous_next
0c2a8e2225a636a1c778cc23813b081284f9b3c5 | dffc22f1e363172d91c72582f54edf088ca96ea8 | /lib/ffmpeg/vppbase.py | 4b49b69842ed9fdd3fbb8bca01a3d3fef7767102 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | FocusLuo/vaapi-fits-1 | 975d0eec398d91dcb904660b7cfda4c7b02fb01f | 52dbc2decf68cf89cb3ed7a11fa7b86e6903e829 | refs/heads/master | 2023-07-26T07:49:59.857414 | 2023-05-09T23:25:00 | 2023-05-11T06:43:06 | 170,789,770 | 0 | 0 | BSD-3-Clause | 2019-02-15T02:29:44 | 2019-02-15T02:29:44 | null | UTF-8 | Python | false | false | 3,902 | py | ###
### Copyright (C) 2022 Intel Corporation
###
### SPDX-License-Identifier: BSD-3-Clause
###
import slash
from ...lib.common import timefn, get_media, call, exe2os, filepath2os
from ...lib.ffmpeg.util import have_ffmpeg, BaseFormatMapper
from ...lib.mixin.vpp import VppMetricMixin
from ...lib import metrics2
@slash.requires(have_ffmpeg)
class BaseVppTest(slash.Test, BaseFormatMapper, VppMetricMixin):
def before(self):
self.refctx = []
self.post_validate = lambda: None
self.hwdevice = f"hw:{get_media().render_device}"
def get_input_formats(self):
return self.caps.get("ifmts", [])
def get_output_formats(self):
return self.caps.get("ofmts", [])
def gen_vpp_opts(self):
raise NotImplementedError
def gen_input_opts(self):
if self.vpp_op in ["deinterlace"]:
opts = "-c:v {ffdecoder}"
elif self.vpp_op in ["stack"]:
opts = ""
else:
opts = "-f rawvideo -pix_fmt {mformat} -s:v {width}x{height}"
opts += " -i {ossource}"
return opts
def gen_output_opts(self):
fcomplex = ["composite", "stack"]
vpfilter = self.gen_vpp_opts()
vpfilter.append("hwdownload")
vpfilter.append("format={ohwformat}")
opts = "-filter_complex" if self.vpp_op in fcomplex else "-vf"
opts += f" '{','.join(vpfilter)}'"
opts += " -pix_fmt {mformat}" if self.vpp_op not in ["csc"] else ""
opts += " -f rawvideo -fps_mode passthrough -an -vframes {frames} -y {osdecoded}"
return opts
@timefn("ffmpeg:vpp")
def call_ffmpeg(self, iopts, oopts):
if vars(self).get("decoded", None) is not None:
get_media()._purge_test_artifact(self.decoded)
self.decoded = get_media()._test_artifact2("yuv")
self.osdecoded = filepath2os(self.decoded)
iopts = iopts.format(**vars(self))
oopts = oopts.format(**vars(self))
call(
f"{exe2os('ffmpeg')} -hwaccel {self.hwaccel}"
f" -init_hw_device {self.hwaccel}={self.hwdevice}"
f" -hwaccel_output_format {self.hwaccel}"
f" -v verbose {iopts} {oopts}"
)
def validate_caps(self):
ifmts = self.get_input_formats()
ofmts = self.get_output_formats()
self.ifmt = self.format
self.ofmt = self.format if "csc" != self.vpp_op else self.csc
self.mformat = self.map_format(self.format)
if self.mformat is None:
slash.skip_test(f"ffmpeg.{self.format} unsupported")
if self.vpp_op in ["csc"]:
self.ihwformat = self.map_format(self.ifmt if self.ifmt in ifmts else None)
self.ohwformat = self.map_format(self.ofmt if self.ofmt in ofmts else None)
else:
self.ihwformat = self.map_best_hw_format(self.ifmt, ifmts)
self.ohwformat = self.map_best_hw_format(self.ofmt, ofmts)
if self.ihwformat is None:
slash.skip_test(f"{self.ifmt} unsupported")
if self.ohwformat is None:
slash.skip_test(f"{self.ofmt} unsupported")
if self.vpp_op in ["composite"]:
self.owidth, self.oheight = self.width, self.height
for comp in self.comps:
self.owidth = max(self.owidth, self.width + comp['x'])
self.oheight = max(self.oheight, self.height + comp['y'])
self.post_validate()
def vpp(self):
self.validate_caps()
iopts = self.gen_input_opts()
oopts = self.gen_output_opts()
self.ossource = filepath2os(self.source)
self.call_ffmpeg(iopts, oopts)
if vars(self).get("r2r", None) is not None:
assert type(self.r2r) is int and self.r2r > 1, "invalid r2r value"
metric = metrics2.factory.create(metric = dict(type = "md5", numbytes = -1))
metric.update(filetest = self.decoded)
metric.expect = metric.actual # the first run is our reference for r2r
metric.check()
for i in range(1, self.r2r):
self.call_ffmpeg(iopts, oopts)
metric.update(filetest = self.decoded)
metric.check()
else:
self.check_metrics()
3d27b8603e7399f16a976fd41a6dda3461f31a61 | a1cbe24cb8646e7af91a64d1fbfce4a4d7adce99 | /teesta/config/desktop.py | 0b643613f46c5abe037d298f362e72a3fa2b58c6 | [
"MIT"
]
| permissive | mbauskar/teesta | a4d32f8ec941be42f08d832ad922f092bf77b2b8 | 004bacefec97759e8abf525a58da2f4b17fb9448 | refs/heads/master | 2021-01-24T01:28:06.474339 | 2016-06-22T10:01:29 | 2016-06-22T10:01:29 | 61,936,561 | 1 | 0 | null | 2016-06-25T09:37:11 | 2016-06-25T09:37:11 | null | UTF-8 | Python | false | false | 261 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"module_name": "Teesta",
"color": "grey",
"icon": "octicon octicon-file-directory",
"type": "module",
"label": _("Teesta")
}
]
730fd4d9ec2574b3ee5ec4a4b6f7490e1b36834c | e089f2598400d4115f9f1a91c48c7eef40e6d251 | /vgg16_2.py | 423a087f0d1ad03d149d1deda559efc8d94e0215 | []
| no_license | cwarny/flower-teller | c261ef10077f0b65d96bdb0e28a3e013ef32cef5 | 36b4350e7257ac11d3e89cb594f047963c65b3ac | refs/heads/master | 2021-01-21T15:49:39.393488 | 2017-06-26T02:13:47 | 2017-06-26T02:13:47 | 95,398,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,280 | py | import json
import csv
import numpy as np
from numpy.random import random, permutation
from scipy import misc, ndimage
from scipy.ndimage.interpolation import zoom
from matplotlib import pyplot as plt
from PIL import Image
from sklearn.preprocessing import OneHotEncoder
import keras
from keras import backend as K
from keras.utils.data_utils import get_file
from keras.models import Sequential, Model
from keras.layers.core import Flatten, Dense, Dropout, Lambda
from keras.layers import Input
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.optimizers import SGD, RMSprop, Adam
from keras.preprocessing import image
def ConvBlock(layers, model, filters):
for i in range(layers):
model.add(ZeroPadding2D((1,1)))
model.add(Convolution2D(filters, 3, 3, activation='relu'))
model.add(MaxPooling2D((2,2), strides=(2,2)))
def FCBlock(model):
model.add(Dense(4096, activation='relu'))
model.add(Dropout(0.5))
vgg_mean = np.array([123.68, 116.779, 103.939]).reshape((3,1,1))
def vgg_preprocess(x):
x = x - vgg_mean # subtract mean
return x[:, ::-1] # reverse axis bgr->rgb
def VGG_16():
model = Sequential()
model.add(Lambda(vgg_preprocess, input_shape=(3,224,224)))
ConvBlock(2, model, 64)
ConvBlock(2, model, 128)
ConvBlock(3, model, 256)
ConvBlock(3, model, 512)
ConvBlock(3, model, 512)
model.add(Flatten())
FCBlock(model)
FCBlock(model)
model.add(Dense(1000, activation='softmax'))
return model
model = VGG_16()
# NOTE: get_file's second argument is the download origin; passing 'vgg16.h5'
# only works if the weights file is already cached under ~/.keras/models.
fpath = get_file('vgg16.h5', 'vgg16.h5', cache_subdir='models') # See: https://gist.github.com/baraldilorenzo/07d7802847aaad0a35d3
model.load_weights(fpath)
def get_batches(dirname, gen=image.ImageDataGenerator(), shuffle=True, batch_size=4, class_mode='categorical', target_size=(224,224)):
return gen.flow_from_directory(dirname, target_size=target_size, class_mode=class_mode, shuffle=shuffle, batch_size=batch_size)
val_batches = get_batches('n11669921/sample/valid', shuffle=False, batch_size=64)
batches = get_batches('n11669921/sample/train', shuffle=False, batch_size=64)
def onehot(x):
return np.array(OneHotEncoder().fit_transform(x.reshape(-1,1)).todense())
val_classes = val_batches.classes
trn_classes = batches.classes
val_labels = onehot(val_classes)
trn_labels = onehot(trn_classes)
# Fine-tuning
model.pop()
for layer in model.layers:
layer.trainable = False
model.add(Dense(121, activation='softmax'))
def fit_model(model, batches, val_batches, nb_epoch=1):
model.fit_generator(batches, samples_per_epoch=batches.N, nb_epoch=nb_epoch, validation_data=val_batches, nb_val_samples=val_batches.N)
opt = RMSprop(lr=0.1)
model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])
fit_model(model, batches, val_batches, nb_epoch=2)
# Materialize the (unshuffled) validation images into an array named val_data,
# which the array-based predict_* calls below require.
val_data = np.concatenate([val_batches.next()[0] for _ in range(int(np.ceil(val_batches.N / float(val_batches.batch_size))))])
preds = model.predict_classes(val_data, batch_size=64)
probs = model.predict_proba(val_data, batch_size=64)[:,0]
layers = model.layers
# Get the index of the first dense layer
first_dense_idx = [index for index,layer in enumerate(layers) if type(layer) is Dense][0]
# Set this and all subsequent layers to trainable
for layer in layers[first_dense_idx:]:
layer.trainable = True
K.set_value(opt.lr, 0.0001)
fit_model(model, batches, val_batches, 3)
model.save_weights('models/finetune2.h5')
456f8a27016afc6e1e5da8a314af1625002e861a | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/224/users/4352/codes/1649_2445.py | c823fb8e8c5b6c16933c41968349e882bb866230 | []
| no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | escala = (input("digite a escala: (C/F)"))
valor = float(input("digite a temperatura: "))
formula1 = 5/9 * (valor - 32)  # Fahrenheit -> Celsius (e.g. 212 F -> 100.0 C)
formula2 = 9*valor/5 + 32      # Celsius -> Fahrenheit (e.g. 100 C -> 212.0 F)
if escala == "F":
print(round(formula1, 2))
else:
print(round(formula2, 2))
ad4a165dfc6950d2c638cdd134f1deeef8352d52 | cde2f83809b89ae0b01a7b30b9caae83183d09a0 | /correlation.py | 9825fc5f79db4b3d456117cd1171a2f4a5337ad5 | []
| no_license | airbaggie/judgmental_eye | 264fa8f4aaec452f4acbf34dbb99d070408c2c22 | 67cb86e450133d253c74b07950a846bf2bb4f06f | refs/heads/master | 2020-06-08T21:32:54.271881 | 2019-06-24T04:12:58 | 2019-06-24T04:12:58 | 193,310,214 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 753 | py | from math import sqrt
def pearson(pairs):
"""Return Pearson correlation for pairs.
Using a set of pairwise ratings, produces a Pearson similarity rating.
"""
series_1 = [float(pair[0]) for pair in pairs]
series_2 = [float(pair[1]) for pair in pairs]
sum_1 = sum(series_1)
sum_2 = sum(series_2)
squares_1 = sum([n * n for n in series_1])
squares_2 = sum([n * n for n in series_2])
product_sum = sum([n * m for n, m in pairs])
size = len(pairs)
numerator = product_sum - ((sum_1 * sum_2) / size)
denominator = sqrt(
(squares_1 - (sum_1 * sum_1) / size) *
(squares_2 - (sum_2 * sum_2) / size)
)
if denominator == 0:
return 0
return numerator / denominator
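# Usage sketch (illustrative; not part of the original module).
if __name__ == '__main__':
    # Perfectly correlated pairs score 1.0 (within float precision).
    print(pearson([(1.0, 2.0), (2.0, 4.0), (3.0, 6.0)]))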
7dc433e6a444ecf139658897f4b616a313f2c5ee | 050fc5ca698dfd7612dee42aa980fc7b5eee40a2 | /tests/plugin/data/sw_kafka/test_kafka.py | 30f9f02021b73bbc4f9a958cb224e19b9557fff7 | [
"Apache-2.0"
]
| permissive | apache/skywalking-python | 8ac6ce06630c519f9984a45e74c1fcc88cf5b9d6 | 1a360228c63cd246dd4c5dd8e1f09bdd5556ad7d | refs/heads/master | 2023-09-05T02:45:56.225937 | 2023-08-28T22:19:24 | 2023-08-28T22:19:24 | 261,456,329 | 178 | 122 | Apache-2.0 | 2023-08-28T22:19:26 | 2020-05-05T12:13:49 | Python | UTF-8 | Python | false | false | 1,352 | py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Callable
import pytest
import requests
from skywalking.plugins.sw_kafka import support_matrix
from tests.orchestrator import get_test_vector
from tests.plugin.base import TestPluginBase
@pytest.fixture
def prepare():
# type: () -> Callable
return lambda *_: requests.get('http://0.0.0.0:9090/users', timeout=5)
class TestPlugin(TestPluginBase):
@pytest.mark.parametrize('version', get_test_vector(lib_name='kafka-python', support_matrix=support_matrix))
def test_plugin(self, docker_compose, version):
self.validate()
695e6a3453693a6839a8274d69b5d35e909f9015 | a87f87e71d971bc8d6c205400052a47f8d957e5d | /psuedo_train.py | b0fd34acf099b173fe37b8a78a74967baf39f36e | []
| no_license | bcaitech1/p1-img-MaiHon | 045eef675a92bb9b26532ff930f919fe3c6e6919 | d17a925e301349e167327c5eab9d3b65f06d61a3 | refs/heads/master | 2023-04-08T10:22:25.803079 | 2021-04-19T12:22:12 | 2021-04-19T12:22:12 | 359,440,859 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,604 | py | import os
import torch
import random
import argparse
import numpy as np
import pandas as pd
import albumentations as A
from tqdm import tqdm
from src.models import *
from src.configs.config import InferConfig
from src.dataset import PseudoDataset
import torch.nn.functional as F
from torch.utils.data import DataLoader
def seed_everything(seed=2021):
random.seed(seed)
np.random.seed(seed)
os.environ["PYTHONHASHSEED"] = str(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
import imgaug
imgaug.random.seed(seed)
def main():
parser = argparse.ArgumentParser(description='Arguments')
parser.add_argument('--seed', default=43, type=int, help='Reproduction Seed')
parser.add_argument('--batch_size', default=16, type=int)
parser.add_argument('--postfix', required=True)
parser.add_argument('--model_type', required=True)
parser.add_argument('--tta', default=0, type=int)
args = parser.parse_args()
seed_everything(args.seed)
cfg = InferConfig(args)
tta_infer = True if args.tta == 1 else False
if tta_infer:
print("TTA Inference")
tta_tfms = [
            # A.CLAHE(clip_limit=2.0, p=1.0), --> gives the same result even when added
A.HorizontalFlip(p=1.0),
]
else:
tta_tfms = None
if tta_infer:
infer_ds = PseudoDataset(cfg, tta_tfms)
else:
infer_ds = PseudoDataset(cfg)
infer_dl = DataLoader(
infer_ds,
batch_size=args.batch_size,
shuffle=False,
num_workers=3,
pin_memory=True
)
models = []
for i in range(len(cfg.ckpts)):
model = Net(cfg)
model = model.to(cfg.device)
save_dict = torch.load(cfg.ckpts[i])
print(f"Epoch: {save_dict['epoch']}")
print(f"Loss : {save_dict['loss']}")
state_dict = save_dict["state_dict"]
model.load_state_dict(state_dict)
models.append(model)
print(f"Total {len(models)} models loaded.")
if tta_infer:
pred_paths = []
predictions = []
with torch.no_grad():
for sample in tqdm(infer_dl, total=len(infer_dl)):
images = sample['image']
paths = np.array(sample['path'])
                # Accumulate log-softmax scores across every TTA view and every model
                ensemble = 0
                for image in images:
                    for model in models:
                        model.eval()
                        logits = model(image.to(cfg.device))
                        ensemble += F.log_softmax(logits, dim=-1)
                _, pred = torch.max(ensemble / len(models), -1)
predictions.extend(pred.detach().cpu().numpy())
pred_paths.extend(paths)
else:
pred_paths = []
predictions = []
with torch.no_grad():
for sample in tqdm(infer_dl, total=len(infer_dl)):
images = sample['image'].to(cfg.device)
paths = np.array(sample['path'])
                # Accumulate log-softmax scores across the model ensemble
                ensemble = 0
                for model in models:
                    model.eval()
                    logits = model(images)
                    ensemble += F.log_softmax(logits, dim=-1)
                _, pred = torch.max(ensemble / len(models), -1)
predictions.extend(pred.detach().cpu().numpy())
pred_paths.extend(paths)
pseudo = pd.DataFrame(data={
'image': pred_paths,
'label': predictions
})
pseudo.to_csv(cfg.submission_dir, index=False)
print("Inference Done.")
if __name__ == "__main__":
    main()
f44312a56f753dec7e321a13f2d402666c08d473 | 779af874adf1647461981b0c36530cf9924f5f01 | /python3/dist-packages/plainbox/impl/exporter/text.py | c8889b30876cfff0a422e3a3d37debfa5f7bf396 | []
| no_license | hitsuyo/Library_Python_3.5 | 8974b5de04cb7780b0a1a75da5cb5478873f08e7 | 374e3f9443e4d5cae862fd9d81db8b61030ae172 | refs/heads/master | 2022-11-05T23:46:47.188553 | 2018-01-04T19:29:05 | 2018-01-04T19:29:05 | 116,093,537 | 1 | 2 | null | 2022-10-26T03:07:06 | 2018-01-03T05:02:20 | Python | UTF-8 | Python | false | false | 3,001 | py | # This file is part of Checkbox.
#
# Copyright 2012 Canonical Ltd.
# Written by:
# Zygmunt Krynicki <[email protected]>
#
# Checkbox is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3,
# as published by the Free Software Foundation.
#
# Checkbox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Checkbox. If not, see <http://www.gnu.org/licenses/>.
"""
:mod:`plainbox.impl.exporter.text` -- plain text exporter
=========================================================
.. warning::
THIS MODULE DOES NOT HAVE STABLE PUBLIC API
"""
from plainbox.i18n import gettext as _
from plainbox.impl.color import Colorizer
from plainbox.impl.exporter import SessionStateExporterBase
from plainbox.impl.result import outcome_meta
class TextSessionStateExporter(SessionStateExporterBase):
"""Human-readable session state exporter."""
def __init__(self, option_list=None, color=None, exporter_unit=None):
super().__init__(option_list, exporter_unit=exporter_unit)
self.C = Colorizer(color)
def get_session_data_subset(self, session_manager):
return session_manager.state
def dump(self, session, stream):
for job in session.run_list:
state = session.job_state_map[job.id]
if state.result.is_hollow:
continue
if self.C.is_enabled:
stream.write(
" {}: {}\n".format(
self.C.custom(
outcome_meta(state.result.outcome).unicode_sigil,
outcome_meta(state.result.outcome).color_ansi
), state.job.tr_summary(),
).encode("UTF-8"))
if len(state.result_history) > 1:
stream.write(_(" history: {0}\n").format(
', '.join(
self.C.custom(
result.outcome_meta().tr_outcome,
result.outcome_meta().color_ansi)
for result in state.result_history)
).encode("UTF-8"))
else:
stream.write(
"{:^15}: {}\n".format(
state.result.tr_outcome(),
state.job.tr_summary(),
).encode("UTF-8"))
if state.result_history:
print(_("History:"), ', '.join(
self.C.custom(
result.outcome_meta().unicode_sigil,
result.outcome_meta().color_ansi)
for result in state.result_history))
3d248b9822e566b434bc50291ba5c73e7f9d7aa3 | 564d6a4d305a8ac6a7e01c761831fb2081c02d0f | /sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2018_06_01/aio/operations/_virtual_machine_run_commands_operations.py | ae68dd96f544a951679e1e4b41833dc0b708fe85 | [
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
]
| permissive | paultaiton/azure-sdk-for-python | 69af4d889bac8012b38f5b7e8108707be679b472 | d435a1a25fd6097454b7fdfbbdefd53e05029160 | refs/heads/master | 2023-01-30T16:15:10.647335 | 2020-11-14T01:09:50 | 2020-11-14T01:09:50 | 283,343,691 | 0 | 0 | MIT | 2020-07-28T22:43:43 | 2020-07-28T22:43:43 | null | UTF-8 | Python | false | false | 7,966 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualMachineRunCommandsOperations:
"""VirtualMachineRunCommandsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2018_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
location: str,
**kwargs
) -> AsyncIterable["models.RunCommandListResult"]:
"""Lists all available run commands for a subscription in a location.
:param location: The location upon which run commands is queried.
:type location: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RunCommandListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2018_06_01.models.RunCommandListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.RunCommandListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('RunCommandListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/runCommands'} # type: ignore
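    # Usage sketch (illustrative; assumes a configured ComputeManagementClient):
    #   async for run_command in compute_client.virtual_machine_run_commands.list("westus"):
    #       print(run_command.id)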
async def get(
self,
location: str,
command_id: str,
**kwargs
) -> "models.RunCommandDocument":
"""Gets specific run command for a subscription in a location.
:param location: The location upon which run commands is queried.
:type location: str
:param command_id: The command id.
:type command_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RunCommandDocument, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2018_06_01.models.RunCommandDocument
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.RunCommandDocument"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json, text/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
'commandId': self._serialize.url("command_id", command_id, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RunCommandDocument', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/runCommands/{commandId}'} # type: ignore
| [
"[email protected]"
]
| |
3a6ccd4f7a0edffa6f93e9687c076417d0a1b0d7 | 7b5828edda7751700ca7002b40a214e39e5f48a8 | /EA/simulation/__hooks__.py | 85e30235b7cb2d5c0d2088433d93d3f9f0f6c835 | []
| no_license | daniela-venuta/Sims-4-Python-Script-Workspace | 54c33dac02f84daed66f46b7307f222fede0fa62 | f408b28fb34626b2e3b2953152343d591a328d66 | refs/heads/main | 2023-03-29T18:08:39.202803 | 2021-03-30T19:00:42 | 2021-03-30T19:00:42 | 353,111,243 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 479 | py | RELOADER_ENABLED = False
__enable_gc_callback = True
import gc
try:
import _profile
except:
__enable_gc_callback = False
def system_init(gameplay):
import sims4.importer
sims4.importer.enable()
print('Server Startup')
if __enable_gc_callback:
gc.callbacks.append(_profile.notify_gc_function)
def system_shutdown():
global RELOADER_ENABLED
import sims4.importer
sims4.importer.disable()
RELOADER_ENABLED = False
| [
"[email protected]"
]
| |
19effaf2fd28cbfbcc5bf1197122f93d208d746b | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-res/huaweicloudsdkres/v1/model/show_res_datasource_request.py | 174e9c77405184d91a01ce3c43989fd6fba03d2b | [
"Apache-2.0"
]
| permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 4,956 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ShowResDatasourceRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'content_type': 'str',
'workspace_id': 'str',
'datasource_id': 'str'
}
attribute_map = {
'content_type': 'Content-Type',
'workspace_id': 'workspace_id',
'datasource_id': 'datasource_id'
}
def __init__(self, content_type=None, workspace_id=None, datasource_id=None):
"""ShowResDatasourceRequest
The model defined in huaweicloud sdk
        :param content_type: Content type; the value is application/json.
        :type content_type: str
        :param workspace_id: Workspace id.
        :type workspace_id: str
        :param datasource_id: Datasource id.
:type datasource_id: str
"""
self._content_type = None
self._workspace_id = None
self._datasource_id = None
self.discriminator = None
self.content_type = content_type
self.workspace_id = workspace_id
self.datasource_id = datasource_id
@property
def content_type(self):
"""Gets the content_type of this ShowResDatasourceRequest.
        Content type; the value is application/json.
:return: The content_type of this ShowResDatasourceRequest.
:rtype: str
"""
return self._content_type
@content_type.setter
def content_type(self, content_type):
"""Sets the content_type of this ShowResDatasourceRequest.
        Content type; the value is application/json.
:param content_type: The content_type of this ShowResDatasourceRequest.
:type content_type: str
"""
self._content_type = content_type
@property
def workspace_id(self):
"""Gets the workspace_id of this ShowResDatasourceRequest.
        Workspace id.
:return: The workspace_id of this ShowResDatasourceRequest.
:rtype: str
"""
return self._workspace_id
@workspace_id.setter
def workspace_id(self, workspace_id):
"""Sets the workspace_id of this ShowResDatasourceRequest.
        Workspace id.
:param workspace_id: The workspace_id of this ShowResDatasourceRequest.
:type workspace_id: str
"""
self._workspace_id = workspace_id
@property
def datasource_id(self):
"""Gets the datasource_id of this ShowResDatasourceRequest.
        Datasource id.
:return: The datasource_id of this ShowResDatasourceRequest.
:rtype: str
"""
return self._datasource_id
@datasource_id.setter
def datasource_id(self, datasource_id):
"""Sets the datasource_id of this ShowResDatasourceRequest.
        Datasource id.
:param datasource_id: The datasource_id of this ShowResDatasourceRequest.
:type datasource_id: str
"""
self._datasource_id = datasource_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ShowResDatasourceRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
f83d582584b1f3898d024e48b6fb7fe03a1db33f | fdab0c18eab28477d0980723c5ac5b4ba10c506f | /shelf/__init__.py | 7a691c178c4a1e6817a33c9fb6cc581431763af4 | [
"MIT"
]
| permissive | MIT-Informatics/PreservationSimulation | 58b53595841c39e1fe00a05241be43ed0bcf6430 | 38c6641a25108022ce8f225a352f566ad007b0f3 | refs/heads/master | 2021-08-25T10:35:46.066554 | 2021-08-24T20:17:13 | 2021-08-24T20:17:13 | 17,369,426 | 9 | 0 | NOASSERTION | 2021-03-20T02:55:37 | 2014-03-03T15:03:30 | R | UTF-8 | Python | false | false | 30 | py | # Nothing to say here, yet.
c810b83e4d978275269dbf2edf81ba3749d40a39 | 9edaf93c833ba90ae9a903aa3c44c407a7e55198 | /travelport/models/general_remark_3.py | 37e4e5426aafa26b117cdb81d48dcfa92904cffc | []
| no_license | tefra/xsdata-samples | c50aab4828b8c7c4448dbdab9c67d1ebc519e292 | ef027fe02e6a075d8ed676c86a80e9647d944571 | refs/heads/main | 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 | Python | UTF-8 | Python | false | false | 4,367 | py | from __future__ import annotations
from dataclasses import dataclass, field
from xsdata.models.datatype import XmlDate, XmlDateTime
from travelport.models.type_direction_3 import TypeDirection3
from travelport.models.type_element_status_4 import TypeElementStatus4
from travelport.models.type_product_3 import TypeProduct3
__NAMESPACE__ = "http://www.travelport.com/schema/common_v33_0"
@dataclass
class GeneralRemark3:
"""A textual remark container to hold any printable text.
(max 512 chars)
Parameters
----------
remark_data
Actual remarks data.
booking_traveler_ref
Reference to Booking Traveler.
key
category
A category to group and organize the various remarks. This is not
required, but it is recommended.
type_in_gds
supplier_type
The type of product this reservation is relative to
provider_reservation_info_ref
Provider reservation reference key.
provider_code
supplier_code
direction
Direction Incoming or Outgoing of the GeneralRemark.
create_date
The date and time that this GeneralRemark was created.
use_provider_native_mode
Will be true when terminal process required, else false
el_stat
This attribute is used to show the action results of an element.
Possible values are "A" (when elements have been added to the UR)
and "M" (when existing elements have been modified). Response only.
key_override
If a duplicate key is found where we are adding elements in some
cases like URAdd, then instead of erroring out set this attribute to
true.
"""
class Meta:
name = "GeneralRemark"
namespace = "http://www.travelport.com/schema/common_v33_0"
remark_data: None | str = field(
default=None,
metadata={
"name": "RemarkData",
"type": "Element",
"required": True,
}
)
booking_traveler_ref: list[str] = field(
default_factory=list,
metadata={
"name": "BookingTravelerRef",
"type": "Element",
"max_occurs": 999,
}
)
key: None | str = field(
default=None,
metadata={
"name": "Key",
"type": "Attribute",
}
)
category: None | str = field(
default=None,
metadata={
"name": "Category",
"type": "Attribute",
"max_length": 10,
}
)
type_in_gds: None | str = field(
default=None,
metadata={
"name": "TypeInGds",
"type": "Attribute",
"max_length": 30,
}
)
supplier_type: None | TypeProduct3 = field(
default=None,
metadata={
"name": "SupplierType",
"type": "Attribute",
}
)
provider_reservation_info_ref: None | str = field(
default=None,
metadata={
"name": "ProviderReservationInfoRef",
"type": "Attribute",
}
)
provider_code: None | str = field(
default=None,
metadata={
"name": "ProviderCode",
"type": "Attribute",
"min_length": 2,
"max_length": 5,
}
)
supplier_code: None | str = field(
default=None,
metadata={
"name": "SupplierCode",
"type": "Attribute",
"min_length": 2,
"max_length": 5,
}
)
direction: None | TypeDirection3 = field(
default=None,
metadata={
"name": "Direction",
"type": "Attribute",
}
)
create_date: None | XmlDateTime = field(
default=None,
metadata={
"name": "CreateDate",
"type": "Attribute",
}
)
use_provider_native_mode: bool = field(
default=False,
metadata={
"name": "UseProviderNativeMode",
"type": "Attribute",
}
)
el_stat: None | TypeElementStatus4 = field(
default=None,
metadata={
"name": "ElStat",
"type": "Attribute",
}
)
key_override: None | bool = field(
default=None,
metadata={
"name": "KeyOverride",
"type": "Attribute",
}
)
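# Usage sketch (illustrative; field names come from the dataclass above):
#   remark = GeneralRemark3(remark_data="Wheelchair assistance requested", category="GEN")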
30acd6fabbb86e2029fe9bdb373bcb1912239b99 | 7b4820948845f55274b211d676ab8a6253a6298b | /addons/plugin.video.phstreams/default.py | 165d263833b1730268ad874343597e83e0a9e838 | []
| no_license | bopopescu/mw | 524c57d4b859751e298b907a12e44e9711ef72a6 | 5ef2acea0fb4150578e53201463c6bc5da37be20 | refs/heads/master | 2021-05-30T19:33:11.750160 | 2016-01-11T05:28:46 | 2016-01-11T05:28:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,159 | py | # -*- coding: utf-8 -*-
'''
Phoenix Add-on
Copyright (C) 2015 Blazetamer
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urlparse,sys
params = dict(urlparse.parse_qsl(sys.argv[2].replace('?','')))
# Read each routing parameter once, falling back to the addon's defaults
action = params.get('action')
name = params.get('name', '0')
url = params.get('url', '0')
playable = params.get('playable', '0')
content = params.get('content', '0')
tvshow = params.get('tvshow', '0')
audio = params.get('audio', '0')
image = params.get('image', '0')
fanart = params.get('fanart', '0')
if action is None:
from resources.lib.indexers import phstreams
phstreams.getCategory()
elif action == 'dmode' or action == 'ndmode':
from resources.lib.indexers import phstreams
phstreams.getDirectory(name, url, audio, image, fanart, playable, content)
elif action == 'subDirectory':
from resources.lib.indexers import phstreams
phstreams.subDirectory(name, url, audio, image, fanart, playable, tvshow, content)
elif action == 'localDirectory':
from resources.lib.indexers import phstreams
phstreams.localDirectory()
elif action == 'search':
from resources.lib.indexers import phstreams
phstreams.getSearch()
elif action == 'searchDirectory':
from resources.lib.indexers import phstreams
phstreams.searchDirectory()
elif action == 'searchDirectory2':
from resources.lib.indexers import phstreams
phstreams.searchDirectory(url)
elif action == 'clearSearch':
from resources.lib.indexers import phstreams
phstreams.clearSearch()
elif action == 'resolveUrl':
from resources.lib.indexers import phstreams
phstreams.resolveUrl(name, url, audio, image, fanart, playable, content)
elif action == 'openDialog':
from resources.lib.libraries import phdialogs
phdialogs.openDialog(url,audio)
elif action == 'openSettings':
from resources.lib.libraries import control
control.openSettings()
elif action == 'addView':
from resources.lib.libraries import views
views.addView(content)
elif action == 'downloader':
from resources.lib.libraries import downloader
downloader.downloader()
elif action == 'addDownload':
from resources.lib.libraries import downloader
downloader.addDownload(name,url,image)
elif action == 'removeDownload':
from resources.lib.libraries import downloader
downloader.removeDownload(url)
elif action == 'startDownload':
from resources.lib.libraries import downloader
downloader.startDownload()
elif action == 'startDownloadThread':
from resources.lib.libraries import downloader
downloader.startDownloadThread()
elif action == 'stopDownload':
from resources.lib.libraries import downloader
downloader.stopDownload()
elif action == 'statusDownload':
from resources.lib.libraries import downloader
downloader.statusDownload()
elif action == 'trailer':
from resources.lib.libraries import trailer
trailer.trailer().play(name)
elif action == 'clearCache':
from resources.lib.libraries import cache
cache.clear()
elif action == 'radioDirectory':
from resources.lib.indexers import phradios
phradios.radioDirectory()
elif action == 'radioResolve':
from resources.lib.indexers import phradios
phradios.radioResolve(name, url, image)
elif action == 'radio1fm':
from resources.lib.indexers import phradios
phradios.radio1fm(image, fanart)
elif action == 'radio181fm':
from resources.lib.indexers import phradios
phradios.radio181fm(image, fanart)
elif action == 'radiotunes':
from resources.lib.indexers import phradios
phradios.radiotunes(image, fanart)
elif action == 'Kickinradio':
from resources.lib.indexers import phradios
phradios.Kickinradio(image, fanart)
elif action == 'Kickinradiocats':
from resources.lib.indexers import phradios
phradios.Kickinradiocats(url, image, fanart)
elif action == 'CartoonDirectory':
from resources.lib.indexers import phtoons
phtoons.CartoonDirectory()
elif action == 'CartoonCrazy':
from resources.lib.indexers import phtoons
phtoons.CartoonCrazy(image, fanart)
elif action == 'CCsearch':
from resources.lib.indexers import phtoons
phtoons.CCsearch(url, image, fanart)
elif action == 'CCcat':
from resources.lib.indexers import phtoons
phtoons.CCcat(url, image, fanart)
elif action == 'CCpart':
from resources.lib.indexers import phtoons
phtoons.CCpart(url, image, fanart)
elif action == 'CCstream':
from resources.lib.indexers import phtoons
phtoons.CCstream(url)
elif action == 'nhlDirectory':
from resources.lib.indexers import nhlcom
nhlcom.nhlDirectory()
elif action == 'nhlScoreboard':
from resources.lib.indexers import nhlcom
nhlcom.nhlScoreboard()
elif action == 'nhlArchives':
from resources.lib.indexers import nhlcom
nhlcom.nhlArchives()
elif action == 'nhlStreams':
from resources.lib.indexers import nhlcom
nhlcom.nhlStreams(name,url)
elif action == 'nhlResolve':
from resources.lib.indexers import nhlcom
nhlcom.nhlResolve(url)
6da402554a5677cc3feb6dd00f350a495e2d3355 | 2dc9ee4a8c39d00c255f52e8af2486e7c2891a98 | /sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/aio/operations/_private_endpoint_connections_operations.py | 742624e8eb6a11c2dcc7c8daf69f8f34d2441314 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
]
| permissive | JoshuaLai/azure-sdk-for-python | fd780c2ab145a35ec0bf9519c4d08c928081e79c | 07614796a332bcfeed35dddee9dbfc2f5487a39f | refs/heads/master | 2023-04-04T17:49:58.177790 | 2021-04-06T21:31:48 | 2021-04-06T21:31:48 | 348,842,434 | 0 | 0 | MIT | 2021-03-17T20:24:55 | 2021-03-17T20:24:54 | null | UTF-8 | Python | false | false | 23,467 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PrivateEndpointConnectionsOperations:
"""PrivateEndpointConnectionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.synapse.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get(
self,
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
**kwargs
) -> "_models.PrivateEndpointConnection":
"""Gets a private endpoint connection.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection.
:type private_endpoint_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateEndpointConnection, or the result of cls(response)
:rtype: ~azure.mgmt.synapse.models.PrivateEndpointConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
async def _create_initial(
self,
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
request: "_models.PrivateEndpointConnection",
**kwargs
) -> "_models.PrivateEndpointConnection":
cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(request, 'PrivateEndpointConnection')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
async def begin_create(
self,
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
request: "_models.PrivateEndpointConnection",
**kwargs
) -> AsyncLROPoller["_models.PrivateEndpointConnection"]:
"""Approve or reject a private endpoint connection.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection.
:type private_endpoint_connection_name: str
:param request: Request body of private endpoint connection to create.
:type request: ~azure.mgmt.synapse.models.PrivateEndpointConnection
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.synapse.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
request=request,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
**kwargs
) -> Optional["_models.OperationResource"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 202:
deserialized = self._deserialize('OperationResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
workspace_name: str,
private_endpoint_connection_name: str,
**kwargs
) -> AsyncLROPoller["_models.OperationResource"]:
"""Delete a private endpoint connection.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection.
:type private_endpoint_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either OperationResource or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.synapse.models.OperationResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationResource"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('OperationResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
def list(
self,
resource_group_name: str,
workspace_name: str,
**kwargs
) -> AsyncIterable["_models.PrivateEndpointConnectionList"]:
"""Lists private endpoint connection in workspace.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PrivateEndpointConnectionList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.synapse.models.PrivateEndpointConnectionList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-03-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PrivateEndpointConnectionList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/privateEndpointConnections'} # type: ignore
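# --- Illustrative usage sketch (hand-written, not generated code). The entry
# points and resource names below are assumptions: SynapseManagementClient and
# DefaultAzureCredential are this SDK's usual async entry points, and
# PrivateLinkServiceConnectionState is taken from this package's models
# module; every "<...>" value is a placeholder. ---
async def _example_usage():  # pragma: no cover
    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.synapse.aio import SynapseManagementClient

    async with SynapseManagementClient(
        DefaultAzureCredential(), "<subscription-id>"
    ) as client:
        # Approve a pending connection (long-running operation).
        poller = await client.private_endpoint_connections.begin_create(
            "<resource-group>",
            "<workspace>",
            "<connection-name>",
            request=_models.PrivateEndpointConnection(
                private_link_service_connection_state=_models.PrivateLinkServiceConnectionState(
                    status="Approved", description="Approved via SDK"
                )
            ),
        )
        connection = await poller.result()
        print(connection.name)

        # Page through every connection in the workspace.
        async for item in client.private_endpoint_connections.list(
            "<resource-group>", "<workspace>"
        ):
            print(item.name)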
| [
"[email protected]"
]
| |
59b02c1565c66a501284ae2b8e71274d82d42d8e | ce6cb09c21470d1981f1b459293d353407c8392e | /lib/jnpr/healthbot/swagger/models/command_rpc.py | 95cb417debd31ead46f6fe1b6170628878d617e1 | [
"Apache-2.0"
]
| permissive | minefuto/healthbot-py-client | c4be4c9c3153ef64b37e5344bf84154e93e7b521 | bb81452c974456af44299aebf32a73abeda8a943 | refs/heads/master | 2022-12-04T07:47:04.722993 | 2020-05-13T14:04:07 | 2020-05-13T14:04:07 | 290,145,286 | 0 | 0 | Apache-2.0 | 2020-08-25T07:27:54 | 2020-08-25T07:27:53 | null | UTF-8 | Python | false | false | 8,149 | py | # coding: utf-8
"""
Healthbot APIs
API interface for Healthbot application # noqa: E501
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class CommandRpc(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'args': 'dict(str, str)',
'filename': 'str',
'host': 'str',
'password': 'str',
'tablename': 'str',
'target': 'str',
'username': 'str'
}
attribute_map = {
'args': 'args',
'filename': 'filename',
'host': 'host',
'password': 'password',
'tablename': 'tablename',
'target': 'target',
'username': 'username'
}
def __init__(self, args=None, filename=None, host=None, password=None, tablename=None, target=None, username=None): # noqa: E501
"""CommandRpc - a model defined in Swagger""" # noqa: E501
self._args = None
self._filename = None
self._host = None
self._password = None
self._tablename = None
self._target = None
self._username = None
self.discriminator = None
if args is not None:
self.args = args
self.filename = filename
self.host = host
self.password = password
self.tablename = tablename
if target is not None:
self.target = target
self.username = username
@property
def args(self):
"""Gets the args of this CommandRpc. # noqa: E501
Optional key/value pair arguments to table # noqa: E501
:return: The args of this CommandRpc. # noqa: E501
:rtype: dict(str, str)
"""
return self._args
@args.setter
def args(self, args):
"""Sets the args of this CommandRpc.
Optional key/value pair arguments to table # noqa: E501
:param args: The args of this CommandRpc. # noqa: E501
:type: dict(str, str)
"""
self._args = args
@property
def filename(self):
"""Gets the filename of this CommandRpc. # noqa: E501
Command-rpc table filename in which the table is defined # noqa: E501
:return: The filename of this CommandRpc. # noqa: E501
:rtype: str
"""
return self._filename
@filename.setter
def filename(self, filename):
"""Sets the filename of this CommandRpc.
Command-rpc table filename in which the table is defined # noqa: E501
:param filename: The filename of this CommandRpc. # noqa: E501
:type: str
"""
if filename is None:
raise ValueError("Invalid value for `filename`, must not be `None`") # noqa: E501
self._filename = filename
@property
def host(self):
"""Gets the host of this CommandRpc. # noqa: E501
Host name or IP address of the device on which the command will be inspected # noqa: E501
:return: The host of this CommandRpc. # noqa: E501
:rtype: str
"""
return self._host
@host.setter
def host(self, host):
"""Sets the host of this CommandRpc.
Host name or IP address of the device on which the command will be inspected # noqa: E501
:param host: The host of this CommandRpc. # noqa: E501
:type: str
"""
if host is None:
raise ValueError("Invalid value for `host`, must not be `None`") # noqa: E501
self._host = host
@property
def password(self):
"""Gets the password of this CommandRpc. # noqa: E501
Password to connect to device # noqa: E501
:return: The password of this CommandRpc. # noqa: E501
:rtype: str
"""
return self._password
@password.setter
def password(self, password):
"""Sets the password of this CommandRpc.
Password to connect to device # noqa: E501
:param password: The password of this CommandRpc. # noqa: E501
:type: str
"""
if password is None:
raise ValueError("Invalid value for `password`, must not be `None`") # noqa: E501
self._password = password
@property
def tablename(self):
"""Gets the tablename of this CommandRpc. # noqa: E501
Command-rpc table name # noqa: E501
:return: The tablename of this CommandRpc. # noqa: E501
:rtype: str
"""
return self._tablename
@tablename.setter
def tablename(self, tablename):
"""Sets the tablename of this CommandRpc.
Command-rpc table name # noqa: E501
:param tablename: The tablename of this CommandRpc. # noqa: E501
:type: str
"""
if tablename is None:
raise ValueError("Invalid value for `tablename`, must not be `None`") # noqa: E501
self._tablename = tablename
@property
def target(self):
"""Gets the target of this CommandRpc. # noqa: E501
To run the command on an FPC, specify the FPC target # noqa: E501
:return: The target of this CommandRpc. # noqa: E501
:rtype: str
"""
return self._target
@target.setter
def target(self, target):
"""Sets the target of this CommandRpc.
To run the command on an FPC, specify the FPC target # noqa: E501
:param target: The target of this CommandRpc. # noqa: E501
:type: str
"""
self._target = target
@property
def username(self):
"""Gets the username of this CommandRpc. # noqa: E501
Username to connect to device # noqa: E501
:return: The username of this CommandRpc. # noqa: E501
:rtype: str
"""
return self._username
@username.setter
def username(self, username):
"""Sets the username of this CommandRpc.
Username to connect to device # noqa: E501
:param username: The username of this CommandRpc. # noqa: E501
:type: str
"""
if username is None:
raise ValueError("Invalid value for `username`, must not be `None`") # noqa: E501
self._username = username
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(CommandRpc, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, CommandRpc):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
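# --- Illustrative sketch (hand-written, not generated): constructing the
# model and serializing it with to_dict(). Every field value below is a
# made-up placeholder. ---
def _example_command_rpc():  # pragma: no cover
    rpc = CommandRpc(
        filename="interface_table.yml",   # hypothetical table definition file
        host="192.0.2.10",                # device whose command output is inspected
        password="secret",
        tablename="InterfaceStatsTable",  # hypothetical command-rpc table name
        username="admin",
        args={"interface": "ge-0/0/0"},   # optional key/value table arguments
    )
    return rpc.to_dict()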
| [
"[email protected]"
]
| |
6a1ca19e78e7528364bc0b93d19a51ccb649f761 | f4be3422b28dda8802ea75368d665d17b634b83f | /babo/__init__.py | 5d6700be06951195e3f22bed513d5b4a0e966b6e | []
| no_license | carpedm20/babo | 0bab8abee49058fb4c0c6ab629f174d8a85d50a7 | 1fae47214918b4a64fc305787fb6a4df07c49768 | refs/heads/master | 2020-04-15T16:12:10.274756 | 2014-08-15T12:00:33 | 2014-08-15T12:00:33 | 22,943,235 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | # -*- coding: utf-8 -*-
"""
babo
~~~~
The world will have a generation of idiots.. by Albert Einstein
:copyright: (c) 2014 by Taehoon Kim.
:license: BSD, see LICENSE for more details.
"""
__copyright__ = 'Copyright 2014 by Taehoon Kim'
__version__ = '0.0.1'
__license__ = 'BSD'
__author__ = 'Taehoon Kim'
__author_email__ = '[email protected]'
__url__ = 'http://github.com/carpedm20/babo'
__all__ = [
]
| [
"[email protected]"
]
| |
a1250d373944bf65cff70e384219809151ab23bf | fadf50987ab3aaefc993f00187d8a833457e9e97 | /torchstat/model_hook.py | 3ff8d3a0ebcaae409b34f6e8da4cdb375d8cf88d | [
"MIT"
]
| permissive | Hulalazz/torchstat | 4cff14e2b272246d9fd7136b969eaab6165abfeb | b533d917ba8f2e0871a60c3ff73704e294b769eb | refs/heads/master | 2020-04-04T08:59:07.626893 | 2018-11-01T09:21:35 | 2018-11-01T09:21:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,980 | py | import time
from collections import OrderedDict
import numpy as np
import torch
import torch.nn as nn
from torchstat import compute_madd
from torchstat import compute_flops
class ModelHook(object):
def __init__(self, model, input_size):
assert isinstance(model, nn.Module)
assert isinstance(input_size, (list, tuple))
self._model = model
self._input_size = input_size
self._origin_call = dict() # sub module call hook
self._hook_model()
x = torch.rand(1, *self._input_size) # add module duration time
self._model.eval()
self._model(x)
@staticmethod
def _register_buffer(module):
assert isinstance(module, nn.Module)
if len(list(module.children())) > 0:
return
module.register_buffer('input_shape', torch.zeros(3).int())
module.register_buffer('output_shape', torch.zeros(3).int())
module.register_buffer('parameter_quantity', torch.zeros(1).int())
module.register_buffer('inference_memory', torch.zeros(1).long())
module.register_buffer('MAdd', torch.zeros(1).long())
module.register_buffer('duration', torch.zeros(1).float())
module.register_buffer('Flops', torch.zeros(1).long())
def _sub_module_call_hook(self):
def wrap_call(module, *input, **kwargs):
assert module.__class__ in self._origin_call
start = time.time()
output = self._origin_call[module.__class__](module, *input, **kwargs)
end = time.time()
module.duration = torch.from_numpy(
np.array([end - start], dtype=np.float32))
module.input_shape = torch.from_numpy(
np.array(input[0].size()[1:], dtype=np.int32))
module.output_shape = torch.from_numpy(
np.array(output.size()[1:], dtype=np.int32))
parameter_quantity = 0
# iterate through parameters and count num params
for name, p in module._parameters.items():
parameter_quantity += (0 if p is None else torch.numel(p.data))
module.parameter_quantity = torch.from_numpy(
np.array([parameter_quantity], dtype=np.int64))
inference_memory = 1
for s in output.size()[1:]:
inference_memory *= s
# memory += parameters_number # exclude parameter memory
inference_memory = inference_memory * 4 / (1024 ** 2) # 4 bytes per float32 element, reported in MB
module.inference_memory = torch.from_numpy(
np.array([inference_memory], dtype=np.float32))
if len(input) == 1:
madd = compute_madd(module, input[0], output)
flops = compute_flops(module, input[0], output)
elif len(input) > 1:
madd = compute_madd(module, input, output)
flops = compute_flops(module, input, output)
else: # error
madd = 0
flops = 0
module.MAdd = torch.from_numpy(
np.array([madd], dtype=np.int64))
module.Flops = torch.from_numpy(
np.array([flops], dtype=np.int64))
return output
for module in self._model.modules():
if len(list(module.children())) == 0 and module.__class__ not in self._origin_call:
self._origin_call[module.__class__] = module.__class__.__call__
module.__class__.__call__ = wrap_call
def _hook_model(self):
self._model.apply(self._register_buffer)
self._sub_module_call_hook()
@staticmethod
def _retrieve_leaf_modules(model):
leaf_modules = []
for name, m in model.named_modules():
if len(list(m.children())) == 0:
leaf_modules.append((name, m))
return leaf_modules
def retrieve_leaf_modules(self):
return OrderedDict(self._retrieve_leaf_modules(self._model))
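# --- Illustrative sketch (hand-written): hooking a small CNN and reading the
# per-layer buffers that wrap_call fills in during the forward pass. The
# architecture and input size are arbitrary. ---
def _example_hook():  # pragma: no cover
    model = nn.Sequential(
        nn.Conv2d(3, 8, kernel_size=3),
        nn.ReLU(),
        nn.Conv2d(8, 4, kernel_size=3),
    )
    hook = ModelHook(model, (3, 32, 32))  # runs one forward pass internally
    for name, module in hook.retrieve_leaf_modules().items():
        print(name, int(module.MAdd.item()), float(module.duration.item()))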
| [
"[email protected]"
]
| |
e037a3f03bb035f9294a2db24cabd7bccc5d1501 | 96dcea595e7c16cec07b3f649afd65f3660a0bad | /homeassistant/components/demo/time.py | 56ab715a7f7ed7eb6c4c24c99973845bd00f6ad4 | [
"Apache-2.0"
]
| permissive | home-assistant/core | 3455eac2e9d925c92d30178643b1aaccf3a6484f | 80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743 | refs/heads/dev | 2023-08-31T15:41:06.299469 | 2023-08-31T14:50:53 | 2023-08-31T14:50:53 | 12,888,993 | 35,501 | 20,617 | Apache-2.0 | 2023-09-14T21:50:15 | 2013-09-17T07:29:48 | Python | UTF-8 | Python | false | false | 1,515 | py | """Demo platform that offers a fake time entity."""
from __future__ import annotations
from datetime import time
from homeassistant.components.time import TimeEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import DOMAIN
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the demo time platform."""
async_add_entities([DemoTime("time", "Time", time(12, 0, 0), "mdi:clock", False)])
class DemoTime(TimeEntity):
"""Representation of a Demo time entity."""
_attr_has_entity_name = True
_attr_name = None
_attr_should_poll = False
def __init__(
self,
unique_id: str,
device_name: str,
state: time,
icon: str,
assumed_state: bool,
) -> None:
"""Initialize the Demo time entity."""
self._attr_assumed_state = assumed_state
self._attr_icon = icon
self._attr_native_value = state
self._attr_unique_id = unique_id
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)}, name=device_name
)
async def async_set_value(self, value: time) -> None:
"""Update the time."""
self._attr_native_value = value
self.async_write_ha_state()
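# --- Illustrative sketch (hand-written): once this platform is set up, the
# entity reacts to Home Assistant's built-in time.set_value service. The
# entity id ("time.time") is an assumption derived from the unique id and
# device name above; a live hass instance is required, hence comments only.
#
#     await hass.services.async_call(
#         "time",
#         "set_value",
#         {"entity_id": "time.time", "time": "07:30:00"},
#         blocking=True,
#     )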
| [
"[email protected]"
]
| |
be55d9627d221ef15a7208f8625d6dac784efb54 | 64ae307de1a6966ec948662df695cd09cefd5eff | /Day_6/mm_project/mm_project/tests/conftest.py | b924ef165574f9638d0da245dcbadd78736a65fc | [
"BSD-3-Clause"
]
| permissive | janash/mm_project_sss2019 | 3a4d61c4f4dbe7eee5af401d831e7483480bb509 | 84f9da3efe335a7024213ddae6fd56113d4fda09 | refs/heads/master | 2020-05-19T23:03:53.143517 | 2019-07-26T23:03:08 | 2019-07-26T23:03:08 | 185,258,555 | 0 | 2 | BSD-3-Clause | 2019-07-09T17:33:19 | 2019-05-06T19:26:20 | Python | UTF-8 | Python | false | false | 682 | py | """
Fixtures for monte carlo tests
"""
# Import package, test suite, and other packages as needed
import mm_project as mc
import numpy as np
import os
import pytest
import sys
@pytest.fixture
def nist_file():
current_directory = os.path.dirname(os.path.abspath(__file__))
nist_file = os.path.join(current_directory,'..', 'data', 'nist_sample_config1.txt')
coordinates = mc.generate_initial_coordinates(method='file', fname=nist_file)
return coordinates, nist_file
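# --- Illustrative sketch (hand-written): a test that a test_*.py module could
# build on these fixtures. Assumes mc.Box keeps its constructor arguments as
# ``box_length`` and ``coordinates`` attributes. ---
def test_box_round_trip(nist_file):
    (coordinates, box_length), fname = nist_file
    box = mc.Box(box_length, coordinates)
    assert os.path.exists(fname)
    assert box.box_length == box_length
    assert len(box.coordinates) == len(coordinates)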
@pytest.fixture
def mc_box(nist_file):
coordinates = nist_file[0][0]
box_length = nist_file[0][1]
fname = nist_file[1]
test_box = mc.Box(box_length, coordinates)
return test_box | [
"[email protected]"
]
|