Dataset schema (one record per source file; "nullable" marks columns that may be empty):

- blob_id: string (length 40)
- directory_id: string (length 40)
- path: string (length 3 to 616)
- content_id: string (length 40)
- detected_licenses: sequence (length 0 to 112)
- license_type: string (2 classes)
- repo_name: string (length 5 to 115)
- snapshot_id: string (length 40)
- revision_id: string (length 40)
- branch_name: string (777 classes)
- visit_date: timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38)
- revision_date: timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00)
- committer_date: timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06)
- github_id: int64 (4.92k to 681M, nullable)
- star_events_count: int64 (0 to 209k)
- fork_events_count: int64 (0 to 110k)
- gha_license_id: string (22 classes)
- gha_event_created_at: timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable)
- gha_created_at: timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable)
- gha_language: string (149 classes)
- src_encoding: string (26 classes)
- language: string (1 class)
- is_vendor: bool
- is_generated: bool
- length_bytes: int64 (3 to 10.2M)
- extension: string (188 classes)
- content: string (length 3 to 10.2M)
- authors: sequence (length 1)
- author_id: string (length 1 to 132)

Records follow, one per file, with columns separated by " | " in the order above:
d0bc43a4647f0dfc4d749e43af45a248efb2f22d | f6d1ed1a5369a5810429756fbdc07a8d293a4b3d | /conans/client/run_environment.py | 5f570cb8a0e111565aa4f5d270ca72669944acfd | [
"MIT"
] | permissive | lasote/conan | fc4e0b561e736b447b57999026ffe5291b7cab50 | 8f6978290d095778eff6a9a84ea3f06f723fcfea | refs/heads/develop | 2023-05-26T03:10:52.211304 | 2017-11-23T12:44:56 | 2017-11-23T12:44:56 | 47,204,017 | 3 | 3 | MIT | 2019-01-22T09:36:12 | 2015-12-01T16:56:42 | Python | UTF-8 | Python | false | false | 782 | py |
class RunEnvironment(object):
"""
    - PATH: pointing to the bin/ directories of the requires
    - LD_LIBRARY_PATH: the requires' lib_paths, for Linux
    - DYLD_LIBRARY_PATH: the requires' lib_paths, for OS X
"""
def __init__(self, conanfile):
"""
:param conanfile: ConanFile instance
"""
self.conanfile = conanfile
@property
def vars(self):
lib_paths = []
bin_paths = []
for dep in self.conanfile.deps_cpp_info.deps:
lib_paths.extend(self.conanfile.deps_cpp_info[dep].lib_paths)
bin_paths.extend(self.conanfile.deps_cpp_info[dep].bin_paths)
ret = {"DYLD_LIBRARY_PATH": lib_paths,
"LD_LIBRARY_PATH": lib_paths,
"PATH": bin_paths}
return ret
| [
"[email protected]"
] | |
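Below is a minimal, self-contained sketch (not part of the Conan sources) showing how the RunEnvironment class above assembles its variables; FakeDep, FakeDepsCppInfo and FakeConanFile are hypothetical stand-ins for a real ConanFile and its deps_cpp_info.

class FakeDep(object):
    # Mimics a per-dependency cpp_info with library and binary dirs.
    def __init__(self, lib_paths, bin_paths):
        self.lib_paths = lib_paths
        self.bin_paths = bin_paths

class FakeDepsCppInfo(object):
    # Mimics deps_cpp_info: iterable dependency names plus item access.
    def __init__(self, deps):
        self._deps = deps

    @property
    def deps(self):
        return list(self._deps)

    def __getitem__(self, name):
        return self._deps[name]

class FakeConanFile(object):
    def __init__(self, deps):
        self.deps_cpp_info = FakeDepsCppInfo(deps)

conanfile = FakeConanFile({"zlib": FakeDep(["/opt/zlib/lib"], ["/opt/zlib/bin"])})
print(RunEnvironment(conanfile).vars)
# {'DYLD_LIBRARY_PATH': ['/opt/zlib/lib'], 'LD_LIBRARY_PATH': ['/opt/zlib/lib'], 'PATH': ['/opt/zlib/bin']}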
86d2dd1ccdf978c2fd6ced71b93ebce47e3063f6 | 0ad7f553df6b210b5ac004fbf490ed651a21d55e | /algos/discrete_esay_control_lib_03.py | 8f73937bb19a0d95c0aa3e8e844be1e6e0ddd75d | [] | no_license | MarianoDel/spyder_python | fa00987eb9aa1ef61d7224679a84c05a217c6c35 | 5f5896df68f95eb860bc08c21ae2b19516432cdc | refs/heads/master | 2020-05-23T06:14:57.329478 | 2020-04-23T14:58:16 | 2020-04-23T14:58:16 | 84,753,428 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,993 | py |
# -*- coding: utf-8 -*-
# using the python-control library
#http://python-control.readthedocs.org/en/latest/index.html
import numpy as np
import control as ct
import matplotlib.pyplot as plt
#from scipy import signal
# coefficients taken from the digital system created in Tfilter_sympy_02.py
#numd1
#array([ 0. , 1.48941694, 0.9759379 , -0.00450648])
#dend
#array([ 1.00000000e+00, 9.21237959e-01, 5.39610404e-01, 1.33103857e-18])
b = [ 0. , 1.48941694, 0.9759379 , -0.00450648]
a = [ 1.00000000e+00, 9.21237959e-01, 5.39610404e-01, 1.33103857e-18]
dt = 1.0/25000
plt.figure(1)
dsys1 = ct.tf(b, a, dt)
omega = np.arange(100, 3.1415 / dt, 1)
mag, phase, omega = ct.bode_plot(dsys1, omega)
plt.show()
plt.draw()
G1 = dsys1
G2 = 350./3.3
Gt = ct.series(G1, G2)
plt.figure(2)
mag, phase, omega = ct.bode_plot(Gt, omega)
plt.show()
plt.draw()
# PID LOOP
# working backwards from the difference-equation algorithm:
#uk = uk-1 + k1 ek + k2 ek-1 + k3 ek-2
#Uz/Ez = (b0 + b1 z-1 + b2 z-2) / (1 - z-1)
#b0 = kp + kd + ki
#b1 = -kp - 2kd
#b2 = kd
#a0 = 1
#a1 = -1
fs = 25000
kp = 0.0015/fs
#kd = 0.80
kd = 0.0
ki = 58.0/fs
#ki = 0.0
bpid = [kp + kd + ki, -kp - 2*kd, kd]  # from the spice simulation
apid = [1, -1]
print ("bpid vale")
print (bpid)
print ("apid vale")
print (apid)
plt.figure(3)
Gpid = ct.tf(bpid, apid, dt)
mag, phase, omega = ct.bode_plot(Gpid, omega)
plt.show()
plt.draw()
#open loop
GH = ct.series(Gpid, Gt)
plt.figure(4)
mag, phase, omega = ct.bode_plot(GH, omega)
plt.show()
plt.draw()
#feedback
Gfeed = ct.feedback(GH, sys2=1, sign=-1)
plt.figure(5)
mag, phase, omega = ct.bode_plot(Gfeed, omega)
plt.show()
plt.draw()
plt.figure(6)
tin = np.arange(0.0, 0.005, 0.0001)
Tout, yout2 = ct.step_response(Gfeed, T=tin, X0=0.0, input=None, output=None, transpose=False,)
yout1 = np.transpose(yout2)
yout0 = yout1[0]
#yout = yout0[:50]
yout = yout0[:Tout.size]
plt.plot(Tout, yout)
plt.show()
plt.draw()
| [
"[email protected]"
] | |
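As a quick hedged check of the coefficient mapping documented in the script above (b0 = kp + kd + ki, b1 = -kp - 2*kd, b2 = kd), the following runs with plain Python using the same kp, kd, ki values:

fs = 25000.0
kp, kd, ki = 0.0015 / fs, 0.0, 58.0 / fs
b0, b1, b2 = kp + kd + ki, -kp - 2 * kd, kd
# With kd = 0 the controller reduces to a discrete PI: b2 vanishes and the
# numerator evaluated at z = 1 is b0 + b1 + b2 = ki (the integral gain).
assert b2 == 0.0
assert abs((b0 + b1 + b2) - ki) < 1e-15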
63470244c2c7805bd495020c5dd5e40fef97303e | 7606f5755a83ad6670b64d2acf381a54cf635697 | /exercises/1901090010/1001S02E05_array.py | 8f0f333861e9e35aa5184ad7678148535e3985a2 | [] | no_license | imlzg/selfteaching-python-camp | 6c107e53740dbc6721008ea36f66c1b3d0c9ee72 | b620553ef4fb86d8528c54947d98c2f1c2df996e | refs/heads/master | 2022-01-09T04:09:14.849146 | 2019-05-20T15:17:07 | 2019-05-20T15:29:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 330 | py | array=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
print(array)
array.reverse()
ls1=[str(i) for i in array]
print(ls1)
ls2=''.join(ls1)
print(ls2)
ls3=ls2[2:8]
print(ls3)
ls4=ls3[::-1]
print(ls4)
ls5=int(ls4)
print("转换为二进制为:", bin(ls5))
print("转换为八进制为:", oct(ls5))
print("转换为十六进制为:", hex(ls5))
| [
"[email protected]"
] | |
5b37e01087be00d24c666fb7c11df28e7282eda9 | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /seem_case/work_great_world/come_early_company/great_world/know_little_place_above_public_company.py | 95daadfe99dac6c7ffb401779f2a1dd58b021e23 | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | py |
#! /usr/bin/env python
def give_company_at_able_number(str_arg):
last_life(str_arg)
print('try_day')
def last_life(str_arg):
print(str_arg)
if __name__ == '__main__':
give_company_at_able_number('old_man_or_child')
| [
"[email protected]"
] | |
c3737714740d26def0bb562c9116aa4dde0191db | d1f87175c0aa3e8e844be1e6e0ddd75ddef93047 | /k2/python/k2/fsa.py | 30619f79427fa6fea4357ca326475d0e952b7d4c | [
"MIT"
] | permissive | zeta1999/k2 | bb6202fd483a4635cfbfac904db6fcfab5e45f45 | 02c36e8e5c0129042824e5e4b9927530bbb99009 | refs/heads/master | 2023-01-11T07:00:28.755715 | 2020-11-12T04:38:54 | 2020-11-12T04:38:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,053 | py |
# Copyright (c) 2020 Mobvoi Inc. (authors: Fangjun Kuang)
# Guoguo Chen
#
# See ../../../LICENSE for clarification regarding multiple authors
from collections import OrderedDict
from typing import Any
from typing import Iterator
from typing import Optional
from typing import Tuple
import torch
import _k2
from _k2 import RaggedArc
from _k2 import _as_float
from _k2 import _as_int
from _k2 import _fsa_from_str
from _k2 import _fsa_from_tensor
from _k2 import _fsa_to_str
class Fsa(object):
'''This class represents a single fsa or a vector of fsas.
When it denotes a single FSA, its attribute :attr:`shape` is a tuple
containing two elements ``(num_states, None)``; when it represents
a vector of FSAs it is a tuple with three
elements ``(num_fsas, None, None)``. (Caution: it's possible
for a vector of FSAs to have zero or one elements).
An instance of FSA has the following attributes:
- ``arcs``: You will NOT use it directly in Python. It is an instance
of ``_k2.RaggedArc`` with only one method ``values()`` which
returns a 2-D `torch.Tensor`` of dtype ``torch.int32`` with 4
columns. Its number of rows indicates the number of arcs in the
FSA. The first column represents the source states, second
column the destination states, third column the labels and the
fourth column is the score. Note that the score is actually
a float number but it is **reinterpreted** as an integer.
- ``scores``: A 1-D ``torch.Tensor`` of dtype ``torch.float32``. It has
as many entries as the number of arcs representing the score
of every arc.
- ``labels``: A 1-D ``torch.Tensor`` of dtype ``torch.int32``. It has as
many entries as the number of arcs representing the label of
every arc.
It MAY have the following attributes:
- ``symbols``: An instance of ``k2.SymbolTable``. It maps an entry in
``labels`` to an integer and vice versa. It is used for
visualization only.
    - ``aux_labels``: A 1-D ``torch.Tensor`` of dtype ``torch.int32``. It has the
same shape as ``labels``. NOTE: We will change it to a
ragged tensor in the future.
    - ``aux_symbols``: An instance of ``k2.SymbolTable``. It maps an entry in
``aux_labels`` to an integer and vice versa.
- ``properties``: An integer that encodes the properties of the FSA. It is
returned by :func:`get_properties`.
It MAY have other attributes that set by users.
CAUTION:
When an attribute is an instance of ``torch.Tensor``, its ``shape[0]``
has to be equal to the number arcs. Otherwise, an assertion error
will be thrown.
NOTE:
``symbols`` and ``aux_symbols`` are symbol tables, while ``labels``
and ``aux_labels`` are instances of ``torch.Tensor``.
'''
def __init__(self,
tensor: torch.Tensor,
aux_labels: Optional[torch.Tensor] = None) -> None:
'''Build an Fsa from a tensor with optional aux_labels.
It is useful when loading an Fsa from file.
Args:
tensor:
A torch tensor of dtype `torch.int32` with 4 columns.
Each row represents an arc. Column 0 is the src_state,
column 1 the dest_state, column 2 the label, and column
3 the score.
Caution:
Scores are floats and their binary pattern is
**reinterpreted** as integers and saved in a tensor
of dtype `torch.int32`.
aux_labels:
Optional. If not None, it associates an aux_label with every arc,
so it has as many rows as `tensor`. It is a 1-D tensor of dtype
`torch.int32`.
Returns:
An instance of Fsa.
'''
self._init_internal()
self.arcs: RaggedArc = _fsa_from_tensor(tensor)
self._init_properties()
self._tensor_attr['scores'] = _as_float(self.arcs.values()[:, -1])
if aux_labels is not None:
self.aux_labels = aux_labels.to(torch.int32)
def __str__(self) -> str:
'''Return a string representation of this object (note: does not
contain all the information in it for now)'''
if hasattr(self, 'aux_labels'):
aux_labels = self.aux_labels.to(torch.int32)
else:
aux_labels = None
ans = "k2.Fsa: " + _fsa_to_str(self.arcs, False, aux_labels)
ans += "\nproperties_str = " + _k2.fsa_properties_as_str(
self._properties)
return ans
def _init_properties(self) -> None:
if self.arcs.num_axes() == 2:
properties = _k2.get_fsa_basic_properties(self.arcs)
else:
properties = _k2.get_fsa_vec_basic_properties(self.arcs)
self._properties = properties
if properties & 1 != 1:
raise ValueError(
"Fsa is not valid, properties are: {} = {}, arcs are: {}".
format(properties, _k2.fsa_properties_as_str(properties),
str(self.arcs)))
def _init_internal(self) -> None:
# There are three kinds of attribute dictionaries:
#
# - `_tensor_attr`
# It saves attribute values of type torch.Tensor. `shape[0]` of
# attribute values have to be equal to the number of arcs
# in the FSA.
#
# - `_non_tensor_attr`
# It saves non-tensor attributes, e.g., :class:`SymbolTable`.
#
# - `_grad_cache`
# It contains tensors for autograd. Users should NOT manipulate it.
# The dict is filled in automagically.
self._tensor_attr = OrderedDict()
self._non_tensor_attr = OrderedDict()
self._grad_cache = OrderedDict()
# The `_grad_cache` dict contains the following attributes:
#
# - `state_batches`:
# returned by :func:`_k2._get_state_batches`
# - `dest_states`:
# returned by :func:`_k2._get_dest_states`
# - `incoming_arcs`:
# returned by :func:`_k2._get_incoming_arcs`
# - `entering_arc_batches`:
# returned by :func:`_k2._get_entering_arc_index_batches`
# - `leaving_arc_batches`:
# returned by :func:`_k2._get_leaving_arc_index_batches`
# - `forward_scores_tropical`:
# returned by :func:`_k2._get_forward_scores_float`
# with `log_semiring=False`
# - `forward_scores_log`:
# returned by :func:`_k2._get_forward_scores_float` or
# :func:`_get_forward_scores_double` with `log_semiring=True`
# - `tot_scores_tropical`:
# returned by :func:`_k2._get_tot_scores_float` or
# :func:`_k2._get_tot_scores_double` with
# `forward_scores_tropical`.
# - `tot_scores_log`:
# returned by :func:`_k2._get_tot_scores_float` or
# :func:`_k2._get_tot_scores_double` with
# `forward_scores_log`.
# - `backward_scores_tropical`:
# returned by :func:`_k2._get_backward_scores_float` or
# :func:`_k2._get_backward_scores_double` with
# `log_semiring=False`
# - `backward_scores_log_semiring`:
# returned by :func:`_k2._get_backward_scores_float` or
# :func:`_k2._get_backward_scores_double` with
# `log_semiring=True`
# - `entering_arcs`:
# returned by :func:`_k2._get_forward_scores_float` or
# :func:`_get_forward_scores_double` with `log_semiring=False`
def __setattr__(self, name: str, value: Any) -> None:
'''
Caution:
We save a reference to ``value``. If you need to change ``value``
afterwards, please consider passing a copy of it.
'''
if name in ('_tensor_attr', '_non_tensor_attr', 'arcs', '_properties'):
object.__setattr__(self, name, value)
elif isinstance(value, torch.Tensor):
assert value.shape[0] == self.arcs.values().shape[0]
if name == 'labels':
assert value.dtype == torch.int32
self.arcs.values()[:, 2] = value
return
self._tensor_attr[name] = value
if name == 'scores':
assert value.dtype == torch.float32
# NOTE: we **reinterpret** the float patterns
# to integer patterns here.
self.arcs.values()[:, -1] = _as_int(value.detach())
else:
assert name != 'properties'
self._non_tensor_attr[name] = value
def __getattr__(self, name: str) -> Any:
if name == 'labels':
return self.arcs.values()[:, 2]
elif name in self._tensor_attr:
return self._tensor_attr[name]
elif name in self._non_tensor_attr:
return self._non_tensor_attr[name]
elif name in self._grad_cache:
return self._grad_cache[name]
elif name == 'properties':
return self._properties
elif name == 'properties_str':
return _k2.fsa_properties_as_str(self._properties)
raise AttributeError(f'Unknown attribute {name}')
def __delattr__(self, name: str) -> None:
assert name not in ('arcs', 'scores', 'labels', 'properties',
'_properties')
if name in self._tensor_attr:
del self._tensor_attr[name]
elif name in self._non_tensor_attr:
del self._non_tensor_attr[name]
elif name in self._grad_cache:
del self._grad_cache[name]
else:
super().__delattr__(name)
def _update_cache(self, name: str, value: Any) -> None:
self._grad_cache[name] = value
def update_state_batches(self) -> _k2.RaggedInt:
if hasattr(self, 'state_batches') is False:
state_batches = _k2._get_state_batches(self.arcs, transpose=True)
self._update_cache('state_batches', state_batches)
return self.state_batches
def update_dest_states(self) -> torch.Tensor:
if hasattr(self, 'dest_states') is False:
dest_states = _k2._get_dest_states(self.arcs, as_idx01=True)
self._update_cache('dest_states', dest_states)
return self.dest_states
def update_incoming_arcs(self) -> _k2.RaggedInt:
if hasattr(self, 'incoming_arcs') is False:
dest_states = self.update_dest_states()
incoming_arcs = _k2._get_incoming_arcs(self.arcs, dest_states)
self._update_cache('incoming_arcs', incoming_arcs)
return self.incoming_arcs
def update_entering_arc_batches(self) -> _k2.RaggedInt:
if hasattr(self, 'entering_arc_batches') is False:
incoming_arcs = self.update_incoming_arcs()
state_batches = self.update_state_batches()
entering_arc_batches = _k2._get_entering_arc_index_batches(
self.arcs,
incoming_arcs=incoming_arcs,
state_batches=state_batches)
self._update_cache('entering_arc_batches', entering_arc_batches)
return self.entering_arc_batches
def update_leaving_arc_batches(self) -> _k2.RaggedInt:
if hasattr(self, 'leaving_arc_batches') is False:
state_batches = self.update_state_batches()
leaving_arc_batches = _k2._get_leaving_arc_index_batches(
self.arcs, state_batches)
self._update_cache('leaving_arc_batches', leaving_arc_batches)
return self.leaving_arc_batches
def update_forward_scores_tropical(self, use_float_scores) -> torch.Tensor:
if hasattr(self, 'forward_scores_tropical') is False \
or (use_float_scores is True and self.forward_scores_tropical.dtype == torch.float64) \
or (use_float_scores is False and self.forward_scores_tropical.dtype == torch.float32): # noqa
if use_float_scores:
func = _k2._get_forward_scores_float
else:
func = _k2._get_forward_scores_double
state_batches = self.update_state_batches()
entering_arc_batches = self.update_entering_arc_batches()
forward_scores_tropical, entering_arcs = func(
self.arcs,
state_batches=state_batches,
entering_arc_batches=entering_arc_batches,
log_semiring=False)
self._update_cache('forward_scores_tropical',
forward_scores_tropical)
self._update_cache('entering_arcs', entering_arcs)
return self.forward_scores_tropical
def update_forward_scores_log(self, use_float_scores) -> torch.Tensor:
if hasattr(self, 'forward_scores_log') is False \
or (use_float_scores is True and self.forward_scores_log.dtype == torch.float64) \
or (use_float_scores is False and self.forward_scores_log.dtype == torch.float32): # noqa
if use_float_scores:
func = _k2._get_forward_scores_float
else:
func = _k2._get_forward_scores_double
state_batches = self.update_state_batches()
entering_arc_batches = self.update_entering_arc_batches()
forward_scores_log, _ = func(
self.arcs,
state_batches=state_batches,
entering_arc_batches=entering_arc_batches,
log_semiring=True)
self._update_cache('forward_scores_log', forward_scores_log)
return self.forward_scores_log
def update_tot_scores_tropical(self, use_float_scores) -> torch.Tensor:
if hasattr(self, 'tot_scores_tropical') is False \
or (use_float_scores is True and self.tot_scores_tropical.dtype == torch.float64) \
or (use_float_scores is False and self.tot_scores_tropical.dtype == torch.float32): # noqa
if use_float_scores is True:
func = _k2._get_tot_scores_float
else:
func = _k2._get_tot_scores_double
forward_scores_tropical = self.update_forward_scores_tropical(
use_float_scores)
tot_scores_tropical = func(self.arcs, forward_scores_tropical)
self._update_cache('tot_scores_tropical', tot_scores_tropical)
return self.tot_scores_tropical
def update_tot_scores_log(self, use_float_scores) -> torch.Tensor:
if hasattr(self, 'tot_scores_log') is False \
or (use_float_scores is True and self.tot_scores_log.dtype == torch.float64) \
or (use_float_scores is False and self.tot_scores_log.dtype == torch.float32): # noqa
if use_float_scores is True:
func = _k2._get_tot_scores_float
else:
func = _k2._get_tot_scores_double
forward_scores_log = self.update_forward_scores_log(
use_float_scores)
tot_scores_log = func(self.arcs, forward_scores_log)
self._update_cache('tot_scores_log', tot_scores_log)
return self.tot_scores_log
def update_backward_scores_tropical(self,
use_float_scores) -> torch.Tensor:
if hasattr(self, 'backward_scores_tropical') is False \
or (use_float_scores is True and self.backward_scores_tropical.dtype == torch.float64) \
or (use_float_scores is False and self.backward_scores_tropical.dtype == torch.float32): # noqa
if use_float_scores:
func = _k2._get_backward_scores_float
else:
func = _k2._get_backward_scores_double
state_batches = self.update_state_batches()
leaving_arc_batches = self.update_leaving_arc_batches()
tot_scores_tropical = self.update_tot_scores_tropical(
use_float_scores)
backward_scores_tropical = func(
self.arcs,
state_batches=state_batches,
leaving_arc_batches=leaving_arc_batches,
tot_scores=tot_scores_tropical,
log_semiring=False)
self._update_cache('backward_scores_tropical',
backward_scores_tropical)
return self.backward_scores_tropical
def update_backward_scores_log(self, use_float_scores) -> torch.Tensor:
if hasattr(self, 'backward_scores_log') is False \
or (use_float_scores is True and self.backward_scores_log.dtype == torch.float64) \
or (use_float_scores is False and self.backward_scores_log.dtype == torch.float32): # noqa
if use_float_scores:
func = _k2._get_backward_scores_float
else:
func = _k2._get_backward_scores_double
state_batches = self.update_state_batches()
leaving_arc_batches = self.update_leaving_arc_batches()
tot_scores_log = self.update_tot_scores_log(use_float_scores)
backward_scores_log = func(self.arcs,
state_batches=state_batches,
leaving_arc_batches=leaving_arc_batches,
tot_scores=tot_scores_log,
log_semiring=True)
self._update_cache('backward_scores_log', backward_scores_log)
return self.backward_scores_log
def update_entering_arcs(self, use_float_scores) -> torch.Tensor:
if hasattr(self, 'entering_arcs') is False:
if hasattr(self, 'forward_scores_tropical'):
del self.forward_scores_tropical
self.update_forward_scores_tropical(use_float_scores)
return self.entering_arcs
def requires_grad_(self, requires_grad: bool) -> 'Fsa':
'''Change if autograd should record operations on this FSA:
Sets the `scores`'s requires_grad attribute in-place.
Returns this FSA.
Caution:
This is an **in-place** operation as you can see that the function
name ends with `_`.
Args:
requires_grad:
If autograd should record operations on this FSA or not.
Returns:
This FSA itself.
'''
self.scores.requires_grad_(requires_grad)
return self
def invert_(self) -> 'Fsa':
'''Swap the ``labels`` and ``aux_labels``.
If there are symbol tables associated with ``labels`` and
``aux_labels``, they are also swapped.
It is a no-op if the FSA contains no ``aux_labels``.
CAUTION:
The function name ends with an underscore which means this
is an **in-place** operation.
Returns:
Return ``self``.
'''
if hasattr(self, 'aux_labels'):
aux_labels = self.aux_labels
self.aux_labels = self.labels.clone()
self.labels = aux_labels
symbols = getattr(self, 'symbols', None)
aux_symbols = getattr(self, 'aux_symbols', None)
if symbols is not None:
del self.symbols
if aux_symbols is not None:
del self.aux_symbols
if symbols is not None:
self.aux_symbols = symbols
if aux_symbols is not None:
self.symbols = aux_symbols
self._init_properties()
return self
def is_cpu(self) -> bool:
'''Return true if this FSA is on CPU.
Returns:
True if the FSA is on CPU; False otherwise.
'''
return self.arcs.is_cpu()
def is_cuda(self) -> bool:
'''Return true if this FSA is on GPU.
Returns:
True if the FSA is on GPU; False otherwise.
'''
return self.arcs.is_cuda()
@property
def device(self) -> torch.device:
return self.scores.device
def __getitem__(self, i: int) -> 'Fsa':
'''Get the i-th FSA.
Caution:
`self` has to be an FsaVec.
Args:
i:
The i-th FSA to select. 0 <= i < self.arcs.dim0().
Returns:
The i-th FSA. Note it is a single FSA.
'''
assert len(self.shape) == 3
assert 0 <= i < self.shape[0]
ragged_arc, start = self.arcs.index(0, i)
end = start + ragged_arc.values().shape[0]
out_fsa = Fsa.from_ragged_arc(ragged_arc)
for name, value in self.named_tensor_attr():
setattr(out_fsa, name, value[start:end])
for name, value in self.named_non_tensor_attr():
setattr(out_fsa, name, value)
out_fsa._init_properties()
return out_fsa
def to_(self, device: torch.device) -> 'Fsa':
'''Move the FSA onto a given device.
Caution:
This is an in-place operation.
Args:
device:
An instance of `torch.device`. It supports only cpu and cuda.
Returns:
Return `self`.
'''
assert device.type in ('cpu', 'cuda')
if device.type == 'cpu' and self.is_cpu():
return self
elif device.type == 'cuda' and self.is_cuda():
return self
if device.type == 'cpu':
self.arcs = self.arcs.to_cpu()
else:
self.arcs = self.arcs.to_cuda(device.index)
for name, value in self.named_tensor_attr():
setattr(self, name, value.to(device))
self._grad_cache = OrderedDict()
return self
def named_tensor_attr(self) -> Iterator[Tuple[str, torch.Tensor]]:
'''Return an iterator over tensor attributes containing both
the name of the attribute as well as the tensor value.
Returns:
A tuple containing the name and the value.
'''
for name, value in self._tensor_attr.items():
yield name, value
def named_non_tensor_attr(self) -> Iterator[Tuple[str, Any]]:
'''Return an iterator over non-tensor attributes containing both
the name of the attribute as well as the value.
Returns:
A tuple containing the name and the value.
'''
for name, value in self._non_tensor_attr.items():
yield name, value
@property
def shape(self) -> Tuple[int, ...]:
'''
Returns:
``(num_states, None)`` if this is an Fsa;
``(num_fsas, None, None)`` if this is an FsaVec.
'''
if self.arcs.num_axes() == 2:
return (self.arcs.dim0(), None)
elif self.arcs.num_axes() == 3:
return (self.arcs.dim0(), None, None)
else:
raise ValueError(f'Unsupported num_axes: {self.arcs.num_axes()}')
@classmethod
def from_ragged_arc(cls, ragged_arc: RaggedArc) -> 'Fsa':
'''Create an Fsa from a RaggedArc directly.
Note:
Fsa algorithms will always produce some RaggedArc output. You can
use this function to construct a Python FSA from RaggedArc.
Args:
ragged_arc:
The input ragged arc. It is usually generated by some FSA
algorithms. You do not need to know how to construct it in Python.
Returns:
An Fsa.
'''
ans = cls.__new__(cls)
super(Fsa, ans).__init__()
ans._init_internal()
ans.arcs = ragged_arc
ans._init_properties()
ans._tensor_attr['scores'] = _as_float(ans.arcs.values()[:, -1])
return ans
@classmethod
def from_str(cls, s: str) -> 'Fsa':
'''Create an Fsa from a string.
The given string `s` consists of lines with the following format:
(1) When it represents an acceptor:
src_state dest_state label score
(2) When it represents a transducer:
src_state dest_state label aux_label score
The line for the final state consists of only one field:
final_state
Note:
Fields are separated by space(s), tab(s) or both. The `score`
field is a float, while other fields are integers.
Caution:
The first column has to be non-decreasing.
Caution:
The final state has the largest state number. There is only
one final state. All arcs that are connected to the final state
have label -1.
Args:
s:
The input string. Refer to the above comment for its format.
'''
# Figure out acceptor/transducer for k2 fsa.
acceptor = True
line = s.strip().split('\n', 1)[0]
fields = line.strip().split()
assert len(fields) == 4 or len(fields) == 5
if len(fields) == 5:
acceptor = False
ans = cls.__new__(cls)
super(Fsa, ans).__init__()
ans._init_internal()
arcs, aux_labels = _fsa_from_str(s, acceptor, False)
ans.arcs = arcs
ans._init_properties()
ans._tensor_attr['scores'] = _as_float(ans.arcs.values()[:, -1])
if aux_labels is not None:
ans.aux_labels = aux_labels.to(torch.int32)
return ans
@classmethod
def from_openfst(cls, s: str, acceptor: bool = True) -> 'Fsa':
'''Create an Fsa from a string in OpenFST format.
The given string `s` consists of lines with the following format:
(1) When it represents an acceptor:
src_state dest_state label score
(2) When it represents a transducer:
src_state dest_state label aux_label score
The line for the final state consists of two fields:
final_state score
Note:
Fields are separated by space(s), tab(s) or both. The `score`
field is a float, while other fields are integers.
There might be multiple final states. Also, OpenFst may omit the score
if it is 0.0.
Args:
s:
The input string. Refer to the above comment for its format.
acceptor:
Optional. If true, interpret the input string as an acceptor;
otherwise, interpret it as a transducer.
'''
ans = cls.__new__(cls)
super(Fsa, ans).__init__()
ans._init_internal()
arcs, aux_labels = _fsa_from_str(s, acceptor, True)
ans.arcs = arcs
ans._init_properties()
ans._tensor_attr['scores'] = _as_float(ans.arcs.values()[:, -1])
if aux_labels is not None:
ans.aux_labels = aux_labels.to(torch.int32)
return ans
| [
"[email protected]"
] | |
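For reference, a short hedged example of the acceptor format accepted by Fsa.from_str above (requires k2 and PyTorch installed); each arc line is "src_state dest_state label score" and the final state number sits alone on the last line.

s = '\n'.join(['0 1 10 0.1',
               '0 1 20 0.2',
               '1 2 -1 0.3',   # arcs into the final state carry label -1
               '2'])
fsa = Fsa.from_str(s)
fsa.requires_grad_(True)   # record autograd on the arc scores
print(fsa.shape)           # (3, None) -- a single FSA with 3 states
print(fsa.scores)          # tensor([0.1, 0.2, 0.3], requires_grad=True)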
73a44163086c3f3bc57dae1906bd48ffdc92761c | f0ee987789f5a6fe8f104890e95ee56e53f5b9b2 | /pythia-0.8/packages/pyre/pyre/inventory/odb/Registry.py | c72724141667dc8e6e35621c08aee1ae78d01a96 | [] | no_license | echoi/Coupling_SNAC_CHILD | 457c01adc439e6beb257ac8a33915d5db9a5591b | b888c668084a3172ffccdcc5c4b8e7fff7c503f2 | refs/heads/master | 2021-01-01T18:34:00.403660 | 2015-10-26T13:48:18 | 2015-10-26T13:48:18 | 19,891,618 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,864 | py |
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Michael A.G. Aivazis
# California Institute of Technology
# (C) 1998-2005 All Rights Reserved
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
class Registry(object):
def identify(self, inspector):
return inspector.onRegistry(self)
def getFacility(self, name, default=None):
return self.facilities.get(name, default)
def getProperty(self, name, default=''):
try:
return self.properties[name].value
except KeyError:
return default
# UNREACHABLE
import journal
journal.firewall("inventory").log("UNREACHABLE")
return
def setProperty(self, name, value, locator):
self.properties[name] = self._createDescriptor(value, locator)
return
def deleteProperty(self, name):
"""remove the named property"""
try:
del self.properties[name]
except KeyError:
pass
return
def update(self, registry):
if not registry:
return self
for name, descriptor in registry.properties.iteritems():
self.setProperty(name, descriptor.value, descriptor.locator)
for name, node in registry.facilities.iteritems():
self.getNode(name).update(node)
return self
def getNode(self, name):
try:
node = self.facilities[name]
except KeyError:
node = Registry(name)
self.facilities[name] = node
return node
def attachNode(self, node):
self.facilities[node.name] = node
return
def extractNode(self, facility):
try:
node = self.facilities[facility]
except KeyError:
return None
del self.facilities[facility]
return node
def render(self):
listing = [
("%s.%s" % (self.name, name), descriptor.value, "%s" % descriptor.locator)
for name, descriptor in self.properties.iteritems()
]
listing += [
("%s.%s" % (self.name, name), value, "%s" % locator)
for facility in self.facilities.itervalues()
for name, value, locator in facility.render()
]
return listing
def __init__(self, name):
self.name = name
self.properties = {}
self.facilities = {}
return
def _createDescriptor(self, value, locator):
from Descriptor import Descriptor
return Descriptor(value, locator)
# version
__id__ = "$Id: Registry.py,v 1.1.1.1 2005/03/08 16:13:43 aivazis Exp $"
# End of file
| [
"[email protected]"
] | |
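A brief illustrative use of the Registry class above; getNode and getProperty run standalone, while setProperty additionally needs the Descriptor module from the surrounding pyre package.

root = Registry("root")
solver = root.getNode("solver")    # child registry is created on first access
print(sorted(root.facilities))     # ['solver']
print(root.getProperty("tolerance", default="1e-6"))  # no such property -> '1e-6'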
cfd0938addfe5e354eb030032a683f83195c8112 | 312a8fde11293cb142334a3860966ec1f75ac401 | /timesketch/views/spa.py | c7401a6e3a270c6062b441b881b34d8e5e754333 | [
"Apache-2.0"
] | permissive | google/timesketch | f0fd09062a8a24bac581d2d4286d095d667d2f10 | 24f471b58ca4a87cb053961b5f05c07a544ca7b8 | refs/heads/master | 2023-08-31T21:48:19.602686 | 2023-08-31T11:24:17 | 2023-08-31T11:24:17 | 21,009,909 | 2,263 | 647 | Apache-2.0 | 2023-09-14T14:08:07 | 2014-06-19T17:49:45 | Python | UTF-8 | Python | false | false | 1,540 | py |
# Copyright 2019 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module implements HTTP request handler."""
from __future__ import unicode_literals
from flask import Blueprint
from flask import redirect
from flask import render_template
from flask_login import login_required
# Register flask blueprint
spa_views = Blueprint("spa_views", __name__)
@spa_views.route("/sketch/<int:sketch_id>/explore/view/<int:view_id>/", methods=["GET"])
@login_required
# pylint: disable=unused-argument
def redirect_view(sketch_id, view_id):
"""Redirect old (deprecated) view URLs to scheme.
Returns:
Redirect to new URL scheme.
"""
return redirect("/sketch/{0:d}/explore?view={1:d}".format(sketch_id, view_id))
@spa_views.route("/", defaults={"path": ""})
@spa_views.route("/<path:path>")
@login_required
# pylint: disable=unused-argument
def overview(path):
"""Generates the template.
Returns:
Template with context.
"""
return render_template("index.html")
| [
"[email protected]"
] | |
eb8afd30c6b49c35fee9b1cfbc72fd3d53933d36 | bd185738ea6a74d1e76d9fc9d8cbc59f94990842 | /onadata/libs/serializers/xform_serializer.py | fa30b5ec3f282006bb33a18f94727949f24141c2 | [
"BSD-2-Clause"
] | permissive | aondiaye/myhelpline | c4ad9e812b3a13c6c3c8bc65028a3d3567fd6a98 | d72120ee31b6713cbaec79f299f5ee8bcb7ea429 | refs/heads/master | 2020-12-22T05:32:59.576519 | 2019-10-29T08:52:55 | 2019-10-29T08:52:55 | 236,683,448 | 1 | 0 | NOASSERTION | 2020-01-28T07:50:18 | 2020-01-28T07:50:17 | null | UTF-8 | Python | false | false | 15,564 | py |
import os
import logging
from hashlib import md5
from future.moves.urllib.parse import urlparse
from future.utils import listvalues
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator
from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.db.models import Count
from requests.exceptions import ConnectionError
from rest_framework import serializers
from rest_framework.reverse import reverse
from onadata.apps.logger.models import DataView, Instance, XForm
from onadata.apps.main.models.meta_data import MetaData
from onadata.libs.permissions import get_role, is_organization
from onadata.libs.serializers.dataview_serializer import \
DataViewMinimalSerializer
from onadata.libs.serializers.metadata_serializer import MetaDataSerializer
from onadata.libs.serializers.tag_list_serializer import TagListSerializer
from onadata.libs.utils.cache_tools import (
ENKETO_PREVIEW_URL_CACHE, ENKETO_URL_CACHE, XFORM_DATA_VERSIONS,
XFORM_LINKED_DATAVIEWS, XFORM_METADATA_CACHE, XFORM_PERMISSIONS_CACHE,
XFORM_COUNT)
from onadata.libs.utils.decorators import check_obj
from onadata.libs.utils.viewer_tools import (
enketo_url, get_enketo_preview_url, get_form_url)
from onadata.libs.exceptions import EnketoError
from onadata.libs.utils.common_tags import (GROUP_DELIMETER_TAG,
REPEAT_INDEX_TAGS)
def _create_enketo_url(request, xform):
"""
Generate enketo url for a form
:param request:
:param xform:
:return: enketo url
"""
form_url = get_form_url(request, xform.user.username,
settings.ENKETO_PROTOCOL, xform_pk=xform.pk)
url = ""
try:
url = enketo_url(form_url, xform.id_string)
MetaData.enketo_url(xform, url)
    except ConnectionError as e:
        # Exception objects have no .message attribute on Python 3;
        # format the exception itself instead.
        logging.exception("Connection Error: %s" % e)
    except EnketoError as e:
        logging.exception("Enketo Error: %s" % e)
return url
def _set_cache(cache_key, cache_data, obj):
"""
Utility function that set the specified info to the provided cache key
:param cache_key:
:param cache_data:
:param obj:
:return: Data that has been cached
"""
cache.set('{}{}'.format(cache_key, obj.pk), cache_data)
return cache_data
def user_to_username(item):
item['user'] = item['user'].username
return item
class XFormMixin(object):
def _get_metadata(self, obj, key):
if key:
for m in obj.metadata_set.all():
if m.data_type == key:
return m.data_value
else:
return obj.metadata_set.all()
def get_users(self, obj):
xform_perms = []
if obj:
xform_perms = cache.get(
'{}{}'.format(XFORM_PERMISSIONS_CACHE, obj.pk))
if xform_perms:
return xform_perms
cache.set('{}{}'.format(XFORM_PERMISSIONS_CACHE, obj.pk),
xform_perms)
data = {}
for perm in obj.xformuserobjectpermission_set.all():
if perm.user_id not in data:
user = perm.user
data[perm.user_id] = {
'permissions': [],
'is_org': is_organization(user.profile),
'metadata': user.profile.metadata,
'first_name': user.first_name,
'last_name': user.last_name,
'user': user.username
}
if perm.user_id in data:
data[perm.user_id]['permissions'].append(
perm.permission.codename)
for k in list(data):
data[k]['permissions'].sort()
data[k]['role'] = get_role(data[k]['permissions'], XForm)
del (data[k]['permissions'])
xform_perms = listvalues(data)
cache.set('{}{}'.format(XFORM_PERMISSIONS_CACHE, obj.pk), xform_perms)
return xform_perms
def get_enketo_url(self, obj):
if obj:
_enketo_url = cache.get('{}{}'.format(ENKETO_URL_CACHE, obj.pk))
if _enketo_url:
return _enketo_url
url = self._get_metadata(obj, 'enketo_url')
if url is None:
url = _create_enketo_url(self.context.get('request'), obj)
return _set_cache(ENKETO_URL_CACHE, url, obj)
return None
def get_enketo_preview_url(self, obj):
if obj:
_enketo_preview_url = cache.get(
'{}{}'.format(ENKETO_PREVIEW_URL_CACHE, obj.pk))
if _enketo_preview_url:
return _enketo_preview_url
url = self._get_metadata(obj, 'enketo_preview_url')
if url is None:
try:
url = get_enketo_preview_url(
self.context.get('request'), obj.user.username,
obj.id_string, xform_pk=obj.pk)
except Exception:
return url
else:
MetaData.enketo_preview_url(obj, url)
return _set_cache(ENKETO_PREVIEW_URL_CACHE, url, obj)
return None
def get_data_views(self, obj):
if obj:
key = '{}{}'.format(XFORM_LINKED_DATAVIEWS, obj.pk)
data_views = cache.get(key)
if data_views:
return data_views
data_views = DataViewMinimalSerializer(
obj.dataview_set.filter(deleted_at__isnull=True),
many=True, context=self.context).data
cache.set(key, list(data_views))
return data_views
return []
def get_num_of_submissions(self, obj):
if obj:
key = '{}{}'.format(XFORM_COUNT, obj.pk)
count = cache.get(key)
if count:
return count
force_update = True if obj.is_merged_dataset else False
count = obj.submission_count(force_update)
cache.set(key, count)
return count
def get_last_submission_time(self, obj):
"""Return datetime of last submission
If a form is a merged dataset then it is picked from the list of forms
attached to that merged dataset.
"""
if 'last_submission_time' not in self.fields:
return None
if obj.is_merged_dataset:
values = [
x.last_submission_time.isoformat()
for x in obj.mergedxform.xforms.only('last_submission_time')
if x.last_submission_time
]
if values:
return sorted(values, reverse=True)[0]
return obj.last_submission_time.isoformat() \
if obj.last_submission_time else None
class XFormBaseSerializer(XFormMixin, serializers.HyperlinkedModelSerializer):
formid = serializers.ReadOnlyField(source='id')
owner = serializers.HyperlinkedRelatedField(
view_name='user-detail',
source='user',
lookup_field='username',
queryset=User.objects.exclude(
username__iexact=settings.ANONYMOUS_DEFAULT_USERNAME))
created_by = serializers.HyperlinkedRelatedField(
view_name='user-detail',
lookup_field='username',
queryset=User.objects.exclude(
username__iexact=settings.ANONYMOUS_DEFAULT_USERNAME))
public = serializers.BooleanField(source='shared')
public_data = serializers.BooleanField(source='shared_data')
require_auth = serializers.BooleanField()
tags = TagListSerializer(read_only=True)
title = serializers.CharField(max_length=255)
url = serializers.HyperlinkedIdentityField(
view_name='xform-detail', lookup_field='pk')
users = serializers.SerializerMethodField()
enketo_url = serializers.SerializerMethodField()
enketo_preview_url = serializers.SerializerMethodField()
num_of_submissions = serializers.SerializerMethodField()
last_submission_time = serializers.SerializerMethodField()
data_views = serializers.SerializerMethodField()
class Meta:
model = XForm
read_only_fields = ('json', 'xml', 'date_created', 'date_modified',
'encrypted', 'bamboo_dataset',
'last_submission_time', 'is_merged_dataset')
exclude = ('json', 'xml', 'xls', 'user', 'has_start_time', 'shared',
'shared_data', 'deleted_at', 'deleted_by')
class XFormSerializer(XFormMixin, serializers.HyperlinkedModelSerializer):
formid = serializers.ReadOnlyField(source='id')
metadata = serializers.SerializerMethodField()
owner = serializers.HyperlinkedRelatedField(
view_name='user-detail',
source='user',
lookup_field='username',
queryset=User.objects.exclude(
username__iexact=settings.ANONYMOUS_DEFAULT_USERNAME))
created_by = serializers.HyperlinkedRelatedField(
view_name='user-detail',
lookup_field='username',
queryset=User.objects.exclude(
username__iexact=settings.ANONYMOUS_DEFAULT_USERNAME))
public = serializers.BooleanField(source='shared')
public_data = serializers.BooleanField(source='shared_data')
require_auth = serializers.BooleanField()
submission_count_for_today = serializers.ReadOnlyField()
tags = TagListSerializer(read_only=True)
title = serializers.CharField(max_length=255)
url = serializers.HyperlinkedIdentityField(
view_name='xform-detail', lookup_field='pk')
users = serializers.SerializerMethodField()
enketo_url = serializers.SerializerMethodField()
enketo_preview_url = serializers.SerializerMethodField()
num_of_submissions = serializers.SerializerMethodField()
last_submission_time = serializers.SerializerMethodField()
form_versions = serializers.SerializerMethodField()
data_views = serializers.SerializerMethodField()
class Meta:
model = XForm
read_only_fields = ('json', 'xml', 'date_created', 'date_modified',
'encrypted', 'bamboo_dataset',
'last_submission_time', 'is_merged_dataset')
exclude = ('json', 'xml', 'xls', 'user', 'has_start_time', 'shared',
'shared_data', 'deleted_at', 'deleted_by')
def get_metadata(self, obj):
xform_metadata = []
if obj:
xform_metadata = cache.get(
'{}{}'.format(XFORM_METADATA_CACHE, obj.pk))
if xform_metadata:
return xform_metadata
xform_metadata = list(
MetaDataSerializer(
obj.metadata_set.all(), many=True, context=self.context)
.data)
cache.set('{}{}'.format(XFORM_METADATA_CACHE, obj.pk),
xform_metadata)
return xform_metadata
def get_form_versions(self, obj):
versions = []
if obj:
versions = cache.get('{}{}'.format(XFORM_DATA_VERSIONS, obj.pk))
if versions:
return versions
versions = list(
Instance.objects.filter(xform=obj, deleted_at__isnull=True)
.values('version').annotate(total=Count('version')))
if versions:
cache.set('{}{}'.format(XFORM_DATA_VERSIONS, obj.pk),
list(versions))
return versions
class XFormCreateSerializer(XFormSerializer):
has_id_string_changed = serializers.SerializerMethodField()
def get_has_id_string_changed(self, obj):
return obj.has_id_string_changed
class XFormListSerializer(serializers.Serializer):
formID = serializers.ReadOnlyField(source='id_string')
name = serializers.ReadOnlyField(source='title')
version = serializers.SerializerMethodField()
hash = serializers.ReadOnlyField()
descriptionText = serializers.ReadOnlyField(source='description')
downloadUrl = serializers.SerializerMethodField('get_url')
manifestUrl = serializers.SerializerMethodField('get_manifest_url')
@check_obj
def get_version(self, obj):
if obj.version and obj.version.isdigit():
return obj.version
@check_obj
def get_url(self, obj):
kwargs = {'pk': obj.pk, 'username': obj.user.username}
request = self.context.get('request')
return reverse('download_xform', kwargs=kwargs, request=request)
@check_obj
def get_manifest_url(self, obj):
kwargs = {'pk': obj.pk, 'username': obj.user.username}
request = self.context.get('request')
object_list = MetaData.objects.filter(data_type='media',
object_id=obj.pk)
if object_list:
return reverse('manifest-url', kwargs=kwargs, request=request)
return None
class XFormManifestSerializer(serializers.Serializer):
filename = serializers.SerializerMethodField()
hash = serializers.SerializerMethodField()
downloadUrl = serializers.SerializerMethodField('get_url')
@check_obj
def get_url(self, obj):
kwargs = {
'pk': obj.content_object.pk,
'username': obj.content_object.user.username,
'metadata': obj.pk
}
request = self.context.get('request')
try:
fmt = obj.data_value[obj.data_value.rindex('.') + 1:]
except ValueError:
fmt = 'csv'
url = reverse(
'xform-media', kwargs=kwargs, request=request, format=fmt.lower())
group_delimiter = self.context.get(GROUP_DELIMETER_TAG)
repeat_index_tags = self.context.get(REPEAT_INDEX_TAGS)
if group_delimiter and repeat_index_tags and fmt == 'csv':
return (url+"?%s=%s&%s=%s" % (
GROUP_DELIMETER_TAG, group_delimiter, REPEAT_INDEX_TAGS,
repeat_index_tags))
return url
@check_obj
def get_hash(self, obj):
filename = obj.data_value
hsh = obj.file_hash
parts = filename.split(' ')
# filtered dataset is of the form "xform PK name", xform pk is the
# second item
if len(parts) > 2:
dataset_type = parts[0]
pk = parts[1]
xform = None
if dataset_type == 'xform':
xform = XForm.objects.filter(pk=pk)\
.only('last_submission_time').first()
else:
data_view = DataView.objects.filter(pk=pk)\
.only('xform__last_submission_time').first()
if data_view:
xform = data_view.xform
if xform and xform.last_submission_time:
hsh = u'md5:%s' % (md5(
xform.last_submission_time.isoformat().encode(
'utf-8')).hexdigest())
return u"%s" % (hsh or 'md5:')
@check_obj
def get_filename(self, obj):
filename = obj.data_value
parts = filename.split(' ')
# filtered dataset is of the form "xform PK name", filename is the
# third item
if len(parts) > 2:
filename = u'%s.csv' % parts[2]
else:
try:
URLValidator()(filename)
except ValidationError:
pass
else:
urlparts = urlparse(obj.data_value)
filename = os.path.basename(urlparts.path) or urlparts.netloc
return filename
| [
"[email protected]"
] | |
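A hedged, Django-free sketch of the "<type> <pk> <name>" convention that XFormManifestSerializer.get_filename and get_hash parse above; the example values are made up.

data_value = "xform 42 monthly_report"
parts = data_value.split(' ')
if len(parts) > 2:                     # filtered dataset: "<type> <pk> <name>"
    dataset_type, pk, name = parts[0], parts[1], parts[2]
    filename = u'%s.csv' % name
else:
    filename = data_value              # plain media file name or URL
print(filename)                        # monthly_report.csv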
5a6822dc37ab0181188d67e82b773b3e8e59d089 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /benchmark/startQiskit2487.py | 641b39dc24dfafe2c36913591797358aa26dea60 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,147 | py |
# qubit number=4
# total number=43
import cirq
import qiskit
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[3]) # number=24
prog.cz(input_qubit[0],input_qubit[3]) # number=25
prog.h(input_qubit[3]) # number=26
prog.h(input_qubit[3]) # number=21
prog.cz(input_qubit[0],input_qubit[3]) # number=22
prog.h(input_qubit[3]) # number=23
prog.h(input_qubit[3]) # number=27
prog.cz(input_qubit[0],input_qubit[3]) # number=28
prog.h(input_qubit[3]) # number=29
prog.h(input_qubit[3]) # number=37
prog.cz(input_qubit[0],input_qubit[3]) # number=38
prog.h(input_qubit[3]) # number=39
prog.x(input_qubit[3]) # number=31
prog.h(input_qubit[3]) # number=33
prog.cz(input_qubit[0],input_qubit[3]) # number=34
prog.h(input_qubit[3]) # number=35
prog.cx(input_qubit[0],input_qubit[3]) # number=18
prog.rx(-0.364424747816416,input_qubit[3]) # number=36
prog.y(input_qubit[3]) # number=20
prog.h(input_qubit[3]) # number=40
prog.cz(input_qubit[0],input_qubit[3]) # number=41
prog.h(input_qubit[3]) # number=42
prog.cx(input_qubit[0],input_qubit[3]) # number=12
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
prog.h(input_qubit[0]) # number=5
oracle = build_oracle(n-1, f)
prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
prog.h(input_qubit[1]) # number=6
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=19
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=9
# circuit end
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
backend = BasicAer.get_backend('qasm_simulator')
    sample_shot = 8000
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit2487.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| [
"[email protected]"
] | |
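A quick classical sanity check of the helpers defined at the top of the script above; note that bitwise_xor reverses its result string while bitwise_dot returns a single parity bit.

assert bitwise_dot("111", "101") == "0"    # 1 + 0 + 1 = 2 -> even parity
assert bitwise_dot("111", "100") == "1"    # odd parity
assert bitwise_xor("110", "000") == "011"  # per-bit XOR, then reversed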
3b636769f9391a37b518ae880c23423c3395cc7a | 09a6d8dbad5b92f93791948b5bf9b75f5cb2e5ce | /pennylane/data/attributes/operator/_wires.py | f953c9b8f78d3b7681b90320cb92d2c2f394b24d | [
"Apache-2.0"
] | permissive | PennyLaneAI/pennylane | 458efd5d9457e90ada31ca2ef0fb6bb96a24e9a7 | 0843183ff15a013c2622af5e61fea431d18076d3 | refs/heads/master | 2023-09-03T17:00:43.105784 | 2023-09-01T16:15:07 | 2023-09-01T16:15:07 | 129,936,360 | 1,431 | 410 | Apache-2.0 | 2023-09-14T21:30:56 | 2018-04-17T16:45:42 | Python | UTF-8 | Python | false | false | 1,798 | py |
# Copyright 2018-2023 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains utility function for converting ``Wires`` objects to JSON."""
import json
import numbers
from typing import Any
from pennylane.wires import Wires
class UnserializableWireError(TypeError):
"""Raised if a wire label is not JSON-serializable."""
def __init__(self, wire: Any) -> None:
super().__init__(
f"Cannot serialize wire label '{wire}': Type '{type(wire)}' is not json-serializable."
)
_JSON_TYPES = {int, str, float, type(None), bool}
def wires_to_json(wires: Wires) -> str:
"""Converts ``wires`` to a JSON list, with wire labels in
order of their index.
Returns:
JSON list of wires
Raises:
UnserializableWireError: if any of the wires are not JSON-serializable.
"""
jsonable_wires = []
for w in wires:
if type(w) in _JSON_TYPES:
jsonable_wires.append(w)
elif isinstance(w, numbers.Integral):
w_converted = int(w)
if hash(w_converted) != hash(w):
raise UnserializableWireError(w)
jsonable_wires.append(w_converted)
else:
raise UnserializableWireError(w)
return json.dumps(jsonable_wires)
| [
"[email protected]"
] | |
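A minimal usage sketch for wires_to_json above (assumes pennylane and numpy are installed); JSON-native labels pass through, integral NumPy labels are coerced to int, and anything else raises UnserializableWireError.

import numpy as np
from pennylane.wires import Wires

print(wires_to_json(Wires([0, "aux", 2])))   # '[0, "aux", 2]'
print(wires_to_json(Wires([np.int64(5)])))   # '[5]'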
00c9393bf91ee19f9a3510de60b39d0abef22f7b | 40bee13ce471aa704de68ede1ee8579106396ba3 | /app_folder_name/tests/__init__.py | 68719da9a01694302ea3bf05d95488fed982d647 | [] | no_license | mikelopez/django-app-skel-crud | 9029648982ef8881c5859faac60ada4424cac576 | a01b0eeaffbe12d314eb3f9ab1cd692d6defce34 | refs/heads/master | 2020-12-24T16:58:47.140247 | 2013-03-04T21:28:03 | 2013-03-04T21:28:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67 | py |
from app_folder_name.tests.main_app_tests import TestSchoolStudents
| [
"[email protected]"
] | |
2ece2111f21d161cc9f0a108b71ef2c8149855fc | 99dcb18a9e3ea367272f740b8cbf3c34285a0c08 | /samples/snippets/endpoint_service/deploy_model_sample_test.py | b12b234b8bddc0e5080ee113729a18515957f0d0 | [
"Apache-2.0"
] | permissive | googleapis/python-aiplatform | 926a4873f35dbea15b2fd86c0e16b5e6556d803e | 76b95b92c1d3b87c72d754d8c02b1bca652b9a27 | refs/heads/main | 2023-08-19T23:49:02.180075 | 2023-08-19T13:25:59 | 2023-08-19T13:27:27 | 298,017,988 | 418 | 240 | Apache-2.0 | 2023-09-14T21:08:33 | 2020-09-23T15:43:39 | Python | UTF-8 | Python | false | false | 1,745 | py |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from uuid import uuid4
import deploy_model_sample
import pytest
import helpers
PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT")
LOCATION = "us-central1"
# Resource Name of "permanent_50_flowers_new_model"
MODEL_NAME = "projects/580378083368/locations/us-central1/models/4190810559500779520"
@pytest.fixture(scope="function", autouse=True)
def setup(create_endpoint):
create_endpoint(PROJECT_ID, LOCATION)
yield
@pytest.fixture(scope="function", autouse=True)
def teardown(teardown_endpoint):
yield
def test_ucaip_generated_deploy_model_sample(capsys, shared_state):
assert shared_state["endpoint_name"] is not None
# Deploy existing image classification model to endpoint
deploy_model_sample.deploy_model_sample(
project=PROJECT_ID,
model_name=MODEL_NAME,
deployed_model_display_name=f"temp_deploy_model_test_{uuid4()}",
endpoint_id=shared_state["endpoint_name"].split("/")[-1],
)
# Store deployed model ID for undeploying
out, _ = capsys.readouterr()
assert "deploy_model_response" in out
shared_state["deployed_model_id"] = helpers.get_name(out=out, key="id")
| [
"[email protected]"
] | |
23daf8e24dd73325c2f0ec16a12b272eeab45c76 | 320bd873b6cf5db2fc9194cc4ad782a49373d6ee | /temp/1/11/conftest.py | 576834bb0e0714ce25adf6d1af3e28750d5c2993 | [] | no_license | donniezhanggit/AppiumDemo8_Android | 7b0aed903969e2101330b5da4e89c39e3d591723 | 7a2ed3be27ed6cb27bd4e30e13d48cc8f34aa654 | refs/heads/master | 2020-09-13T17:35:33.749237 | 2019-03-10T10:04:46 | 2019-03-10T10:04:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py |
import pytest
import logging
logging.basicConfig(level=logging.DEBUG)
@pytest.fixture(scope="class")
def username11():
print("username 11 module 11")
return "module username 11" | [
"[email protected]"
] | |
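A hypothetical companion test module (not in the repository) showing how pytest consumes the class-scoped fixture above, injected by parameter name:

class TestUsername:
    def test_fixture_value(self, username11):
        assert username11 == "module username 11"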
42cc165f79e9d6404b25f857a6aa9197f15590c7 | 2a3606551a4d850a7b4d6a4e08089c51108ef7be | /plugin.video.mrknow/mylib/pydevd_attach_to_process/winappdbg/system.py | 26e355dd5ea3bf17e0832c36c973f3aa87ac0a80 | [
"Apache-2.0"
] | permissive | rrosajp/filmkodi | a6bb1823f4ed45453c8b8e54ffbd6a7b49f44450 | 0162cde9ae25ddbf4a69330948714833ff2f78c9 | refs/heads/master | 2021-09-18T06:03:17.561062 | 2018-06-22T23:28:53 | 2018-06-22T23:28:53 | 234,768,781 | 1 | 0 | Apache-2.0 | 2021-06-03T20:33:07 | 2020-01-18T17:11:57 | null | UTF-8 | Python | false | false | 45,882 | py |
#!~/.wine/drive_c/Python25/python.exe
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2014, Mario Vilas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice,this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
System settings.
@group Instrumentation:
System
"""
from __future__ import with_statement
__revision__ = "$Id$"
__all__ = ['System']
from winappdbg import win32
from winappdbg.registry import Registry
from winappdbg.textio import HexInput, HexDump
from winappdbg.util import Regenerator, PathOperations, MemoryAddresses, DebugRegister, \
classproperty
from winappdbg.process import _ProcessContainer
from winappdbg.window import Window
import sys
import os
import ctypes
import warnings
from os import path, getenv
#==============================================================================
class System (_ProcessContainer):
"""
Interface to a batch of processes, plus some system wide settings.
Contains a snapshot of processes.
@group Platform settings:
arch, bits, os, wow64, pageSize
@group Instrumentation:
find_window, get_window_at, get_foreground_window,
get_desktop_window, get_shell_window
@group Debugging:
load_dbghelp, fix_symbol_store_path,
request_debug_privileges, drop_debug_privileges
@group Postmortem debugging:
get_postmortem_debugger, set_postmortem_debugger,
get_postmortem_exclusion_list, add_to_postmortem_exclusion_list,
remove_from_postmortem_exclusion_list
@group System services:
get_services, get_active_services,
start_service, stop_service,
pause_service, resume_service,
get_service_display_name, get_service_from_display_name
@group Permissions and privileges:
request_privileges, drop_privileges, adjust_privileges, is_admin
@group Miscellaneous global settings:
set_kill_on_exit_mode, read_msr, write_msr, enable_step_on_branch_mode,
get_last_branch_location
@type arch: str
@cvar arch: Name of the processor architecture we're running on.
For more details see L{win32.version._get_arch}.
@type bits: int
@cvar bits: Size of the machine word in bits for the current architecture.
For more details see L{win32.version._get_bits}.
@type os: str
    @cvar os: Name of the Windows version we're running on.
For more details see L{win32.version._get_os}.
@type wow64: bool
@cvar wow64: C{True} if the debugger is a 32 bits process running in a 64
bits version of Windows, C{False} otherwise.
@type pageSize: int
@cvar pageSize: Page size in bytes. Defaults to 0x1000 but it's
automatically updated on runtime when importing the module.
@type registry: L{Registry}
@cvar registry: Windows Registry for this machine.
"""
arch = win32.arch
bits = win32.bits
os = win32.os
wow64 = win32.wow64
@classproperty
def pageSize(cls):
pageSize = MemoryAddresses.pageSize
cls.pageSize = pageSize
return pageSize
registry = Registry()
#------------------------------------------------------------------------------
@staticmethod
def find_window(className = None, windowName = None):
"""
Find the first top-level window in the current desktop to match the
given class name and/or window name. If neither are provided any
top-level window will match.
@see: L{get_window_at}
@type className: str
@param className: (Optional) Class name of the window to find.
If C{None} or not used any class name will match the search.
@type windowName: str
@param windowName: (Optional) Caption text of the window to find.
If C{None} or not used any caption text will match the search.
@rtype: L{Window} or None
@return: A window that matches the request. There may be more matching
windows, but this method only returns one. If no matching window
is found, the return value is C{None}.
        @raise WindowsError: An error occurred while processing this request.
"""
# I'd love to reverse the order of the parameters
# but that might create some confusion. :(
hWnd = win32.FindWindow(className, windowName)
if hWnd:
return Window(hWnd)
@staticmethod
def get_window_at(x, y):
"""
Get the window located at the given coordinates in the desktop.
If no such window exists an exception is raised.
@see: L{find_window}
@type x: int
@param x: Horizontal coordinate.
@type y: int
@param y: Vertical coordinate.
@rtype: L{Window}
@return: Window at the requested position. If no such window
exists a C{WindowsError} exception is raised.
        @raise WindowsError: An error occurred while processing this request.
"""
return Window( win32.WindowFromPoint( (x, y) ) )
@staticmethod
def get_foreground_window():
"""
@rtype: L{Window}
@return: Returns the foreground window.
        @raise WindowsError: An error occurred while processing this request.
"""
return Window( win32.GetForegroundWindow() )
@staticmethod
def get_desktop_window():
"""
@rtype: L{Window}
@return: Returns the desktop window.
        @raise WindowsError: An error occurred while processing this request.
"""
return Window( win32.GetDesktopWindow() )
@staticmethod
def get_shell_window():
"""
@rtype: L{Window}
@return: Returns the shell window.
        @raise WindowsError: An error occurred while processing this request.
"""
return Window( win32.GetShellWindow() )
#------------------------------------------------------------------------------
@classmethod
def request_debug_privileges(cls, bIgnoreExceptions = False):
"""
Requests debug privileges.
This may be needed to debug processes running as SYSTEM
(such as services) since Windows XP.
@type bIgnoreExceptions: bool
@param bIgnoreExceptions: C{True} to ignore any exceptions that may be
raised when requesting debug privileges.
@rtype: bool
@return: C{True} on success, C{False} on failure.
@raise WindowsError: Raises an exception on error, unless
C{bIgnoreExceptions} is C{True}.
"""
try:
cls.request_privileges(win32.SE_DEBUG_NAME)
return True
except Exception:
if not bIgnoreExceptions:
raise
return False
@classmethod
def drop_debug_privileges(cls, bIgnoreExceptions = False):
"""
Drops debug privileges.
This may be needed to avoid being detected
by certain anti-debug tricks.
@type bIgnoreExceptions: bool
@param bIgnoreExceptions: C{True} to ignore any exceptions that may be
raised when dropping debug privileges.
@rtype: bool
@return: C{True} on success, C{False} on failure.
@raise WindowsError: Raises an exception on error, unless
C{bIgnoreExceptions} is C{True}.
"""
try:
cls.drop_privileges(win32.SE_DEBUG_NAME)
return True
except Exception:
if not bIgnoreExceptions:
raise
return False
@classmethod
def request_privileges(cls, *privileges):
"""
Requests privileges.
@type privileges: int...
@param privileges: Privileges to request.
@raise WindowsError: Raises an exception on error.
"""
cls.adjust_privileges(True, privileges)
@classmethod
def drop_privileges(cls, *privileges):
"""
Drops privileges.
@type privileges: int...
@param privileges: Privileges to drop.
@raise WindowsError: Raises an exception on error.
"""
cls.adjust_privileges(False, privileges)
@staticmethod
def adjust_privileges(state, privileges):
"""
Requests or drops privileges.
@type state: bool
@param state: C{True} to request, C{False} to drop.
@type privileges: list(int)
@param privileges: Privileges to request or drop.
@raise WindowsError: Raises an exception on error.
"""
with win32.OpenProcessToken(win32.GetCurrentProcess(),
win32.TOKEN_ADJUST_PRIVILEGES) as hToken:
NewState = ( (priv, state) for priv in privileges )
win32.AdjustTokenPrivileges(hToken, NewState)
@staticmethod
def is_admin():
"""
@rtype: bool
        @return: C{True} if the current user has Administrator privileges,
C{False} otherwise. Since Windows Vista and above this means if
the current process is running with UAC elevation or not.
"""
return win32.IsUserAnAdmin()
#------------------------------------------------------------------------------
__binary_types = {
win32.VFT_APP: "application",
win32.VFT_DLL: "dynamic link library",
win32.VFT_STATIC_LIB: "static link library",
win32.VFT_FONT: "font",
win32.VFT_DRV: "driver",
win32.VFT_VXD: "legacy driver",
}
__driver_types = {
win32.VFT2_DRV_COMM: "communications driver",
win32.VFT2_DRV_DISPLAY: "display driver",
win32.VFT2_DRV_INSTALLABLE: "installable driver",
win32.VFT2_DRV_KEYBOARD: "keyboard driver",
win32.VFT2_DRV_LANGUAGE: "language driver",
win32.VFT2_DRV_MOUSE: "mouse driver",
win32.VFT2_DRV_NETWORK: "network driver",
win32.VFT2_DRV_PRINTER: "printer driver",
win32.VFT2_DRV_SOUND: "sound driver",
win32.VFT2_DRV_SYSTEM: "system driver",
win32.VFT2_DRV_VERSIONED_PRINTER: "versioned printer driver",
}
__font_types = {
win32.VFT2_FONT_RASTER: "raster font",
win32.VFT2_FONT_TRUETYPE: "TrueType font",
win32.VFT2_FONT_VECTOR: "vector font",
}
__months = (
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December",
)
__days_of_the_week = (
"Sunday",
"Monday",
"Tuesday",
"Wednesday",
"Thursday",
"Friday",
"Saturday",
)
@classmethod
def get_file_version_info(cls, filename):
"""
Get the program version from an executable file, if available.
@type filename: str
@param filename: Pathname to the executable file to query.
@rtype: tuple(str, str, bool, bool, str, str)
@return: Tuple with version information extracted from the executable
file metadata, containing the following:
- File version number (C{"major.minor"}).
- Product version number (C{"major.minor"}).
- C{True} for debug builds, C{False} for production builds.
- C{True} for legacy OS builds (DOS, OS/2, Win16),
C{False} for modern OS builds.
- Binary file type.
May be one of the following values:
- "application"
- "dynamic link library"
- "static link library"
- "font"
- "raster font"
- "TrueType font"
- "vector font"
- "driver"
- "communications driver"
- "display driver"
- "installable driver"
- "keyboard driver"
- "language driver"
- "legacy driver"
- "mouse driver"
- "network driver"
- "printer driver"
- "sound driver"
- "system driver"
- "versioned printer driver"
- Binary creation timestamp.
Any of the fields may be C{None} if not available.
@raise WindowsError: Raises an exception on error.
"""
# Get the file version info structure.
pBlock = win32.GetFileVersionInfo(filename)
pBuffer, dwLen = win32.VerQueryValue(pBlock, "\\")
if dwLen != ctypes.sizeof(win32.VS_FIXEDFILEINFO):
raise ctypes.WinError(win32.ERROR_BAD_LENGTH)
pVersionInfo = ctypes.cast(pBuffer,
ctypes.POINTER(win32.VS_FIXEDFILEINFO))
VersionInfo = pVersionInfo.contents
if VersionInfo.dwSignature != 0xFEEF04BD:
raise ctypes.WinError(win32.ERROR_BAD_ARGUMENTS)
        # File and product versions ("major.minor" is packed into the
        # high and low words of the MS DWORD).
        FileVersion = "%d.%d" % (VersionInfo.dwFileVersionMS >> 16,
                                 VersionInfo.dwFileVersionMS & 0xFFFF)
        ProductVersion = "%d.%d" % (VersionInfo.dwProductVersionMS >> 16,
                                    VersionInfo.dwProductVersionMS & 0xFFFF)
# Debug build?
if VersionInfo.dwFileFlagsMask & win32.VS_FF_DEBUG:
DebugBuild = (VersionInfo.dwFileFlags & win32.VS_FF_DEBUG) != 0
else:
DebugBuild = None
# Legacy OS build?
LegacyBuild = (VersionInfo.dwFileOS != win32.VOS_NT_WINDOWS32)
# File type.
FileType = cls.__binary_types.get(VersionInfo.dwFileType)
if VersionInfo.dwFileType == win32.VFT_DRV:
FileType = cls.__driver_types.get(VersionInfo.dwFileSubtype)
elif VersionInfo.dwFileType == win32.VFT_FONT:
FileType = cls.__font_types.get(VersionInfo.dwFileSubtype)
# Timestamp, ex: "Monday, July 7, 2013 (12:20:50.126)".
# FIXME: how do we know the time zone?
FileDate = (VersionInfo.dwFileDateMS << 32) + VersionInfo.dwFileDateLS
if FileDate:
CreationTime = win32.FileTimeToSystemTime(FileDate)
CreationTimestamp = "%s, %s %d, %d (%d:%d:%d.%d)" % (
cls.__days_of_the_week[CreationTime.wDayOfWeek],
                cls.__months[CreationTime.wMonth - 1],  # wMonth is 1-based
CreationTime.wDay,
CreationTime.wYear,
CreationTime.wHour,
CreationTime.wMinute,
CreationTime.wSecond,
CreationTime.wMilliseconds,
)
else:
CreationTimestamp = None
# Return the file version info.
return (
FileVersion,
ProductVersion,
DebugBuild,
LegacyBuild,
FileType,
CreationTimestamp,
)
#------------------------------------------------------------------------------
# Locations for dbghelp.dll.
# Unfortunately, Microsoft started bundling WinDbg with the
# platform SDK, so the install directories may vary across
# versions and platforms.
__dbghelp_locations = {
# Intel 64 bits.
win32.ARCH_AMD64: set([
# WinDbg bundled with the SDK, version 8.0.
path.join(
getenv("ProgramFiles", "C:\\Program Files"),
"Windows Kits",
"8.0",
"Debuggers",
"x64",
"dbghelp.dll"),
path.join(
getenv("ProgramW6432", getenv("ProgramFiles",
"C:\\Program Files")),
"Windows Kits",
"8.0",
"Debuggers",
"x64",
"dbghelp.dll"),
# Old standalone versions of WinDbg.
path.join(
getenv("ProgramFiles", "C:\\Program Files"),
"Debugging Tools for Windows (x64)",
"dbghelp.dll"),
]),
# Intel 32 bits.
win32.ARCH_I386 : set([
# WinDbg bundled with the SDK, version 8.0.
path.join(
getenv("ProgramFiles", "C:\\Program Files"),
"Windows Kits",
"8.0",
"Debuggers",
"x86",
"dbghelp.dll"),
path.join(
getenv("ProgramW6432", getenv("ProgramFiles",
"C:\\Program Files")),
"Windows Kits",
"8.0",
"Debuggers",
"x86",
"dbghelp.dll"),
# Old standalone versions of WinDbg.
path.join(
getenv("ProgramFiles", "C:\\Program Files"),
"Debugging Tools for Windows (x86)",
"dbghelp.dll"),
            # Version shipped with Windows.
            path.join(
                getenv("SystemRoot", "C:\\Windows"),
                "system32",
                "dbghelp.dll"),
]),
}
@classmethod
def load_dbghelp(cls, pathname = None):
"""
Load the specified version of the C{dbghelp.dll} library.
This library is shipped with the Debugging Tools for Windows, and it's
required to load debug symbols.
Normally you don't need to call this method, as WinAppDbg already tries
to load the latest version automatically - but it may come in handy if
the Debugging Tools are installed in a non standard folder.
Example::
from winappdbg import Debug
def simple_debugger( argv ):
# Instance a Debug object, passing it the event handler callback
debug = Debug( my_event_handler )
try:
# Load a specific dbghelp.dll file
debug.system.load_dbghelp("C:\Some folder\dbghelp.dll")
# Start a new process for debugging
debug.execv( argv )
# Wait for the debugee to finish
debug.loop()
# Stop the debugger
finally:
debug.stop()
@see: U{http://msdn.microsoft.com/en-us/library/ms679294(VS.85).aspx}
@type pathname: str
@param pathname:
(Optional) Full pathname to the C{dbghelp.dll} library.
If not provided this method will try to autodetect it.
@rtype: ctypes.WinDLL
@return: Loaded instance of C{dbghelp.dll}.
@raise NotImplementedError: This feature was not implemented for the
current architecture.
        @raise WindowsError: An error occurred while processing this request.
"""
# If an explicit pathname was not given, search for the library.
if not pathname:
# Under WOW64 we'll treat AMD64 as I386.
arch = win32.arch
if arch == win32.ARCH_AMD64 and win32.bits == 32:
arch = win32.ARCH_I386
# Check if the architecture is supported.
if not arch in cls.__dbghelp_locations:
msg = "Architecture %s is not currently supported."
raise NotImplementedError(msg % arch)
# Grab all versions of the library we can find.
found = []
for pathname in cls.__dbghelp_locations[arch]:
if path.isfile(pathname):
try:
f_ver, p_ver = cls.get_file_version_info(pathname)[:2]
                except WindowsError:
                    msg = "Failed to parse file version metadata for: %s"
                    warnings.warn(msg % pathname)
                    continue  # skip files whose version cannot be read
if not f_ver:
f_ver = p_ver
elif p_ver and p_ver > f_ver:
f_ver = p_ver
found.append( (f_ver, pathname) )
# If we found any, use the newest version.
if found:
found.sort()
pathname = found.pop()[1]
# If we didn't find any, trust the default DLL search algorithm.
else:
pathname = "dbghelp.dll"
# Load the library.
dbghelp = ctypes.windll.LoadLibrary(pathname)
# Set it globally as the library to be used.
ctypes.windll.dbghelp = dbghelp
# Return the library.
return dbghelp
@staticmethod
def fix_symbol_store_path(symbol_store_path = None,
remote = True,
force = False):
"""
Fix the symbol store path. Equivalent to the C{.symfix} command in
Microsoft WinDbg.
If the symbol store path environment variable hasn't been set, this
method will provide a default one.
@type symbol_store_path: str or None
@param symbol_store_path: (Optional) Symbol store path to set.
@type remote: bool
@param remote: (Optional) Defines the symbol store path to set when the
C{symbol_store_path} is C{None}.
If C{True} the default symbol store path is set to the Microsoft
symbol server. Debug symbols will be downloaded through HTTP.
This gives the best results but is also quite slow.
If C{False} the default symbol store path is set to the local
cache only. This prevents debug symbols from being downloaded and
is faster, but unless you've installed the debug symbols on this
machine or downloaded them in a previous debugging session, some
symbols may be missing.
If the C{symbol_store_path} argument is not C{None}, this argument
is ignored entirely.
@type force: bool
@param force: (Optional) If C{True} the new symbol store path is set
always. If C{False} the new symbol store path is only set if
missing.
This allows you to call this method preventively to ensure the
symbol server is always set up correctly when running your script,
but without messing up whatever configuration the user has.
Example::
from winappdbg import Debug, System
def simple_debugger( argv ):
# Instance a Debug object
debug = Debug( MyEventHandler() )
try:
# Make sure the remote symbol store is set
System.fix_symbol_store_path(remote = True,
force = False)
# Start a new process for debugging
debug.execv( argv )
# Wait for the debugee to finish
debug.loop()
# Stop the debugger
finally:
debug.stop()
@rtype: str or None
@return: The previously set symbol store path if any,
otherwise returns C{None}.
"""
try:
if symbol_store_path is None:
local_path = "C:\\SYMBOLS"
if not path.isdir(local_path):
local_path = "C:\\Windows\\Symbols"
if not path.isdir(local_path):
local_path = path.abspath(".")
if remote:
symbol_store_path = (
"cache*;SRV*"
+ local_path +
"*"
"http://msdl.microsoft.com/download/symbols"
)
else:
symbol_store_path = "cache*;SRV*" + local_path
previous = os.environ.get("_NT_SYMBOL_PATH", None)
if not previous or force:
os.environ["_NT_SYMBOL_PATH"] = symbol_store_path
return previous
except Exception:
e = sys.exc_info()[1]
warnings.warn("Cannot fix symbol path, reason: %s" % str(e),
RuntimeWarning)
#------------------------------------------------------------------------------
@staticmethod
def set_kill_on_exit_mode(bKillOnExit = False):
"""
Defines the behavior of the debugged processes when the debugging
thread dies. This method only affects the calling thread.
Works on the following platforms:
- Microsoft Windows XP and above.
- Wine (Windows Emulator).
Fails on the following platforms:
- Microsoft Windows 2000 and below.
- ReactOS.
@type bKillOnExit: bool
@param bKillOnExit: C{True} to automatically kill processes when the
debugger thread dies. C{False} to automatically detach from
processes when the debugger thread dies.
@rtype: bool
@return: C{True} on success, C{False} on error.
@note:
This call will fail if a debug port was not created. That is, if
the debugger isn't attached to at least one process. For more info
see: U{http://msdn.microsoft.com/en-us/library/ms679307.aspx}
"""
try:
# won't work before calling CreateProcess or DebugActiveProcess
win32.DebugSetProcessKillOnExit(bKillOnExit)
except (AttributeError, WindowsError):
return False
return True
@staticmethod
def read_msr(address):
"""
Read the contents of the specified MSR (Machine Specific Register).
@type address: int
@param address: MSR to read.
@rtype: int
@return: Value of the specified MSR.
@raise WindowsError:
Raises an exception on error.
@raise NotImplementedError:
Current architecture is not C{i386} or C{amd64}.
@warning:
It could potentially brick your machine.
It works on my machine, but your mileage may vary.
"""
if win32.arch not in (win32.ARCH_I386, win32.ARCH_AMD64):
raise NotImplementedError(
"MSR reading is only supported on i386 or amd64 processors.")
msr = win32.SYSDBG_MSR()
msr.Address = address
msr.Data = 0
win32.NtSystemDebugControl(win32.SysDbgReadMsr,
InputBuffer = msr,
OutputBuffer = msr)
return msr.Data
@staticmethod
def write_msr(address, value):
"""
Set the contents of the specified MSR (Machine Specific Register).
@type address: int
@param address: MSR to write.
@type value: int
@param value: Contents to write on the MSR.
@raise WindowsError:
Raises an exception on error.
@raise NotImplementedError:
Current architecture is not C{i386} or C{amd64}.
@warning:
It could potentially brick your machine.
It works on my machine, but your mileage may vary.
"""
if win32.arch not in (win32.ARCH_I386, win32.ARCH_AMD64):
raise NotImplementedError(
"MSR writing is only supported on i386 or amd64 processors.")
msr = win32.SYSDBG_MSR()
msr.Address = address
msr.Data = value
win32.NtSystemDebugControl(win32.SysDbgWriteMsr, InputBuffer = msr)
@classmethod
def enable_step_on_branch_mode(cls):
"""
When tracing, call this on every single step event
for step on branch mode.
@raise WindowsError:
Raises C{ERROR_DEBUGGER_INACTIVE} if the debugger is not attached
            to at least one process.
@raise NotImplementedError:
Current architecture is not C{i386} or C{amd64}.
@warning:
This method uses the processor's machine specific registers (MSR).
It could potentially brick your machine.
It works on my machine, but your mileage may vary.
@note:
It doesn't seem to work in VMWare or VirtualBox machines.
Maybe it fails in other virtualization/emulation environments,
no extensive testing was made so far.
"""
cls.write_msr(DebugRegister.DebugCtlMSR,
DebugRegister.BranchTrapFlag | DebugRegister.LastBranchRecord)
@classmethod
def get_last_branch_location(cls):
"""
Returns the source and destination addresses of the last taken branch.
@rtype: tuple( int, int )
@return: Source and destination addresses of the last taken branch.
@raise WindowsError:
Raises an exception on error.
@raise NotImplementedError:
Current architecture is not C{i386} or C{amd64}.
@warning:
This method uses the processor's machine specific registers (MSR).
It could potentially brick your machine.
It works on my machine, but your mileage may vary.
@note:
It doesn't seem to work in VMWare or VirtualBox machines.
Maybe it fails in other virtualization/emulation environments,
no extensive testing was made so far.
"""
LastBranchFromIP = cls.read_msr(DebugRegister.LastBranchFromIP)
LastBranchToIP = cls.read_msr(DebugRegister.LastBranchToIP)
return ( LastBranchFromIP, LastBranchToIP )
#------------------------------------------------------------------------------
@classmethod
def get_postmortem_debugger(cls, bits = None):
"""
Returns the postmortem debugging settings from the Registry.
@see: L{set_postmortem_debugger}
@type bits: int
@param bits: Set to C{32} for the 32 bits debugger, or C{64} for the
        64 bits debugger. Set to C{None} for the default (L{System.bits}).
@rtype: tuple( str, bool, int )
@return: A tuple containing the command line string to the postmortem
debugger, a boolean specifying if user interaction is allowed
before attaching, and an integer specifying a user defined hotkey.
Any member of the tuple may be C{None}.
See L{set_postmortem_debugger} for more details.
@raise WindowsError:
Raises an exception on error.
"""
if bits is None:
bits = cls.bits
elif bits not in (32, 64):
raise NotImplementedError("Unknown architecture (%r bits)" % bits)
if bits == 32 and cls.bits == 64:
keyname = 'HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug'
else:
keyname = 'HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug'
key = cls.registry[keyname]
debugger = key.get('Debugger')
auto = key.get('Auto')
hotkey = key.get('UserDebuggerHotkey')
if auto is not None:
auto = bool(auto)
return (debugger, auto, hotkey)
@classmethod
def get_postmortem_exclusion_list(cls, bits = None):
"""
Returns the exclusion list for the postmortem debugger.
@see: L{get_postmortem_debugger}
@type bits: int
@param bits: Set to C{32} for the 32 bits debugger, or C{64} for the
        64 bits debugger. Set to C{None} for the default (L{System.bits}).
@rtype: list( str )
@return: List of excluded application filenames.
@raise WindowsError:
Raises an exception on error.
"""
if bits is None:
bits = cls.bits
elif bits not in (32, 64):
raise NotImplementedError("Unknown architecture (%r bits)" % bits)
if bits == 32 and cls.bits == 64:
keyname = 'HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug\\AutoExclusionList'
else:
keyname = 'HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug\\AutoExclusionList'
try:
key = cls.registry[keyname]
except KeyError:
return []
return [name for (name, enabled) in key.items() if enabled]
@classmethod
def set_postmortem_debugger(cls, cmdline,
auto = None, hotkey = None, bits = None):
"""
Sets the postmortem debugging settings in the Registry.
@warning: This method requires administrative rights.
@see: L{get_postmortem_debugger}
@type cmdline: str
@param cmdline: Command line to the new postmortem debugger.
When the debugger is invoked, the first "%ld" is replaced with the
process ID and the second "%ld" is replaced with the event handle.
Don't forget to enclose the program filename in double quotes if
the path contains spaces.
@type auto: bool
@param auto: Set to C{True} if no user interaction is allowed, C{False}
to prompt a confirmation dialog before attaching.
Use C{None} to leave this value unchanged.
@type hotkey: int
@param hotkey: Virtual key scan code for the user defined hotkey.
Use C{0} to disable the hotkey.
Use C{None} to leave this value unchanged.
@type bits: int
@param bits: Set to C{32} for the 32 bits debugger, or C{64} for the
        64 bits debugger. Set to C{None} for the default (L{System.bits}).
@raise WindowsError:
Raises an exception on error.
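        Example::
            # Hypothetical WinDbg pathname - adjust for your installation.
            System.set_postmortem_debugger(
                r'"C:\Debuggers\windbg.exe" -p %ld -e %ld -g')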
"""
if bits is None:
bits = cls.bits
elif bits not in (32, 64):
raise NotImplementedError("Unknown architecture (%r bits)" % bits)
if bits == 32 and cls.bits == 64:
keyname = 'HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug'
else:
keyname = 'HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug'
key = cls.registry[keyname]
if cmdline is not None:
key['Debugger'] = cmdline
if auto is not None:
key['Auto'] = int(bool(auto))
if hotkey is not None:
key['UserDebuggerHotkey'] = int(hotkey)
@classmethod
def add_to_postmortem_exclusion_list(cls, pathname, bits = None):
"""
Adds the given filename to the exclusion list for postmortem debugging.
@warning: This method requires administrative rights.
@see: L{get_postmortem_exclusion_list}
@type pathname: str
@param pathname:
Application pathname to exclude from postmortem debugging.
@type bits: int
@param bits: Set to C{32} for the 32 bits debugger, or C{64} for the
        64 bits debugger. Set to C{None} for the default (L{System.bits}).
@raise WindowsError:
Raises an exception on error.
"""
if bits is None:
bits = cls.bits
elif bits not in (32, 64):
raise NotImplementedError("Unknown architecture (%r bits)" % bits)
if bits == 32 and cls.bits == 64:
keyname = 'HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug\\AutoExclusionList'
else:
keyname = 'HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug\\AutoExclusionList'
try:
key = cls.registry[keyname]
except KeyError:
key = cls.registry.create(keyname)
key[pathname] = 1
@classmethod
def remove_from_postmortem_exclusion_list(cls, pathname, bits = None):
"""
        Removes the given filename from the postmortem debugging exclusion
        list in the Registry.
@warning: This method requires administrative rights.
@warning: Don't ever delete entries you haven't created yourself!
Some entries are set by default for your version of Windows.
Deleting them might deadlock your system under some circumstances.
For more details see:
U{http://msdn.microsoft.com/en-us/library/bb204634(v=vs.85).aspx}
@see: L{get_postmortem_exclusion_list}
@type pathname: str
@param pathname: Application pathname to remove from the postmortem
debugging exclusion list.
@type bits: int
@param bits: Set to C{32} for the 32 bits debugger, or C{64} for the
        64 bits debugger. Set to C{None} for the default (L{System.bits}).
@raise WindowsError:
Raises an exception on error.
"""
if bits is None:
bits = cls.bits
elif bits not in (32, 64):
raise NotImplementedError("Unknown architecture (%r bits)" % bits)
if bits == 32 and cls.bits == 64:
keyname = 'HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug\\AutoExclusionList'
else:
keyname = 'HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug\\AutoExclusionList'
try:
key = cls.registry[keyname]
except KeyError:
return
try:
del key[pathname]
except KeyError:
return
#------------------------------------------------------------------------------
@staticmethod
def get_services():
"""
Retrieve a list of all system services.
@see: L{get_active_services},
L{start_service}, L{stop_service},
L{pause_service}, L{resume_service}
@rtype: list( L{win32.ServiceStatusProcessEntry} )
@return: List of service status descriptors.
"""
with win32.OpenSCManager(
dwDesiredAccess = win32.SC_MANAGER_ENUMERATE_SERVICE
) as hSCManager:
try:
return win32.EnumServicesStatusEx(hSCManager)
except AttributeError:
return win32.EnumServicesStatus(hSCManager)
@staticmethod
def get_active_services():
"""
Retrieve a list of all active system services.
@see: L{get_services},
L{start_service}, L{stop_service},
L{pause_service}, L{resume_service}
@rtype: list( L{win32.ServiceStatusProcessEntry} )
@return: List of service status descriptors.
"""
with win32.OpenSCManager(
dwDesiredAccess = win32.SC_MANAGER_ENUMERATE_SERVICE
) as hSCManager:
return [ entry for entry in win32.EnumServicesStatusEx(hSCManager,
dwServiceType = win32.SERVICE_WIN32,
dwServiceState = win32.SERVICE_ACTIVE) \
if entry.ProcessId ]
@staticmethod
def get_service(name):
"""
Get the service descriptor for the given service name.
@see: L{start_service}, L{stop_service},
L{pause_service}, L{resume_service}
@type name: str
@param name: Service unique name. You can get this value from the
C{ServiceName} member of the service descriptors returned by
L{get_services} or L{get_active_services}.
@rtype: L{win32.ServiceStatusProcess}
@return: Service status descriptor.
"""
with win32.OpenSCManager(
dwDesiredAccess = win32.SC_MANAGER_ENUMERATE_SERVICE
) as hSCManager:
with win32.OpenService(hSCManager, name,
dwDesiredAccess = win32.SERVICE_QUERY_STATUS
) as hService:
try:
return win32.QueryServiceStatusEx(hService)
except AttributeError:
return win32.QueryServiceStatus(hService)
@staticmethod
def get_service_display_name(name):
"""
Get the service display name for the given service name.
@see: L{get_service}
@type name: str
@param name: Service unique name. You can get this value from the
C{ServiceName} member of the service descriptors returned by
L{get_services} or L{get_active_services}.
@rtype: str
@return: Service display name.
"""
with win32.OpenSCManager(
dwDesiredAccess = win32.SC_MANAGER_ENUMERATE_SERVICE
) as hSCManager:
return win32.GetServiceDisplayName(hSCManager, name)
@staticmethod
def get_service_from_display_name(displayName):
"""
Get the service unique name given its display name.
@see: L{get_service}
@type displayName: str
@param displayName: Service display name. You can get this value from
the C{DisplayName} member of the service descriptors returned by
L{get_services} or L{get_active_services}.
@rtype: str
@return: Service unique name.
"""
with win32.OpenSCManager(
dwDesiredAccess = win32.SC_MANAGER_ENUMERATE_SERVICE
) as hSCManager:
return win32.GetServiceKeyName(hSCManager, displayName)
@staticmethod
def start_service(name, argv = None):
"""
Start the service given by name.
        @warning: This method requires UAC elevation in Windows Vista and above.
@see: L{stop_service}, L{pause_service}, L{resume_service}
@type name: str
@param name: Service unique name. You can get this value from the
C{ServiceName} member of the service descriptors returned by
L{get_services} or L{get_active_services}.
"""
with win32.OpenSCManager(
dwDesiredAccess = win32.SC_MANAGER_CONNECT
) as hSCManager:
with win32.OpenService(hSCManager, name,
dwDesiredAccess = win32.SERVICE_START
) as hService:
win32.StartService(hService)
@staticmethod
def stop_service(name):
"""
Stop the service given by name.
        @warning: This method requires UAC elevation in Windows Vista and above.
@see: L{get_services}, L{get_active_services},
L{start_service}, L{pause_service}, L{resume_service}
"""
with win32.OpenSCManager(
dwDesiredAccess = win32.SC_MANAGER_CONNECT
) as hSCManager:
with win32.OpenService(hSCManager, name,
dwDesiredAccess = win32.SERVICE_STOP
) as hService:
win32.ControlService(hService, win32.SERVICE_CONTROL_STOP)
@staticmethod
def pause_service(name):
"""
Pause the service given by name.
        @warning: This method requires UAC elevation in Windows Vista and above.
@note: Not all services support this.
@see: L{get_services}, L{get_active_services},
L{start_service}, L{stop_service}, L{resume_service}
"""
with win32.OpenSCManager(
dwDesiredAccess = win32.SC_MANAGER_CONNECT
) as hSCManager:
with win32.OpenService(hSCManager, name,
dwDesiredAccess = win32.SERVICE_PAUSE_CONTINUE
) as hService:
win32.ControlService(hService, win32.SERVICE_CONTROL_PAUSE)
@staticmethod
def resume_service(name):
"""
Resume the service given by name.
        @warning: This method requires UAC elevation in Windows Vista and above.
@note: Not all services support this.
@see: L{get_services}, L{get_active_services},
L{start_service}, L{stop_service}, L{pause_service}
"""
with win32.OpenSCManager(
dwDesiredAccess = win32.SC_MANAGER_CONNECT
) as hSCManager:
with win32.OpenService(hSCManager, name,
dwDesiredAccess = win32.SERVICE_PAUSE_CONTINUE
) as hService:
win32.ControlService(hService, win32.SERVICE_CONTROL_CONTINUE)
# TODO: create_service, delete_service
| [
"[email protected]"
] | |
4c235f4e1cb7df6459e5a27dc2db0e9523efbd8d | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/galex_j18372-3125/sdB_GALEX_J18372-3125_coadd.py | 034c74a4d5e3f32e646d3ef889df15b3efc39fd6 | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | from gPhoton.gMap import gMap
def main():
gMap(band="NUV", skypos=[279.319917,-31.420586], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_GALEX_J18372-3125/sdB_GALEX_J18372-3125_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_GALEX_J18372-3125/sdB_GALEX_J18372-3125_count_coadd.fits", overwrite=True, verbose=3)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
271de1ac2b54d0734818c659bda503b6581faecf | f7edc11e203532cf755c73c64023ed0381bdb92f | /backend/giddy_up_18698/settings.py | 20f0de0620ad7ffa1bbac2990a2347f5f7384a09 | [] | no_license | crowdbotics-apps/giddy-up-18698 | b146616a279418eb26ffd92e3b3417ecd00a89e6 | 10c80047e774f2bc9195adb3a6ace97da5981510 | refs/heads/master | 2022-11-14T04:55:34.914713 | 2020-07-08T02:26:33 | 2020-07-08T02:26:33 | 277,969,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,794 | py | """
Django settings for giddy_up_18698 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
LOCAL_APPS = [
'home',
'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'rest_auth.registration',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'django_extensions',
'drf_yasg',
# start fcm_django push notifications
'fcm_django',
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'giddy_up_18698.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'giddy_up_18698.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {
'default': env.db()
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static')
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {
"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")
}
# end fcm_django push notifications
if DEBUG:
# output email to console instead of sending
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"[email protected]"
] | |
7a8a0495f011281a56470d70c04e8e2dbe1b09db | f92385943346eccca8cc4d7caca66d2d5455caa2 | /2020.8/老虎-1.py | 4944f91312e7c445e82228e8f02965aa40b7fbe9 | [] | no_license | IamWilliamWang/Leetcode-practice | 83861c5f8672a716141dc6ec9f61f21dc5041535 | c13c0380a3ae9fef201ae53d7004b9f4224f1620 | refs/heads/master | 2023-04-01T12:15:19.335312 | 2020-10-15T14:49:36 | 2020-10-15T14:49:36 | 281,846,435 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 968 | py | #
#
# @param HP long integer HP
# @param ACK long integer ACK
# @param HP2 long integer HP2
# @param ACK2 long integer ACK2
# @return long integer
#
import math
class Solution:
def Pokemonfight(self, HP, ACK, HP2, ACK2):
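        # Turn-based simulation: the enemy strikes first each round, then we
        # either heal back to full or attack; return the winning round or -1.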
hp, ack, hpEnemy, ackEnemy = HP2, ACK2, HP, ACK
        if ackEnemy >= hp:  # killed in a single round
return -1
        eatMedicine = False
for round in range(1, 100000):
            hp -= ackEnemy  # the enemy attacks first
if hp <= 0:
return -1
            if hp <= ackEnemy and hpEnemy > ack:  # time to consider healing
                if eatMedicine:  # healing every turn, so we never get to attack
return -1
hp = HP2
eatMadicine = True
else:
hpEnemy -= ack
eatMadicine = False
if hpEnemy <= 0:
return round
return -1
print(Solution().Pokemonfight(8, 3, 8, 1))
| [
"[email protected]"
] | |
35d505a038c7b205e90b3d1f3f3ee044a8950306 | d66818f4b951943553826a5f64413e90120e1fae | /hackerearth/Algorithms/Types of burgers/solution.py | 94cc0b583a01253b636b13ccbe80d38732c353cd | [
"MIT"
] | permissive | HBinhCT/Q-project | 0f80cd15c9945c43e2e17072416ddb6e4745e7fa | 19923cbaa3c83c670527899ece5c3ad31bcebe65 | refs/heads/master | 2023-08-30T08:59:16.006567 | 2023-08-29T15:30:21 | 2023-08-29T15:30:21 | 247,630,603 | 8 | 1 | MIT | 2020-07-22T01:20:23 | 2020-03-16T06:48:02 | Python | UTF-8 | Python | false | false | 873 | py | from heapq import heappop, heappush
t = int(input())
moves = ((1, 0, 0), (0, 1, 0), (0, 0, 1))
for _ in range(t):
x, y, z = map(int, input().strip().split())
k = int(input())
b1 = sorted(map(int, input().strip().split()), reverse=True)
b2 = sorted(map(int, input().strip().split()), reverse=True)
b3 = sorted(map(int, input().strip().split()), reverse=True)
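    # Best-first search over index triples; sums are negated because heapq
    # is a min-heap and we want the k largest combinations.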
heap = [(-(b1[0] + b2[0] + b3[0]), 0, 0, 0)]
visited = {(0, 0, 0)}
ans = 0
while k:
total, bx, by, bz = heappop(heap)
ans -= total
for mx, my, mz in moves:
nx = bx + mx
ny = by + my
nz = bz + mz
if nx < x and ny < y and nz < z and (nx, ny, nz) not in visited:
heappush(heap, (-(b1[nx] + b2[ny] + b3[nz]), nx, ny, nz))
visited.add((nx, ny, nz))
k -= 1
print(ans)
| [
"[email protected]"
] | |
4a70fedc08979200d7ed54db5e4a6469a9d88d01 | 53d203e73331d2ee0f1d644946bf6650f5716edd | /quantifiedcode/backend/tasks/email.py | 95859928af4da04b416a6a77b273f397ad3bc141 | [
"BSD-3-Clause"
] | permissive | martynbristow/quantifiedcode | 3f6a6c935aeb8d78e90086cefaee6cd3b6f4558d | a7485ceaeffc5cf5894654dde35ed9a501657a07 | refs/heads/master | 2022-01-25T20:42:24.124289 | 2021-07-07T10:44:54 | 2021-07-07T10:44:54 | 239,041,449 | 0 | 0 | BSD-3-Clause | 2022-01-06T22:47:22 | 2020-02-07T23:37:47 | JavaScript | UTF-8 | Python | false | false | 3,998 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Contains tasks and helper functions to send notifications.
"""
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
import logging
import datetime
import traceback
import re
from six import string_types
from quantifiedcode.settings import settings, backend
from quantifiedcode.backend.settings.jinja import jinja_env
from quantifiedcode.backend.worker import celery
from quantifiedcode.backend.models import User
logger = logging.getLogger(__name__)
def send_mail(*args, **kwargs):
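    # In debug mode send synchronously for immediate feedback; otherwise
    # hand the job off to the Celery "email" queue.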
if settings.get('debug'):
send_mail_async(*args, **kwargs)
else:
send_mail_async.delay(*args, **kwargs)
@celery.task(time_limit=120, queue="email", ignore_result=False)
def send_mail_async(email_to,
template,
template_context=None,
email_from=None,
name_from=None,
email_reply_to=None,
attachments=None):
""" Sends an email based on the specified template.
:param email_to: address or a list of email addresses
:param template: name of the template to use for the email
:param template_context: dict with template context, ie `template_context = {"diffs": aggregated_diffs}`
:param email_from: sender of the email
:param name_from: name of the sender
:param email_reply_to: email address to set as the reply-to address
:param attachments: list of attachments
:return:
"""
if isinstance(email_to, string_types):
email_to = [email_to]
if email_to is None or not isinstance(email_to, (list, tuple)):
raise ValueError("email_to is None or incompatible type!")
if template_context is None:
template_context = {}
email_from = email_from if email_from is not None else settings.get('email.from_email')
name_from = name_from if name_from is not None else settings.get('email.from_name')
email_reply_to = email_reply_to if email_reply_to is not None else email_from
if attachments is None:
attachments = []
# render mail content
template_context.update(settings.get('render_context', {}))
template_path = "email/{0}.multipart".format(template)
template = jinja_env.get_template(template_path)
    # We generate the module, which allows us to extract individual blocks
    # from it; the blocks of interest are captured via the {% set ... %} syntax.
module = template.make_module(template_context)
logger.info("Sending an email to: {}\ntemplate: {}\ntemplate_context: {}\nsubject: {}"
.format("".join(email_to), template, template_context, module.subject))
message = {
'from_email': email_from,
'from_name': name_from,
'reply_to' : email_reply_to,
'subject': module.subject,
'html': module.html,
'text': module.text if module.text else None,
'to': email_to,
'attachments': attachments,
}
if not settings.providers['email.send']:
logger.warning("No e-mail providers defined, aborting...")
return
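    # Dispatch through the first configured e-mail provider only.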
for params in settings.providers['email.send']:
params['provider'](message)
break
def send_mail_to_user(user,
template,
template_context=None,
delay=False,
**kwargs):
""" Sends an email message if the user has a verified email and enabled email notifications
:param user: user to send the email message to
:param template: template for the message to send
:param template_context: `template_context = {"diffs": aggregated_diffs}`
:param delay: if True the send_mail function will be run asynchronously
:return: None
"""
function = send_mail.delay if delay is True else send_mail
if user.email and user.email_validated:
return function(user.email, template, template_context=template_context, **kwargs)
| [
"[email protected]"
] | |
de912f9609f0a22e4153cd9d3c53bdd16fe3d6c1 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_040/ch25_2020_03_09_19_44_46_012241.py | 65c6d90914c5b631ee3669e31ddbbd97f3da12dc | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 309 | py | import math
v=float(input("Velocidade de lançamento: "))
angulo=float(input("Ângulo de lançamento: "))
def d(v,angulo):
d = (v**2*math.sin(math.radians(2*angulo))/9.8)
if (d>=102):
return ("Acertou!")
elif (d<=98):
return ("Muito perto")
else:
return ("Muito longe") | [
"[email protected]"
] | |
d15740ee02928888baf46ea11ad875d8f4194fcc | da0a7446122a44887fa2c4f391e9630ae033daa2 | /python/ray/train/tests/test_torch_trainer.py | 8ccc79b096a47fcfdea805fb3ec786775f9cd30b | [
"MIT",
"BSD-3-Clause",
"Apache-2.0"
] | permissive | whiledoing/ray | d8d9ba09b7545e8fd00cca5cfad451278e61fffd | 9272bcbbcae1630c5bb2db08a8279f0401ce6f92 | refs/heads/master | 2023-03-06T16:23:18.006757 | 2022-07-22T02:06:47 | 2022-07-22T02:06:47 | 252,420,044 | 0 | 0 | Apache-2.0 | 2023-03-04T08:57:20 | 2020-04-02T10:07:23 | Python | UTF-8 | Python | false | false | 3,493 | py | import pytest
from ray.air import session
from ray.air.checkpoint import Checkpoint
import torch
import ray
from ray.air.examples.pytorch.torch_linear_example import (
train_func as linear_train_func,
)
from ray.train.torch import TorchPredictor, TorchTrainer
from ray.tune import TuneError
from ray.air.config import ScalingConfig
@pytest.fixture
def ray_start_4_cpus():
address_info = ray.init(num_cpus=4)
yield address_info
# The code after the yield will run as teardown code.
ray.shutdown()
@pytest.mark.parametrize("num_workers", [1, 2])
def test_torch_linear(ray_start_4_cpus, num_workers):
def train_func(config):
result = linear_train_func(config)
assert len(result) == epochs
assert result[-1]["loss"] < result[0]["loss"]
epochs = 3
scaling_config = ScalingConfig(num_workers=num_workers)
config = {"lr": 1e-2, "hidden_size": 1, "batch_size": 4, "epochs": epochs}
trainer = TorchTrainer(
train_loop_per_worker=train_func,
train_loop_config=config,
scaling_config=scaling_config,
)
trainer.fit()
def test_torch_e2e(ray_start_4_cpus):
def train_func():
model = torch.nn.Linear(1, 1)
session.report({}, checkpoint=Checkpoint.from_dict(dict(model=model)))
scaling_config = ScalingConfig(num_workers=2)
trainer = TorchTrainer(
train_loop_per_worker=train_func, scaling_config=scaling_config
)
result = trainer.fit()
predict_dataset = ray.data.range(3)
class TorchScorer:
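        # Callable wrapper: each map_batches actor restores the checkpoint
        # once in its constructor instead of once per batch.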
def __init__(self):
self.pred = TorchPredictor.from_checkpoint(result.checkpoint)
def __call__(self, x):
return self.pred.predict(x, dtype=torch.float)
predictions = predict_dataset.map_batches(
TorchScorer, batch_format="pandas", compute="actors"
)
assert predictions.count() == 3
def test_torch_e2e_state_dict(ray_start_4_cpus):
def train_func():
model = torch.nn.Linear(1, 1).state_dict()
session.report({}, checkpoint=Checkpoint.from_dict(dict(model=model)))
scaling_config = ScalingConfig(num_workers=2)
trainer = TorchTrainer(
train_loop_per_worker=train_func, scaling_config=scaling_config
)
result = trainer.fit()
# If loading from a state dict, a model definition must be passed in.
with pytest.raises(ValueError):
TorchPredictor.from_checkpoint(result.checkpoint)
class TorchScorer:
def __init__(self):
self.pred = TorchPredictor.from_checkpoint(
result.checkpoint, model=torch.nn.Linear(1, 1)
)
def __call__(self, x):
return self.pred.predict(x, dtype=torch.float)
predict_dataset = ray.data.range(3)
predictions = predict_dataset.map_batches(
TorchScorer, batch_format="pandas", compute="actors"
)
assert predictions.count() == 3
def test_checkpoint_freq(ray_start_4_cpus):
# checkpoint_freq is not supported so raise an error
trainer = TorchTrainer(
train_loop_per_worker=lambda config: None,
scaling_config=ray.air.ScalingConfig(num_workers=1),
run_config=ray.air.RunConfig(
checkpoint_config=ray.air.CheckpointConfig(
checkpoint_frequency=2,
),
),
)
with pytest.raises(TuneError):
trainer.fit()
if __name__ == "__main__":
import sys
sys.exit(pytest.main(["-v", "-x", __file__]))
| [
"[email protected]"
] | |
174dff48ac9b3bf40f36add33d6c4d489b16d1c6 | 267867b5bcaeeed26228a295b877a62473e1d1bb | /scripts/python/client.py | f4a4d313384dccede10d4ca0c3367a4ffb51601e | [] | no_license | gitgaoqian/cloud_v2 | 602a2a3ea8e4457606ce9f1a55e2166c9b227781 | b35d89132799042ac34eba435b76c311c2a4b287 | refs/heads/master | 2021-05-06T07:08:52.055547 | 2021-01-24T12:25:38 | 2021-01-24T12:25:38 | 113,935,952 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 546 | py | #!/usr/bin/env python
import sys
import rospy
from cloud_v2.srv import call
def local_client(service,action):
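    # Block until the bridge service is advertised, then forward the call.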
rospy.wait_for_service('bridge_service')
try:
client = rospy.ServiceProxy('bridge_service', call)
resp1 = client(service,action)
return resp1
except rospy.ServiceException, e:
print "Service call failed: %s"%e
if __name__ == "__main__":
service = str(sys.argv[1])
action = str(sys.argv[2])
print "request "+service+' '+action
print " %s"%(local_client(service,action))
| [
"[email protected]"
] | |
0cce847454191ea31bb62590d59b1466784e8cc7 | ef8c5c55b6ec3971adff9afe2db1f76556b87082 | /code_examples.bak/wave2d_numpy_f90_cuda/wave2d_2plot.py | 8ecd2c9c6ad280e4676bd5d9c627e86e1cdfa832 | [] | no_license | wbkifun/my_stuff | 7007efc94b678234097abf0df9babfbd79dcf0ff | 0b5ad5d4d103fd05989b514bca0d5114691f8ff7 | refs/heads/master | 2020-12-10T22:40:28.532993 | 2017-11-15T11:39:41 | 2017-11-15T11:39:41 | 5,178,225 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,863 | py | #------------------------------------------------------------------------------
# Author : Ki-Hwan Kim ([email protected])
#
# Written date : 2010. 6. 17
# Modify date : 2012. 9. 17
#
# Copyright : GNU GPL
#
# Description :
# Solve the 2-D wave equation with the FD(Finite-Difference) scheme
#
# These are educational codes to study the scientific python programming.
# Step 1: Using the numpy
# Step 2: Convert the hotspot to the Fortran code using F2PY
# Step 3: Convert the hotspot to the CUDA code using PyCUDA
#------------------------------------------------------------------------------
from __future__ import division
import numpy
import matplotlib.pyplot as plt
from time import time
#from core_numpy import advance
from core_fortran import advance
# Setup
nx, ny = 2400, 2000
tmax, tgap = 1500, 40
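# Coefficient field in Fortran (column-major) order to match the F2PY kernel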
c = numpy.ones((nx,ny), order='F')*0.25
f = numpy.zeros_like(c, order='F')
g = numpy.zeros_like(c, order='F')
# Plot using matplotlib
plt.ion()
fig = plt.figure(figsize=(8,10))
ax1 = fig.add_subplot(2,1,1)
ax1.plot([nx//2,nx//2], [0,ny], '--k')
imag = ax1.imshow(c.T, origin='lower', vmin=-0.1, vmax=0.1)
fig.colorbar(imag)
ax2 = fig.add_subplot(2,1,2)
line, = ax2.plot(c[nx//2,:])
ax2.set_xlim(0, ny)
ax2.set_ylim(-0.1, 0.1)
# Main loop for the time evolution
t0 = time()
f_avg = numpy.zeros(ny)
for tn in xrange(1,tmax+1):
#g[nx//3,ny//2] += numpy.sin(0.05*numpy.pi*tn)
g[nx//3,ny//2+100] += numpy.sin(0.05*numpy.pi*tn)
g[nx//3,ny//2-100] += numpy.sin(0.05*numpy.pi*tn)
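    # Two half-steps per iteration: f and g alternate as the current and
    # previous wavefields (leapfrog scheme).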
advance(c, f, g)
advance(c, g, f)
f_avg[:] += f[nx//2,:]**2
if tn%tgap == 0:
print "%d (%d %%)" % (tn, tn/tmax*100)
imag.set_array(f.T)
line.set_ydata(f_avg)
f_avg[:] = 0
plt.draw()
#plt.savefig('./png/%.5d.png' % tn)
print "throughput: %1.3f Mcell/s" % (nx*ny*tmax/(time()-t0)/1e6)
| [
"[email protected]"
] | |
ea43c9fb9d97749418f21a7acb65a4abd48d203b | 786027545626c24486753351d6e19093b261cd7d | /ghidra9.2.1_pyi/ghidra/file/formats/ios/decmpfs/DecmpfsHeader.pyi | 14fadb23d185b60636313cbd85e5c0fceeb233fc | [
"MIT"
] | permissive | kohnakagawa/ghidra_scripts | 51cede1874ef2b1fed901b802316449b4bf25661 | 5afed1234a7266c0624ec445133280993077c376 | refs/heads/main | 2023-03-25T08:25:16.842142 | 2021-03-18T13:31:40 | 2021-03-18T13:31:40 | 338,577,905 | 14 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,782 | pyi | from typing import List
import ghidra.app.util.bin
import ghidra.program.model.data
import java.lang
class DecmpfsHeader(object, ghidra.app.util.bin.StructConverter):
ASCII: ghidra.program.model.data.DataType = char
BYTE: ghidra.program.model.data.DataType = byte
DWORD: ghidra.program.model.data.DataType = dword
IBO32: ghidra.program.model.data.DataType = ImageBaseOffset32
POINTER: ghidra.program.model.data.DataType = pointer
QWORD: ghidra.program.model.data.DataType = qword
STRING: ghidra.program.model.data.DataType = string
UTF16: ghidra.program.model.data.DataType = unicode
UTF8: ghidra.program.model.data.DataType = string-utf8
VOID: ghidra.program.model.data.DataType = void
WORD: ghidra.program.model.data.DataType = word
def __init__(self, __a0: ghidra.app.util.bin.BinaryReader, __a1: int): ...
def equals(self, __a0: object) -> bool: ...
def getAttrBytes(self) -> List[int]: ...
def getClass(self) -> java.lang.Class: ...
def getCompressionMagic(self) -> unicode: ...
def getCompressionType(self) -> int: ...
def getUncompressedSize(self) -> long: ...
def hashCode(self) -> int: ...
def notify(self) -> None: ...
def notifyAll(self) -> None: ...
def toDataType(self) -> ghidra.program.model.data.DataType: ...
def toString(self) -> unicode: ...
@overload
def wait(self) -> None: ...
@overload
def wait(self, __a0: long) -> None: ...
@overload
def wait(self, __a0: long, __a1: int) -> None: ...
@property
def attrBytes(self) -> List[int]: ...
@property
def compressionMagic(self) -> unicode: ...
@property
def compressionType(self) -> int: ...
@property
def uncompressedSize(self) -> long: ...
| [
"[email protected]"
] | |
c1c5d53023c9e08c075a4a87131b688e7c2d1e90 | 3f309b1dd9774ca1eef2c7bb7626447e6c3dbe70 | /peripheral/wdt_01385/config/wdt.py | 236b654f56f213aaee60a0d9144932b7da7f0238 | [
"LicenseRef-scancode-unknown-license-reference",
"ISC",
"LicenseRef-scancode-public-domain"
] | permissive | Unitek-KL/csp | 30892ddf1375f5191173cafdfba5f098245a0ff7 | 2ac7ba59465f23959e51d2f16a5712b57b79ef5f | refs/heads/master | 2020-12-10T13:42:26.878408 | 2019-10-14T17:55:22 | 2019-10-14T17:56:20 | 233,609,402 | 0 | 0 | NOASSERTION | 2020-01-13T14:04:51 | 2020-01-13T14:04:51 | null | UTF-8 | Python | false | false | 9,962 | py | # coding: utf-8
"""*****************************************************************************
* Copyright (C) 2019 Microchip Technology Inc. and its subsidiaries.
*
* Subject to your compliance with these terms, you may use Microchip software
* and any derivatives exclusively with Microchip products. It is your
* responsibility to comply with third party license terms applicable to your
* use of third party software (including open source software) that may
* accompany Microchip software.
*
* THIS SOFTWARE IS SUPPLIED BY MICROCHIP "AS IS". NO WARRANTIES, WHETHER
* EXPRESS, IMPLIED OR STATUTORY, APPLY TO THIS SOFTWARE, INCLUDING ANY IMPLIED
* WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY, AND FITNESS FOR A
* PARTICULAR PURPOSE.
*
* IN NO EVENT WILL MICROCHIP BE LIABLE FOR ANY INDIRECT, SPECIAL, PUNITIVE,
* INCIDENTAL OR CONSEQUENTIAL LOSS, DAMAGE, COST OR EXPENSE OF ANY KIND
* WHATSOEVER RELATED TO THE SOFTWARE, HOWEVER CAUSED, EVEN IF MICROCHIP HAS
* BEEN ADVISED OF THE POSSIBILITY OR THE DAMAGES ARE FORESEEABLE. TO THE
* FULLEST EXTENT ALLOWED BY LAW, MICROCHIP'S TOTAL LIABILITY ON ALL CLAIMS IN
* ANY WAY RELATED TO THIS SOFTWARE WILL NOT EXCEED THE AMOUNT OF FEES, IF ANY,
* THAT YOU HAVE PAID DIRECTLY TO MICROCHIP FOR THIS SOFTWARE.
*****************************************************************************"""
###################################################################################################
##################################### Global Variables ############################################
###################################################################################################
global wdtSym_Use
global wdtHeaderFile
global wdtSourceFile
global wdtSystemDefFile
global getWDTTimeOutPeriod
global getWDTAllowedWindowPeriod
global wdtTimeOutDictionary
global wdtAllowedWindowDictionary
wdtTimeOutDictionary = {
    #Entry : [#Period, #TimeUnit(milli-second/second)]
"PS1" : ["1", "ms"],
"PS2" : ["2", "ms"],
"PS4" : ["4", "ms"],
"PS8" : ["8", "ms"],
"PS16" : ["16", "ms"],
"PS32" : ["32", "ms"],
"PS64" : ["64", "ms"],
"PS128" : ["128", "ms"],
"PS256" : ["256", "ms"],
"PS512" : ["512", "ms"],
"PS1024" : ["1.024", "s"],
"PS2048" : ["2.048", "s"],
"PS4096" : ["4.096", "s"],
"PS8192" : ["8.192", "s"],
"PS16384" : ["16.384", "s"],
"PS32768" : ["32.768", "s"],
"PS65536" : ["65.536", "s"],
"PS131072" : ["131.072", "s"],
"PS262144" : ["262.144", "s"],
"PS524288" : ["524.288", "s"],
"PS1048576" : ["1048.576", "s"]
}
wdtAllowedWindowDictionary = {
"WINSZ_25" : "0.25",
"WINSZ_37" : "37.5",
"WINSZ_50" : "0.50",
"WINSZ_75" : "0.75"
}
###################################################################################################
########################################## Callbacks ##############################################
###################################################################################################
def getWDTAllowedWindowPeriod(period, windowSize):
allowedPeriod = str((float(wdtTimeOutDictionary[period][0]) * float(wdtAllowedWindowDictionary[windowSize])))
return (allowedPeriod + " " + wdtTimeOutDictionary[period][1])
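# Illustrative example using values from the tables above: period "PS1024"
# (1.024 s) combined with window size "WINSZ_25" (fraction 0.25) yields
# "0.256 s".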
def getWDTTimeOutPeriod(period):
return (wdtTimeOutDictionary[period][0] + " " + wdtTimeOutDictionary[period][1])
def updateWDTUseProperties(symbol, event):
symbol.setReadOnly((event["value"] == "ON"))
symbol.setValue((event["value"] == "ON"), 1)
def updateWDTTimeOutPeriodVisibleProperty(symbol, event):
if event["id"] == "WDT_USE":
symbol.setVisible(event["value"])
else:
symbol.setValue(getWDTTimeOutPeriod(event["value"]), 1)
def updateWDTWindowModeEnableVisibleProperty(symbol, event):
if event["id"] == "WDT_USE":
symbol.setVisible(event["value"])
else:
if event["value"] == "ON":
symbol.setValue("WINDOW", 1)
else:
symbol.setValue("NORMAL", 1)
def updateWDTConfigCommentVisibleProperty(symbol, event):
wdtHeaderFile.setEnabled(event["value"])
wdtSourceFile.setEnabled(event["value"])
wdtSystemDefFile.setEnabled(event["value"])
symbol.setVisible(event["value"])
def updateWDTAllowedWindowPeriodVisibleProperty(symbol, event):
if event["id"] == "CONFIG_FWDTWINSZ" or event["id"] == "CONFIG_WDTPS":
period = Database.getSymbolValue("core", "CONFIG_WDTPS")
windowSize = Database.getSymbolValue("core", "CONFIG_FWDTWINSZ")
symbol.setValue(getWDTAllowedWindowPeriod(period, windowSize), 1)
else:
if wdtSym_Use.getValue() == True and Database.getSymbolValue("core", "CONFIG_WINDIS") == "ON":
symbol.setVisible(True)
else:
symbol.setVisible(False)
###################################################################################################
############################################# WDT ###############################################
###################################################################################################
isWDTEnabled = (Database.getSymbolValue("core", "CONFIG_FWDTEN") == "ON")
isWDTWindowModeEnabled = (Database.getSymbolValue("core", "CONFIG_WINDIS") == "ON")
wdtTimeOut = Database.getSymbolValue("core", "CONFIG_WDTPS")
wdtAllowedWindowSize = Database.getSymbolValue("core", "CONFIG_FWDTWINSZ")
wdtInstances = ATDF.getNode("/avr-tools-device-file/devices/device/peripherals/module@[name=\"WDT\"]")
wdtInstanceName = coreComponent.createStringSymbol("WDT_INSTANCE_NAME", None)
wdtInstanceName.setVisible(False)
wdtInstanceName.setDefaultValue(wdtInstances.getAttribute("name"))
#WDT menu
wdtMenu = coreComponent.createMenuSymbol("WDT_MENU", None)
wdtMenu.setLabel("WDT")
#WDT Use
wdtSym_Use = coreComponent.createBooleanSymbol("WDT_USE", wdtMenu)
wdtSym_Use.setLabel("Use WDT ?")
wdtSym_Use.setDefaultValue(isWDTEnabled)
wdtSym_Use.setReadOnly(isWDTEnabled)
wdtSym_Use.setDependencies(updateWDTUseProperties, ["CONFIG_FWDTEN"])
#WDT Configuration comment
wdtSym_ConfigComment = coreComponent.createCommentSymbol("WDT_CONFIG_COMMENT", wdtSym_Use)
wdtSym_ConfigComment.setLabel("************** Configure WDT From Device Configuration Fuses ***************")
wdtSym_ConfigComment.setVisible(isWDTEnabled)
wdtSym_ConfigComment.setDependencies(updateWDTConfigCommentVisibleProperty, ["WDT_USE"])
#WDT Operation mode
wdtSym_WindowMode = coreComponent.createComboSymbol("WDT_MODE", wdtSym_Use, ["NORMAL", "WINDOW"])
wdtSym_WindowMode.setLabel("Configured WDT Operation Mode")
if isWDTEnabled and isWDTWindowModeEnabled:
wdtSym_WindowMode.setDefaultValue("WINDOW")
else:
wdtSym_WindowMode.setDefaultValue("NORMAL")
wdtSym_WindowMode.setVisible(isWDTEnabled)
wdtSym_WindowMode.setReadOnly(True)
wdtSym_WindowMode.setDependencies(updateWDTWindowModeEnableVisibleProperty, ["WDT_USE", "CONFIG_WINDIS"])
#WDT Time-out Period
wdtSym_TimeOutPeriod = coreComponent.createStringSymbol("WDT_TIMEOUT_PERIOD", wdtSym_Use)
wdtSym_TimeOutPeriod.setLabel("Configured WDT Time-out Period")
wdtSym_TimeOutPeriod.setDefaultValue(getWDTTimeOutPeriod(wdtTimeOut))
wdtSym_TimeOutPeriod.setReadOnly(True)
wdtSym_TimeOutPeriod.setVisible(isWDTEnabled)
wdtSym_TimeOutPeriod.setDependencies(updateWDTTimeOutPeriodVisibleProperty, ["WDT_USE", "CONFIG_WDTPS"])
#WDT Allowed Window Period
wdtSym_AllowedWindowPeriod = coreComponent.createStringSymbol("WDT_ALLOWED_WINDOW_PERIOD", wdtSym_Use)
wdtSym_AllowedWindowPeriod.setLabel("Configured WDT Allowed Window Period")
wdtSym_AllowedWindowPeriod.setDefaultValue(getWDTAllowedWindowPeriod(wdtTimeOut, wdtAllowedWindowSize))
wdtSym_AllowedWindowPeriod.setReadOnly(True)
wdtSym_AllowedWindowPeriod.setVisible(isWDTEnabled and isWDTWindowModeEnabled)
wdtSym_AllowedWindowPeriod.setDependencies(updateWDTAllowedWindowPeriodVisibleProperty, ["WDT_USE", "CONFIG_FWDTWINSZ", "CONFIG_WINDIS", "CONFIG_WDTPS"])
###################################################################################################
####################################### Code Generation ##########################################
###################################################################################################
configName = Variables.get("__CONFIGURATION_NAME")
wdtHeaderFile = coreComponent.createFileSymbol("WDT_HEADER", None)
wdtHeaderFile.setSourcePath("../peripheral/wdt_01385/templates/plib_wdt.h.ftl")
wdtHeaderFile.setOutputName("plib_" + wdtInstanceName.getValue().lower() + ".h")
wdtHeaderFile.setDestPath("peripheral/wdt/")
wdtHeaderFile.setProjectPath("config/" + configName + "/peripheral/wdt/")
wdtHeaderFile.setType("HEADER")
wdtHeaderFile.setMarkup(True)
wdtHeaderFile.setEnabled(wdtSym_Use.getValue())
wdtSourceFile = coreComponent.createFileSymbol("WDT_SOURCE", None)
wdtSourceFile.setSourcePath("../peripheral/wdt_01385/templates/plib_wdt.c.ftl")
wdtSourceFile.setOutputName("plib_" + wdtInstanceName.getValue().lower() + ".c")
wdtSourceFile.setDestPath("peripheral/wdt/")
wdtSourceFile.setProjectPath("config/" + configName + "/peripheral/wdt/")
wdtSourceFile.setType("SOURCE")
wdtSourceFile.setMarkup(True)
wdtSourceFile.setEnabled(wdtSym_Use.getValue())
wdtSystemDefFile = coreComponent.createFileSymbol("WDT_SYS_DEF", None)
wdtSystemDefFile.setType("STRING")
wdtSystemDefFile.setOutputName("core.LIST_SYSTEM_DEFINITIONS_H_INCLUDES")
wdtSystemDefFile.setSourcePath("../peripheral/wdt_01385/templates/system/definitions.h.ftl")
wdtSystemDefFile.setMarkup(True)
wdtSystemDefFile.setEnabled(wdtSym_Use.getValue())
| [
"http://support.microchip.com"
] | http://support.microchip.com |
fdfbe937a2a646632ade4d132625cc393a0fe83c | 90cd41da01e181bf689feb6d305a2610c88e3902 | /senlin/tests/tempest/api/clusters/test_cluster_delete_negative.py | 5e9aab7633647c77ac10cac70360dffb82e45287 | [
"Apache-2.0"
] | permissive | paperandsoap/senlin | 368980e1fb01d91659f8b0d7dd532c3260386fa7 | 5d98dae3911aa4d5b71e491f3a4e0c21371cc75a | refs/heads/master | 2020-12-25T17:45:02.490958 | 2016-05-29T14:42:39 | 2016-05-29T14:42:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,100 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib import decorators
from tempest.lib import exceptions
from tempest import test
from senlin.tests.tempest.api import base
from senlin.tests.tempest.api import utils
class TestClusterDeleteNegative(base.BaseSenlinTest):
def setUp(self):
super(TestClusterDeleteNegative, self).setUp()
profile_id = utils.create_a_profile(self)
self.addCleanup(utils.delete_a_profile, self, profile_id)
self.cluster_id = utils.create_a_cluster(self, profile_id)
self.addCleanup(utils.delete_a_cluster, self, self.cluster_id)
policy_id = utils.create_a_policy(self)
self.addCleanup(utils.delete_a_policy, self, policy_id)
utils.attach_policy(self, self.cluster_id, policy_id)
self.addCleanup(utils.detach_policy, self, self.cluster_id, policy_id)
@test.attr(type=['negative'])
@decorators.idempotent_id('0de81427-2b2f-4821-9462-c893d35fb212')
def test_cluster_delete_conflict(self):
# Verify conflict exception(409) is raised.
self.assertRaises(exceptions.Conflict,
self.client.delete_obj,
'clusters', self.cluster_id)
@test.attr(type=['negative'])
@decorators.idempotent_id('8a583b8e-eeaa-4920-a6f5-2880b070624f')
def test_cluster_delete_not_found(self):
# Verify notfound exception(404) is raised.
self.assertRaises(exceptions.NotFound,
self.client.delete_obj,
'clusters', '8a583b8e-eeaa-4920-a6f5-2880b070624f')
| [
"[email protected]"
] | |
84c5278933ccbfe033e99a67fd923dea7a9c80d2 | 2af6a5c2d33e2046a1d25ae9dd66d349d3833940 | /res_bw/scripts/common/lib/encodings/utf_32.py | 6a7ba7022f222c97d748b69be9c9ec7f90231268 | [] | no_license | webiumsk/WOT-0.9.12-CT | e6c8b5bb106fad71b5c3056ada59fb1aebc5f2b2 | 2506e34bd6634ad500b6501f4ed4f04af3f43fa0 | refs/heads/master | 2021-01-10T01:38:38.080814 | 2015-11-11T00:08:04 | 2015-11-11T00:08:04 | 45,803,240 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 4,805 | py | # 2015.11.10 21:35:58 Střední Evropa (běžný čas)
# Embedded file name: scripts/common/Lib/encodings/utf_32.py
"""
Python 'utf-32' Codec
"""
import codecs, sys
encode = codecs.utf_32_encode
def decode(input, errors = 'strict'):
return codecs.utf_32_decode(input, errors, True)
class IncrementalEncoder(codecs.IncrementalEncoder):
def __init__(self, errors = 'strict'):
codecs.IncrementalEncoder.__init__(self, errors)
self.encoder = None
return
def encode(self, input, final = False):
if self.encoder is None:
result = codecs.utf_32_encode(input, self.errors)[0]
if sys.byteorder == 'little':
self.encoder = codecs.utf_32_le_encode
else:
self.encoder = codecs.utf_32_be_encode
return result
else:
return self.encoder(input, self.errors)[0]
def reset(self):
codecs.IncrementalEncoder.reset(self)
self.encoder = None
return
def getstate(self):
if self.encoder is None:
return 2
else:
return 0
def setstate(self, state):
if state:
self.encoder = None
elif sys.byteorder == 'little':
self.encoder = codecs.utf_32_le_encode
else:
self.encoder = codecs.utf_32_be_encode
return
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
def __init__(self, errors = 'strict'):
codecs.BufferedIncrementalDecoder.__init__(self, errors)
self.decoder = None
return
def _buffer_decode(self, input, errors, final):
if self.decoder is None:
output, consumed, byteorder = codecs.utf_32_ex_decode(input, errors, 0, final)
if byteorder == -1:
self.decoder = codecs.utf_32_le_decode
elif byteorder == 1:
self.decoder = codecs.utf_32_be_decode
elif consumed >= 4:
raise UnicodeError('UTF-32 stream does not start with BOM')
return (output, consumed)
else:
return self.decoder(input, self.errors, final)
def reset(self):
codecs.BufferedIncrementalDecoder.reset(self)
self.decoder = None
return
def getstate(self):
state = codecs.BufferedIncrementalDecoder.getstate(self)[0]
if self.decoder is None:
return (state, 2)
else:
addstate = int((sys.byteorder == 'big') != (self.decoder is codecs.utf_32_be_decode))
return (state, addstate)
def setstate(self, state):
codecs.BufferedIncrementalDecoder.setstate(self, state)
state = state[1]
if state == 0:
self.decoder = codecs.utf_32_be_decode if sys.byteorder == 'big' else codecs.utf_32_le_decode
elif state == 1:
self.decoder = codecs.utf_32_le_decode if sys.byteorder == 'big' else codecs.utf_32_be_decode
else:
self.decoder = None
return
class StreamWriter(codecs.StreamWriter):
def __init__(self, stream, errors = 'strict'):
self.encoder = None
codecs.StreamWriter.__init__(self, stream, errors)
return
def reset(self):
codecs.StreamWriter.reset(self)
self.encoder = None
return
def encode(self, input, errors = 'strict'):
if self.encoder is None:
result = codecs.utf_32_encode(input, errors)
if sys.byteorder == 'little':
self.encoder = codecs.utf_32_le_encode
else:
self.encoder = codecs.utf_32_be_encode
return result
else:
return self.encoder(input, errors)
return
class StreamReader(codecs.StreamReader):
def reset(self):
codecs.StreamReader.reset(self)
try:
del self.decode
except AttributeError:
pass
def decode(self, input, errors = 'strict'):
object, consumed, byteorder = codecs.utf_32_ex_decode(input, errors, 0, False)
if byteorder == -1:
self.decode = codecs.utf_32_le_decode
elif byteorder == 1:
self.decode = codecs.utf_32_be_decode
elif consumed >= 4:
raise UnicodeError, 'UTF-32 stream does not start with BOM'
return (object, consumed)
def getregentry():
return codecs.CodecInfo(name='utf-32', encode=encode, decode=decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter)
# okay decompyling c:\Users\PC\wotsources\files\originals\res_bw\scripts\common\lib\encodings\utf_32.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2015.11.10 21:35:58 Střední Evropa (běžný čas)
| [
"[email protected]"
] | |
856f194b3643fa63d846f488a7b5eaa655451e14 | e00186e71a1f52b394315a0cbc27162254cfffb9 | /durga/full_durga/withrestc1/withrestc1/asgi.py | 9033cff3915786e32a148f3a2d204f1ea519d37c | [] | no_license | anilkumar0470/git_practice | cf132eb7970c40d0d032520d43e6d4a1aca90742 | 588e7f654f158e974f9893e5018d3367a0d88eeb | refs/heads/master | 2023-04-27T04:50:14.688534 | 2023-04-22T05:54:21 | 2023-04-22T05:54:21 | 100,364,712 | 0 | 1 | null | 2021-12-08T19:44:58 | 2017-08-15T10:02:33 | Python | UTF-8 | Python | false | false | 397 | py | """
ASGI config for withrestc1 project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'withrestc1.settings')
application = get_asgi_application()
| [
"[email protected]"
] | |
0763a1481b2be60155335147da398bc603dc0912 | 426aed70aa6925105f10c7fcb7b611b277bf8b84 | /python/dgl/nn/pytorch/utils.py | 930f0268f0b5732e5d61030dc5d1357d7c38d35a | [
"Apache-2.0"
] | permissive | hengruizhang98/dgl | 0ce7201ca7380482440f031cb8ced6ca0e8c8dc1 | 195f99362d883f8b6d131b70a7868a537e55b786 | refs/heads/master | 2023-06-10T22:21:45.835646 | 2021-04-13T12:29:43 | 2021-04-13T12:29:43 | 336,804,001 | 3 | 0 | Apache-2.0 | 2021-02-07T14:16:20 | 2021-02-07T14:16:20 | null | UTF-8 | Python | false | false | 9,275 | py | """Utilities for pytorch NN package"""
#pylint: disable=no-member, invalid-name
import torch as th
from torch import nn
from ... import DGLGraph
from ...base import dgl_warning
def matmul_maybe_select(A, B):
"""Perform Matrix multiplication C = A * B but A could be an integer id vector.
If A is an integer vector, we treat it as multiplying a one-hot encoded tensor.
In this case, the expensive dense matrix multiply can be replaced by a much
cheaper index lookup.
For example,
::
A = [2, 0, 1],
B = [[0.1, 0.2],
[0.3, 0.4],
[0.5, 0.6]]
then matmul_maybe_select(A, B) is equivalent to
::
[[0, 0, 1], [[0.1, 0.2],
[1, 0, 0], * [0.3, 0.4],
[0, 1, 0]] [0.5, 0.6]]
In all other cases, perform a normal matmul.
Parameters
----------
A : torch.Tensor
lhs tensor
B : torch.Tensor
rhs tensor
Returns
-------
C : torch.Tensor
result tensor
"""
if A.dtype == th.int64 and len(A.shape) == 1:
return B.index_select(0, A)
else:
return th.matmul(A, B)
def bmm_maybe_select(A, B, index):
"""Slice submatrices of A by the given index and perform bmm.
B is a 3D tensor of shape (N, D1, D2), which can be viewed as a stack of
N matrices of shape (D1, D2). The input index is an integer vector of length M.
A could be either:
(1) a dense tensor of shape (M, D1),
(2) an integer vector of length M.
The result C is a 2D matrix of shape (M, D2)
For case (1), C is computed by bmm:
::
C[i, :] = matmul(A[i, :], B[index[i], :, :])
For case (2), C is computed by index select:
::
C[i, :] = B[index[i], A[i], :]
Parameters
----------
A : torch.Tensor
lhs tensor
B : torch.Tensor
rhs tensor
index : torch.Tensor
index tensor
Returns
-------
C : torch.Tensor
return tensor
"""
if A.dtype == th.int64 and len(A.shape) == 1:
        # following is a faster version of B[index, A, :]
        # capture D1 before flattening: the row offset into the flattened
        # (N*D1, D2) view is index*D1 + A (using the post-view shape[1]
        # would multiply by D2 and gather wrong rows whenever D1 != D2)
        d1 = B.shape[1]
        B = B.view(-1, B.shape[2])
        flatidx = index * d1 + A
        return B.index_select(0, flatidx)
else:
BB = B.index_select(0, index)
return th.bmm(A.unsqueeze(1), BB).squeeze()
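# Shape summary (illustrative): with B of shape (N, D1, D2) and index of
# length M, A may be a dense (M, D1) tensor (bmm path) or a length-M int64
# id vector (flat index_select path); both paths return an (M, D2) result.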
# pylint: disable=W0235
class Identity(nn.Module):
"""A placeholder identity operator that is argument-insensitive.
(Identity has already been supported by PyTorch 1.2, we will directly
import torch.nn.Identity in the future)
"""
def __init__(self):
super(Identity, self).__init__()
def forward(self, x):
"""Return input"""
return x
class Sequential(nn.Sequential):
r"""
Description
-----------
A squential container for stacking graph neural network modules.
DGL supports two modes: sequentially apply GNN modules on 1) the same graph or
2) a list of given graphs. In the second case, the number of graphs equals the
number of modules inside this container.
Parameters
----------
*args :
Sub-modules of torch.nn.Module that will be added to the container in
the order by which they are passed in the constructor.
Examples
--------
The following example uses PyTorch backend.
Mode 1: sequentially apply GNN modules on the same graph
>>> import torch
>>> import dgl
>>> import torch.nn as nn
>>> import dgl.function as fn
>>> from dgl.nn.pytorch import Sequential
>>> class ExampleLayer(nn.Module):
>>> def __init__(self):
>>> super().__init__()
>>> def forward(self, graph, n_feat, e_feat):
>>> with graph.local_scope():
>>> graph.ndata['h'] = n_feat
>>> graph.update_all(fn.copy_u('h', 'm'), fn.sum('m', 'h'))
>>> n_feat += graph.ndata['h']
>>> graph.apply_edges(fn.u_add_v('h', 'h', 'e'))
>>> e_feat += graph.edata['e']
>>> return n_feat, e_feat
>>>
>>> g = dgl.DGLGraph()
>>> g.add_nodes(3)
>>> g.add_edges([0, 1, 2, 0, 1, 2, 0, 1, 2], [0, 0, 0, 1, 1, 1, 2, 2, 2])
>>> net = Sequential(ExampleLayer(), ExampleLayer(), ExampleLayer())
>>> n_feat = torch.rand(3, 4)
>>> e_feat = torch.rand(9, 4)
>>> net(g, n_feat, e_feat)
(tensor([[39.8597, 45.4542, 25.1877, 30.8086],
[40.7095, 45.3985, 25.4590, 30.0134],
[40.7894, 45.2556, 25.5221, 30.4220]]),
tensor([[80.3772, 89.7752, 50.7762, 60.5520],
[80.5671, 89.3736, 50.6558, 60.6418],
[80.4620, 89.5142, 50.3643, 60.3126],
[80.4817, 89.8549, 50.9430, 59.9108],
[80.2284, 89.6954, 50.0448, 60.1139],
[79.7846, 89.6882, 50.5097, 60.6213],
[80.2654, 90.2330, 50.2787, 60.6937],
[80.3468, 90.0341, 50.2062, 60.2659],
[80.0556, 90.2789, 50.2882, 60.5845]]))
Mode 2: sequentially apply GNN modules on different graphs
>>> import torch
>>> import dgl
>>> import torch.nn as nn
>>> import dgl.function as fn
>>> import networkx as nx
>>> from dgl.nn.pytorch import Sequential
>>> class ExampleLayer(nn.Module):
>>> def __init__(self):
>>> super().__init__()
>>> def forward(self, graph, n_feat):
>>> with graph.local_scope():
>>> graph.ndata['h'] = n_feat
>>> graph.update_all(fn.copy_u('h', 'm'), fn.sum('m', 'h'))
>>> n_feat += graph.ndata['h']
>>> return n_feat.view(graph.number_of_nodes() // 2, 2, -1).sum(1)
>>>
>>> g1 = dgl.DGLGraph(nx.erdos_renyi_graph(32, 0.05))
>>> g2 = dgl.DGLGraph(nx.erdos_renyi_graph(16, 0.2))
>>> g3 = dgl.DGLGraph(nx.erdos_renyi_graph(8, 0.8))
>>> net = Sequential(ExampleLayer(), ExampleLayer(), ExampleLayer())
>>> n_feat = torch.rand(32, 4)
>>> net([g1, g2, g3], n_feat)
tensor([[209.6221, 225.5312, 193.8920, 220.1002],
[250.0169, 271.9156, 240.2467, 267.7766],
[220.4007, 239.7365, 213.8648, 234.9637],
[196.4630, 207.6319, 184.2927, 208.7465]])
"""
def __init__(self, *args):
super(Sequential, self).__init__(*args)
def forward(self, graph, *feats):
r"""
Sequentially apply modules to the input.
Parameters
----------
graph : DGLGraph or list of DGLGraphs
The graph(s) to apply modules on.
*feats :
Input features.
The output of the :math:`i`-th module should match the input
of the :math:`(i+1)`-th module in the sequential.
"""
if isinstance(graph, list):
for graph_i, module in zip(graph, self):
if not isinstance(feats, tuple):
feats = (feats,)
feats = module(graph_i, *feats)
elif isinstance(graph, DGLGraph):
for module in self:
if not isinstance(feats, tuple):
feats = (feats,)
feats = module(graph, *feats)
else:
raise TypeError('The first argument of forward must be a DGLGraph'
' or a list of DGLGraph s')
return feats
class WeightBasis(nn.Module):
r"""Basis decomposition module.
Basis decomposition is introduced in "`Modeling Relational Data with Graph
Convolutional Networks <https://arxiv.org/abs/1703.06103>`__"
and can be described as below:
.. math::
W_o = \sum_{b=1}^B a_{ob} V_b
Each weight output :math:`W_o` is essentially a linear combination of basis
transformations :math:`V_b` with coefficients :math:`a_{ob}`.
If is useful as a form of regularization on a large parameter matrix. Thus,
the number of weight outputs is usually larger than the number of bases.
Parameters
----------
shape : tuple[int]
Shape of the basis parameter.
num_bases : int
Number of bases.
num_outputs : int
Number of outputs.
"""
def __init__(self,
shape,
num_bases,
num_outputs):
super(WeightBasis, self).__init__()
self.shape = shape
self.num_bases = num_bases
self.num_outputs = num_outputs
if num_outputs <= num_bases:
dgl_warning('The number of weight outputs should be larger than the number'
' of bases.')
self.weight = nn.Parameter(th.Tensor(self.num_bases, *shape))
nn.init.xavier_uniform_(self.weight, gain=nn.init.calculate_gain('relu'))
# linear combination coefficients
self.w_comp = nn.Parameter(th.Tensor(self.num_outputs, self.num_bases))
nn.init.xavier_uniform_(self.w_comp, gain=nn.init.calculate_gain('relu'))
def forward(self):
r"""Forward computation
Returns
-------
weight : torch.Tensor
Composed weight tensor of shape ``(num_outputs,) + shape``
"""
# generate all weights from bases
weight = th.matmul(self.w_comp, self.weight.view(self.num_bases, -1))
return weight.view(self.num_outputs, *self.shape)
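# Hedged usage sketch (illustrative values, not part of the module API):
#   basis = WeightBasis(shape=(16, 16), num_bases=4, num_outputs=10)
#   weight = basis()  # composed weight tensor of shape (10, 16, 16)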
| [
"[email protected]"
] | |
29d9dae4b3ca012575e80a559d1fcd7ebd0b9277 | 90047daeb462598a924d76ddf4288e832e86417c | /third_party/WebKit/Tools/gdb/webkit.py | 3f75a6d089880483ddb5a036273761ec9a52185d | [
"BSD-3-Clause",
"LGPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-1.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft",
"MIT",
"Apache-2.0"
] | permissive | massbrowser/android | 99b8c21fa4552a13c06bbedd0f9c88dd4a4ad080 | a9c4371682c9443d6e1d66005d4db61a24a9617c | refs/heads/master | 2022-11-04T21:15:50.656802 | 2017-06-08T12:31:39 | 2017-06-08T12:31:39 | 93,747,579 | 2 | 2 | BSD-3-Clause | 2022-10-31T10:34:25 | 2017-06-08T12:36:07 | null | UTF-8 | Python | false | false | 15,072 | py | # Copyright (C) 2010, Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""GDB support for WebKit types.
Add this to your gdb by amending your ~/.gdbinit as follows:
python
import sys
sys.path.insert(0, "/path/to/tools/gdb/")
import webkit
"""
from __future__ import print_function
import gdb
import re
import struct
def guess_string_length(ptr):
"""Guess length of string pointed by ptr.
Returns a tuple of (length, an error message).
"""
# Try to guess at the length.
for i in range(0, 2048):
try:
if int((ptr + i).dereference()) == 0:
return i, ''
except RuntimeError:
# We indexed into inaccessible memory; give up.
return i, ' (gdb hit inaccessible memory)'
return 256, ' (gdb found no trailing NUL)'
def ustring_to_string(ptr, length=None):
"""Convert a pointer to UTF-16 data into a Python string encoded with utf-8.
ptr and length are both gdb.Value objects.
If length is unspecified, will guess at the length."""
error_message = ''
if length is None:
length, error_message = guess_string_length(ptr)
else:
length = int(length)
char_vals = [int((ptr + i).dereference()) for i in range(length)]
string = struct.pack('H' * length, *char_vals).decode('utf-16', 'replace').encode('utf-8')
return string + error_message
def lstring_to_string(ptr, length=None):
"""Convert a pointer to LChar* data into a Python (non-Unicode) string.
ptr and length are both gdb.Value objects.
If length is unspecified, will guess at the length."""
error_message = ''
if length is None:
length, error_message = guess_string_length(ptr)
else:
length = int(length)
string = ''.join([chr((ptr + i).dereference()) for i in range(length)])
return string + error_message
class StringPrinter(object):
"Shared code between different string-printing classes"
def __init__(self, val):
self.val = val
def display_hint(self):
return 'string'
class UCharStringPrinter(StringPrinter):
"Print a UChar*; we must guess at the length"
def to_string(self):
return ustring_to_string(self.val)
class LCharStringPrinter(StringPrinter):
"Print a LChar*; we must guess at the length"
def to_string(self):
return lstring_to_string(self.val)
class WTFAtomicStringPrinter(StringPrinter):
"Print a WTF::AtomicString"
def to_string(self):
return self.val['string_']
class WTFCStringPrinter(StringPrinter):
"Print a WTF::CString"
def to_string(self):
# The CString holds a buffer, which is a refptr to a WTF::CStringBuffer.
buf_ptr = self.val['buffer_']['ptr_']
if not buf_ptr:
return 0
data = (buf_ptr + 1).cast(gdb.lookup_type('char').pointer())
length = self.val['buffer_']['ptr_']['length_']
return ''.join([chr((data + i).dereference()) for i in range(length)])
class WTFStringImplPrinter(StringPrinter):
"Print a WTF::StringImpl"
def get_length(self):
return self.val['length_']
def to_string(self):
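        # WTF::StringImpl stores its characters inline, immediately after
        # the header object, so `address + 1` points at the first character.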
chars_start = self.val.address + 1
if self.is_8bit():
return lstring_to_string(chars_start.cast(gdb.lookup_type('char').pointer()),
self.get_length())
return ustring_to_string(chars_start.cast(gdb.lookup_type('UChar').pointer()),
self.get_length())
def is_8bit(self):
return self.val['is8_bit_']
class WTFStringPrinter(StringPrinter):
"Print a WTF::String"
def stringimpl_ptr(self):
return self.val['impl_']['ptr_']
def get_length(self):
if not self.stringimpl_ptr():
return 0
return WTFStringImplPrinter(self.stringimpl_ptr().dereference()).get_length()
def to_string(self):
if not self.stringimpl_ptr():
return '(null)'
return self.stringimpl_ptr().dereference()
class blinkKURLPrinter(StringPrinter):
"Print a blink::KURL"
def to_string(self):
return WTFStringPrinter(self.val['string_']).to_string()
class blinkLayoutUnitPrinter:
"Print a blink::LayoutUnit"
def __init__(self, val):
self.val = val
def to_string(self):
return "%.14gpx" % (self.val['value_'] / 64.0)
class blinkLayoutSizePrinter:
"Print a blink::LayoutSize"
def __init__(self, val):
self.val = val
def to_string(self):
return 'LayoutSize(%s, %s)' % (
blinkLayoutUnitPrinter(self.val['width_']).to_string(),
blinkLayoutUnitPrinter(self.val['height_']).to_string())
class blinkLayoutPointPrinter:
"Print a blink::LayoutPoint"
def __init__(self, val):
self.val = val
def to_string(self):
return 'LayoutPoint(%s, %s)' % (
blinkLayoutUnitPrinter(self.val['x_']).to_string(),
blinkLayoutUnitPrinter(self.val['y_']).to_string())
class blinkQualifiedNamePrinter(StringPrinter):
"Print a blink::QualifiedName"
def __init__(self, val):
super(blinkQualifiedNamePrinter, self).__init__(val)
self.prefix_length = 0
self.length = 0
if self.val['impl_']:
self.prefix_printer = WTFStringPrinter(
self.val['impl_']['ptr_']['prefix_']['string_'])
self.local_name_printer = WTFStringPrinter(
self.val['impl_']['ptr_']['local_name_']['string_'])
self.prefix_length = self.prefix_printer.get_length()
if self.prefix_length > 0:
self.length = (self.prefix_length + 1 +
self.local_name_printer.get_length())
else:
self.length = self.local_name_printer.get_length()
def get_length(self):
return self.length
def to_string(self):
if self.get_length() == 0:
return "(null)"
else:
if self.prefix_length > 0:
return (self.prefix_printer.to_string() + ":" +
self.local_name_printer.to_string())
else:
return self.local_name_printer.to_string()
class BlinkPixelsAndPercentPrinter:
"Print a blink::PixelsAndPercent value"
def __init__(self, val):
self.val = val
def to_string(self):
return "(%gpx, %g%%)" % (self.val['pixels'], self.val['percent'])
class BlinkLengthPrinter:
"""Print a blink::Length."""
def __init__(self, val):
self.val = val
def to_string(self):
ltype = self.val['type_']
if self.val['is_float_']:
val = self.val['float_value_']
else:
val = int(self.val['int_value_'])
quirk = ''
if self.val['quirk_']:
quirk = ', quirk=true'
if ltype == 0:
return 'Length(Auto)'
if ltype == 1:
return 'Length(%g%%, Percent%s)' % (val, quirk)
if ltype == 2:
return 'Length(%g, Fixed%s)' % (val, quirk)
if ltype == 3:
return 'Length(Intrinsic)'
if ltype == 4:
return 'Length(MinIntrinsic)'
if ltype == 5:
return 'Length(MinContent)'
if ltype == 6:
return 'Length(MaxContent)'
if ltype == 7:
return 'Length(FillAvailable)'
if ltype == 8:
return 'Length(FitContent)'
if ltype == 9:
# Would like to print pixelsAndPercent() but can't call member
# functions - https://sourceware.org/bugzilla/show_bug.cgi?id=13326
return 'Length(Calculated)'
if ltype == 10:
return 'Length(ExtendToZoom)'
if ltype == 11:
return 'Length(DeviceWidth)'
if ltype == 12:
return 'Length(DeviceHeight)'
if ltype == 13:
return 'Length(MaxSizeNone)'
return 'Length(unknown type %i)' % ltype
class WTFVectorPrinter:
"""Pretty Printer for a WTF::Vector.
The output of this pretty printer is similar to the output of std::vector's
pretty printer, which is bundled in gcc.
Example gdb session should look like:
(gdb) p v
$3 = WTF::Vector of length 7, capacity 16 = {7, 17, 27, 37, 47, 57, 67}
(gdb) set print elements 3
(gdb) p v
$6 = WTF::Vector of length 7, capacity 16 = {7, 17, 27...}
(gdb) set print array
(gdb) p v
$7 = WTF::Vector of length 7, capacity 16 = {
7,
17,
27
...
}
(gdb) set print elements 200
(gdb) p v
$8 = WTF::Vector of length 7, capacity 16 = {
7,
17,
27,
37,
47,
57,
67
}
"""
class Iterator:
def __init__(self, start, finish):
self.item = start
self.finish = finish
self.count = 0
def __iter__(self):
return self
def __next__(self):
if self.item == self.finish:
raise StopIteration
count = self.count
self.count += 1
element = self.item.dereference()
self.item += 1
return ('[%d]' % count, element)
# Python version < 3 compatibility:
def next(self):
return self.__next__()
def __init__(self, val):
self.val = val
def children(self):
start = self.val['buffer_']
return self.Iterator(start, start + self.val['size_'])
def to_string(self):
return ('%s of length %d, capacity %d'
% ('WTF::Vector', self.val['size_'], self.val['capacity_']))
def display_hint(self):
return 'array'
# Copied from //tools/gdb/gdb_chrome.py
def typed_ptr(ptr):
"""Prints a pointer along with its exact type.
By default, gdb would print just the address, which takes more
steps to interpret.
"""
# Returning this as a cast expression surrounded by parentheses
# makes it easier to cut+paste inside of gdb.
return '((%s)%s)' % (ptr.dynamic_type, ptr)
class WTFRefOrOwnPtrPrinter:
def __init__(self, val):
self.val = val
def to_string(self):
type_without_param = re.sub(r'<.*>', '', self.val.type.name)
return '%s%s' % (type_without_param, typed_ptr(self.val['ptr_']))
class BlinkDataRefPrinter:
def __init__(self, val):
self.val = val
def to_string(self):
return 'DataRef(%s)' % (
WTFRefOrOwnPtrPrinter(self.val['data_']).to_string())
def add_pretty_printers():
pretty_printers = (
(re.compile("^WTF::Vector<.*>$"), WTFVectorPrinter),
(re.compile("^WTF::AtomicString$"), WTFAtomicStringPrinter),
(re.compile("^WTF::CString$"), WTFCStringPrinter),
(re.compile("^WTF::String$"), WTFStringPrinter),
(re.compile("^WTF::StringImpl$"), WTFStringImplPrinter),
(re.compile("^blink::KURL$"), blinkKURLPrinter),
(re.compile("^blink::LayoutUnit$"), blinkLayoutUnitPrinter),
(re.compile("^blink::LayoutPoint$"), blinkLayoutPointPrinter),
(re.compile("^blink::LayoutSize$"), blinkLayoutSizePrinter),
(re.compile("^blink::QualifiedName$"), blinkQualifiedNamePrinter),
(re.compile("^blink::PixelsAndPercent$"), BlinkPixelsAndPercentPrinter),
(re.compile("^blink::Length$"), BlinkLengthPrinter),
(re.compile("^WTF::(Ref|Own)Ptr<.*>$"), WTFRefOrOwnPtrPrinter),
(re.compile("^blink::DataRef<.*>$"), BlinkDataRefPrinter),
)
def lookup_function(val):
"""Function used to load pretty printers; will be passed to GDB."""
type = val.type
if type.code == gdb.TYPE_CODE_REF:
type = type.target()
type = type.unqualified().strip_typedefs()
tag = type.tag
if tag:
for function, pretty_printer in pretty_printers:
if function.search(tag):
return pretty_printer(val)
if type.code == gdb.TYPE_CODE_PTR:
name = str(type.target().unqualified())
if name == 'UChar':
return UCharStringPrinter(val)
if name == 'LChar':
return LCharStringPrinter(val)
return None
gdb.pretty_printers.append(lookup_function)
add_pretty_printers()
class PrintPathToRootCommand(gdb.Command):
"""Command for printing WebKit Node trees.
Usage: printpathtoroot variable_name"""
def __init__(self):
super(PrintPathToRootCommand, self).__init__("printpathtoroot",
gdb.COMMAND_SUPPORT,
gdb.COMPLETE_NONE)
def invoke(self, arg, from_tty):
element_type = gdb.lookup_type('blink::Element')
node_type = gdb.lookup_type('blink::Node')
frame = gdb.selected_frame()
try:
val = gdb.Frame.read_var(frame, arg)
except:
print("No such variable, or invalid type")
return
target_type = str(val.type.target().strip_typedefs())
if target_type == str(node_type):
stack = []
while val:
stack.append([val,
val.cast(element_type.pointer()).dereference()[
'tag_name_']])
val = val.dereference()['parent_']
padding = ''
while len(stack) > 0:
pair = stack.pop()
print(padding, pair[1], pair[0])
padding = padding + ' '
else:
print('Sorry: I don\'t know how to deal with %s yet.' % target_type)
PrintPathToRootCommand()
| [
"[email protected]"
] | |
d76a7713552cf333afc6141ab7be1d25632b568f | a777170c979214015df511999f5f08fc2e0533d8 | /claf/data/reader/bert/glue/qnli.py | 4b5a99b0e21fb352e87ffcf6d20182905ddebe0e | [
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | permissive | srlee-ai/claf | 210b2d51918cf210683e7489ccb8347cb8b1f146 | 89b3e5c5ec0486886876ea3bac381508c6a6bf58 | refs/heads/master | 2021-02-13T04:38:36.198288 | 2020-03-03T15:01:01 | 2020-03-03T15:01:01 | 244,661,892 | 0 | 0 | MIT | 2020-03-03T14:45:52 | 2020-03-03T14:45:52 | null | UTF-8 | Python | false | false | 1,660 | py |
import logging
from overrides import overrides
from claf.data.reader import SeqClsBertReader
from claf.decorator import register
logger = logging.getLogger(__name__)
@register("reader:qnli_bert")
class QNLIBertReader(SeqClsBertReader):
"""
QNLI DataReader for BERT
* Args:
file_paths: .tsv file paths (train and dev)
tokenizers: defined tokenizers config
"""
CLASS_DATA = ["entailment", "not_entailment"]
METRIC_KEY = "accuracy"
def __init__(
self,
file_paths,
tokenizers,
sequence_max_length=None,
cls_token="[CLS]",
sep_token="[SEP]",
input_type="bert",
is_test=False,
):
super(QNLIBertReader, self).__init__(
file_paths,
tokenizers,
sequence_max_length,
class_key=None,
cls_token=cls_token,
sep_token=sep_token,
input_type=input_type,
is_test=is_test,
)
@overrides
def _get_data(self, file_path, **kwargs):
data_type = kwargs["data_type"]
_file = self.data_handler.read(file_path)
lines = _file.split("\n")
data = []
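        # GLUE QNLI .tsv layout (row 0 is the header): index, question,
        # sentence, label -- hence tokens [1], [2] and [-1] below.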
for i, line in enumerate(lines):
if i == 0:
continue
line_tokens = line.split("\t")
if len(line_tokens) <= 1:
continue
data.append({
"uid": f"qnli-{file_path}-{data_type}-{i}",
"sequence_a": line_tokens[1],
"sequence_b": line_tokens[2],
self.class_key: str(line_tokens[-1]),
})
return data
| [
"[email protected]"
] | |
33ccbd6363bc7859a7fc752f05cad29813f8f354 | bd72c02af0bbd8e3fc0d0b131e3fb9a2aaa93e75 | /Tree/binary_tree_paths.py | 3eaa605aa96b08ee364375160ae8865b257bf882 | [] | no_license | harvi7/Leetcode-Problems-Python | d3a5e8898aceb11abc4cae12e1da50061c1d352c | 73adc00f6853e821592c68f5dddf0a823cce5d87 | refs/heads/master | 2023-05-11T09:03:03.181590 | 2023-04-29T22:03:41 | 2023-04-29T22:03:41 | 222,657,838 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 482 | py | from typing import List


# Definition for a binary tree node (normally supplied by the LeetCode
# judge; included here so the snippet is self-contained).
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right


class Solution:
def binaryTreePaths(self, root: TreeNode) -> List[str]:
paths = []
if not root: return paths
self.dfs(root, "", paths)
return paths
def dfs(self, root, path, paths):
path = path + str(root.val)
if not root.left and not root.right:
paths.append(path)
return
if root.left: self.dfs(root.left, path + "->", paths)
        if root.right: self.dfs(root.right, path + "->", paths)
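# Illustrative check (LeetCode 257): for the tree 1 -> (2 -> (None, 5), 3),
# binaryTreePaths returns ["1->2->5", "1->3"].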
"[email protected]"
] | |
5e7794458fd0973c267d8c6df2b78d63abcb4d98 | be9960512ddf562516c4f2d909577fc6b9750f19 | /packages/jet_bridge_base/jet_bridge_base/filters/filter_class.py | 80997d750eb1d9a781847a4872c1e3298f7df9e4 | [
"MIT"
] | permissive | timgates42/jet-bridge | 9abdc8bdf420c720a30d6db163649a2a74c6b829 | 80c1f3a96dc467fd8c98cbdfbda2e42aa6a1d3b4 | refs/heads/master | 2023-03-16T03:19:08.358590 | 2022-06-14T16:26:14 | 2022-06-14T16:26:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,993 | py | from jet_bridge_base.utils.queryset import get_session_engine
from sqlalchemy import inspect
from jet_bridge_base.filters import lookups
from jet_bridge_base.filters.filter import Filter
from jet_bridge_base.filters.filter_for_dbfield import filter_for_data_type
class FilterClass(object):
filters = []
def __init__(self, *args, **kwargs):
self.meta = getattr(self, 'Meta', None)
if 'context' in kwargs:
self.handler = kwargs['context'].get('handler', None)
self.update_filters()
def update_filters(self):
filters = []
if self.meta:
if hasattr(self.meta, 'model'):
Model = self.meta.model
mapper = inspect(Model)
columns = mapper.columns
if hasattr(self.meta, 'fields'):
columns = filter(lambda x: x.name in self.meta.fields, columns)
for column in columns:
item = filter_for_data_type(column.type)
for lookup in item['lookups']:
for exclude in [False, True]:
instance = item['filter_class'](
name=column.key,
column=column,
lookup=lookup,
exclude=exclude
)
filters.append(instance)
declared_filters = filter(lambda x: isinstance(x[1], Filter), map(lambda x: (x, getattr(self, x)), dir(self)))
for filter_name, filter_item in declared_filters:
filter_item.name = filter_name
filter_item.model = Model
filter_item.handler = self.handler
filters.append(filter_item)
self.filters = filters
def filter_queryset(self, request, queryset):
session = request.session
def get_filter_value(name, filters_instance=None):
value = request.get_argument_safe(name, None)
if filters_instance and value is not None and get_session_engine(session) == 'bigquery':
python_type = filters_instance.column.type.python_type
value = python_type(value)
return value
for item in self.filters:
if item.name:
argument_name = '{}__{}'.format(item.name, item.lookup)
if item.exclude:
argument_name = 'exclude__{}'.format(argument_name)
value = get_filter_value(argument_name, item)
if value is None and item.lookup == lookups.DEFAULT_LOOKUP:
argument_name = item.name
if item.exclude:
argument_name = 'exclude__{}'.format(argument_name)
value = get_filter_value(argument_name, item)
else:
value = None
queryset = item.filter(queryset, value)
return queryset
| [
"[email protected]"
] | |
0b33acfc0a8e92200b6bb9ff425ee92732e063f5 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/24/usersdata/146/11428/submittedfiles/av1_m3.py | 5a721a4808aefc5e4ee7ce452cf07957f8fd7305 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 330 | py | # -*- coding: utf-8 -*-
from __future__ import division
import math
m = input('Digite o numero de termos: ')
soma = 0
i = 1
den = 2
# Nilakantha series: pi = 3 + 4/(2*3*4) - 4/(4*5*6) + 4/(6*7*8) - ...
# (assumed intent; the original mismatched denominators and the step of 1
# in `den` kept the sum from converging to pi)
while i<=m :
    if i%2==0:
        soma = soma - 4/((den)*(den+1)*(den+2))
    else:
        soma = soma + 4/((den)*(den+1)*(den+2))
    den = den+2
    i = i+1
pi = 3+soma
print ('%.6f'%pi)
| [
"[email protected]"
] | |
1b330ed34ebbd7922f0d6f1b9d56a7a7c71f35db | f819fe72c5b18b42a25a71dc2900c7fa80e17811 | /pandas/_libs/tslibs/timestamps.pyi | 4de51d4dc7dd8f5b04a87cd41a4ae72ca0ed76a2 | [
"BSD-3-Clause"
] | permissive | JMBurley/pandas | 34d101425acb0ac35a53bcf29fbd47c2d4c88fda | b74dc5c077971301c5b9ff577fa362943f3c3a17 | refs/heads/master | 2022-11-06T00:48:41.465865 | 2022-06-13T19:30:11 | 2022-06-13T19:30:11 | 229,853,377 | 1 | 0 | BSD-3-Clause | 2019-12-24T02:11:54 | 2019-12-24T02:11:53 | null | UTF-8 | Python | false | false | 7,504 | pyi | from datetime import (
date as _date,
datetime,
time as _time,
timedelta,
tzinfo as _tzinfo,
)
from time import struct_time
from typing import (
ClassVar,
TypeVar,
overload,
)
import numpy as np
from pandas._libs.tslibs import (
BaseOffset,
Period,
Tick,
Timedelta,
)
_DatetimeT = TypeVar("_DatetimeT", bound=datetime)
def integer_op_not_supported(obj: object) -> TypeError: ...
class Timestamp(datetime):
min: ClassVar[Timestamp]
max: ClassVar[Timestamp]
resolution: ClassVar[Timedelta]
value: int # np.int64
def __new__(
cls: type[_DatetimeT],
ts_input: int
| np.integer
| float
| str
| _date
| datetime
| np.datetime64 = ...,
freq: int | None | str | BaseOffset = ...,
tz: str | _tzinfo | None | int = ...,
unit: str | int | None = ...,
year: int | None = ...,
month: int | None = ...,
day: int | None = ...,
hour: int | None = ...,
minute: int | None = ...,
second: int | None = ...,
microsecond: int | None = ...,
nanosecond: int | None = ...,
tzinfo: _tzinfo | None = ...,
*,
fold: int | None = ...,
) -> _DatetimeT: ...
# GH 46171
# While Timestamp can return pd.NaT, having the constructor return
# a Union with NaTType makes things awkward for users of pandas
def _set_freq(self, freq: BaseOffset | None) -> None: ...
@classmethod
def _from_value_and_reso(
cls, value: int, reso: int, tz: _tzinfo | None
) -> Timestamp: ...
@property
def year(self) -> int: ...
@property
def month(self) -> int: ...
@property
def day(self) -> int: ...
@property
def hour(self) -> int: ...
@property
def minute(self) -> int: ...
@property
def second(self) -> int: ...
@property
def microsecond(self) -> int: ...
@property
def tzinfo(self) -> _tzinfo | None: ...
@property
def tz(self) -> _tzinfo | None: ...
@property
def fold(self) -> int: ...
@classmethod
def fromtimestamp(
cls: type[_DatetimeT], t: float, tz: _tzinfo | None = ...
) -> _DatetimeT: ...
@classmethod
def utcfromtimestamp(cls: type[_DatetimeT], t: float) -> _DatetimeT: ...
@classmethod
def today(cls: type[_DatetimeT], tz: _tzinfo | str | None = ...) -> _DatetimeT: ...
@classmethod
def fromordinal(
cls: type[_DatetimeT],
ordinal: int,
freq: str | BaseOffset | None = ...,
tz: _tzinfo | str | None = ...,
) -> _DatetimeT: ...
@classmethod
def now(cls: type[_DatetimeT], tz: _tzinfo | str | None = ...) -> _DatetimeT: ...
@classmethod
def utcnow(cls: type[_DatetimeT]) -> _DatetimeT: ...
# error: Signature of "combine" incompatible with supertype "datetime"
@classmethod
def combine(cls, date: _date, time: _time) -> datetime: ... # type: ignore[override]
@classmethod
def fromisoformat(cls: type[_DatetimeT], date_string: str) -> _DatetimeT: ...
def strftime(self, format: str) -> str: ...
def __format__(self, fmt: str) -> str: ...
def toordinal(self) -> int: ...
def timetuple(self) -> struct_time: ...
def timestamp(self) -> float: ...
def utctimetuple(self) -> struct_time: ...
def date(self) -> _date: ...
def time(self) -> _time: ...
def timetz(self) -> _time: ...
def replace(
self: _DatetimeT,
year: int = ...,
month: int = ...,
day: int = ...,
hour: int = ...,
minute: int = ...,
second: int = ...,
microsecond: int = ...,
tzinfo: _tzinfo | None = ...,
fold: int = ...,
) -> _DatetimeT: ...
def astimezone(self: _DatetimeT, tz: _tzinfo | None = ...) -> _DatetimeT: ...
def ctime(self) -> str: ...
def isoformat(self, sep: str = ..., timespec: str = ...) -> str: ...
@classmethod
def strptime(cls, date_string: str, format: str) -> datetime: ...
def utcoffset(self) -> timedelta | None: ...
def tzname(self) -> str | None: ...
def dst(self) -> timedelta | None: ...
def __le__(self, other: datetime) -> bool: ... # type: ignore[override]
def __lt__(self, other: datetime) -> bool: ... # type: ignore[override]
def __ge__(self, other: datetime) -> bool: ... # type: ignore[override]
def __gt__(self, other: datetime) -> bool: ... # type: ignore[override]
# error: Signature of "__add__" incompatible with supertype "date"/"datetime"
@overload # type: ignore[override]
def __add__(self, other: np.ndarray) -> np.ndarray: ...
@overload
def __add__(
self: _DatetimeT, other: timedelta | np.timedelta64 | Tick
) -> _DatetimeT: ...
def __radd__(self: _DatetimeT, other: timedelta) -> _DatetimeT: ...
@overload # type: ignore[override]
def __sub__(self, other: datetime) -> Timedelta: ...
@overload
def __sub__(
self: _DatetimeT, other: timedelta | np.timedelta64 | Tick
) -> _DatetimeT: ...
def __hash__(self) -> int: ...
def weekday(self) -> int: ...
def isoweekday(self) -> int: ...
def isocalendar(self) -> tuple[int, int, int]: ...
@property
def is_leap_year(self) -> bool: ...
@property
def is_month_start(self) -> bool: ...
@property
def is_quarter_start(self) -> bool: ...
@property
def is_year_start(self) -> bool: ...
@property
def is_month_end(self) -> bool: ...
@property
def is_quarter_end(self) -> bool: ...
@property
def is_year_end(self) -> bool: ...
def to_pydatetime(self, warn: bool = ...) -> datetime: ...
def to_datetime64(self) -> np.datetime64: ...
def to_period(self, freq: BaseOffset | str | None = ...) -> Period: ...
def to_julian_date(self) -> np.float64: ...
@property
def asm8(self) -> np.datetime64: ...
def tz_convert(self: _DatetimeT, tz: _tzinfo | str | None) -> _DatetimeT: ...
# TODO: could return NaT?
def tz_localize(
self: _DatetimeT,
tz: _tzinfo | str | None,
ambiguous: str = ...,
nonexistent: str = ...,
) -> _DatetimeT: ...
def normalize(self: _DatetimeT) -> _DatetimeT: ...
# TODO: round/floor/ceil could return NaT?
def round(
self: _DatetimeT, freq: str, ambiguous: bool | str = ..., nonexistent: str = ...
) -> _DatetimeT: ...
def floor(
self: _DatetimeT, freq: str, ambiguous: bool | str = ..., nonexistent: str = ...
) -> _DatetimeT: ...
def ceil(
self: _DatetimeT, freq: str, ambiguous: bool | str = ..., nonexistent: str = ...
) -> _DatetimeT: ...
def day_name(self, locale: str | None = ...) -> str: ...
def month_name(self, locale: str | None = ...) -> str: ...
@property
def day_of_week(self) -> int: ...
@property
def dayofweek(self) -> int: ...
@property
def day_of_month(self) -> int: ...
@property
def day_of_year(self) -> int: ...
@property
def dayofyear(self) -> int: ...
@property
def quarter(self) -> int: ...
@property
def week(self) -> int: ...
def to_numpy(
self, dtype: np.dtype | None = ..., copy: bool = ...
) -> np.datetime64: ...
@property
def _date_repr(self) -> str: ...
@property
def days_in_month(self) -> int: ...
@property
def daysinmonth(self) -> int: ...
def _as_unit(self, unit: str, round_ok: bool = ...) -> Timestamp: ...
| [
"[email protected]"
] | |
4c011ed2da70f655d3aa386df74a1e326d494d21 | 1ada3010856e39c93e2483c960aa8fc25e2b3332 | /Binary Tree/FullBT.py | 034233595a63dbed3ebcff43dae5e8c12860eb95 | [] | no_license | Taoge123/LeetCode | 4f9e26be05f39b37bdbb9c1e75db70afdfa1b456 | 4877e35a712f59bc7b8fffa3d8af2ffa56adb08c | refs/heads/master | 2022-02-24T20:09:21.149818 | 2020-07-31T03:18:05 | 2020-07-31T03:18:05 | 142,700,689 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 965 | py | class Node:
def __init__(self, data):
self.data = data
self.left = None
self.right = None
def isFullTree(root):
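    # A binary tree is full when every node has zero or two children:
    # an empty tree or a leaf counts as full, while a node with exactly
    # one child makes the whole tree not full.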
if root is None:
return True
#If leaf node
if root.left is None and root.right is None:
return True
if root.left is not None and root.right is not None:
return (isFullTree(root.left) and isFullTree(root.right))
return False
root = Node(10);
root.left = Node(20);
root.right = Node(30);
root.left.right = Node(40);
root.left.left = Node(50);
root.right.left = Node(60);
root.right.right = Node(70);
root.left.left.left = Node(80);
root.left.left.right = Node(90);
root.left.right.left = Node(80);
root.left.right.right = Node(90);
root.right.left.left = Node(80);
root.right.left.right = Node(90);
root.right.right.left = Node(80);
# root.right.right.right = Node(90);
if isFullTree(root):
print("The Binary tree is full")
else:
print("Binary tree is not full")
| [
"[email protected]"
] | |
ccce9383b7b87da27d4a190b8045a110fb1240bc | 5a42ce780721294d113335712d45c62a88725109 | /project/pyalg_api/commands/orient_command.py | 67982f7aa800805ca33b77737fa43bda3887f4af | [] | no_license | P79N6A/project_code | d2a933d53deb0b4e0bcba97834de009e7bb78ad0 | 1b0e863ff3977471f5a94ef7d990796a9e9669c4 | refs/heads/master | 2020-04-16T02:06:57.317540 | 2019-01-11T07:02:05 | 2019-01-11T07:02:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 746 | py | # -*- coding: utf-8 -*-
import json
import os
import pandas as pd
import pyorient
import pdb
import random
import re
from datetime import datetime, timedelta
from lib.logger import logger
from .base_command import BaseCommand
from model.open import OpenJxlStat
from model.antifraud import AfWsm
from module.yiyiyuan import YiUserRemitList
from module.yiyiyuan import YiFavoriteContact
from module.detail import Detail
from lib.ssdb_config import SsdbConfig
class OrientCommand(BaseCommand):
def __init__(self):
super(OrientCommand, self).__init__()
self.client = None
# ORIENT DB
def runorient(self,start_time = None , end_time = None):
# 连接数据库
self.contactOrient()
return True
| [
"[email protected]"
] | |
b94122e661a89d99930688d0073fd4d58d5439c7 | f000fa4e6ef1de9591eeabff43ba57b7bf32561d | /cephlcm/api/views/v1/permission.py | 98322faa2514b82357002f20746813ca09c7ca49 | [] | no_license | VictorDenisov/ceph-lcm | 1aca07f2d17bfda8760d192ffd6d17645705b6e4 | 3cfd9ced6879fca1c39039e195d22d897ddcde80 | refs/heads/master | 2021-01-15T09:19:23.723613 | 2016-09-17T01:18:45 | 2016-09-17T01:18:45 | 68,424,913 | 0 | 0 | null | 2016-09-17T01:17:36 | 2016-09-17T01:17:36 | null | UTF-8 | Python | false | false | 502 | py | # -*- coding: utf-8 -*-
"""Small API to list permissions available in application."""
from cephlcm.api import auth
from cephlcm.api.views import generic
from cephlcm.common.models import role
class PermissionView(generic.ModelView):
decorators = [
auth.require_authorization("api", "view_role"),
auth.require_authentication
]
NAME = "permission"
ENDPOINT = "/permission/"
def get(self):
return role.PermissionSet(role.PermissionSet.KNOWN_PERMISSIONS)
| [
"[email protected]"
] | |
5a5e361c2eba01070a0d3e17a30153188ba0779f | 9b64f0f04707a3a18968fd8f8a3ace718cd597bc | /huaweicloud-sdk-waf/huaweicloudsdkwaf/v1/model/list_value_list_response.py | 19cf334e308e193d4391dbf7c5b6693f27dd4bcc | [
"Apache-2.0"
] | permissive | jaminGH/huaweicloud-sdk-python-v3 | eeecb3fb0f3396a475995df36d17095038615fba | 83ee0e4543c6b74eb0898079c3d8dd1c52c3e16b | refs/heads/master | 2023-06-18T11:49:13.958677 | 2021-07-16T07:57:47 | 2021-07-16T07:57:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,469 | py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
class ListValueListResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'total': 'int',
'items': 'list[ValueList]'
}
attribute_map = {
'total': 'total',
'items': 'items'
}
def __init__(self, total=None, items=None):
"""ListValueListResponse - a model defined in huaweicloud sdk"""
super(ListValueListResponse, self).__init__()
self._total = None
self._items = None
self.discriminator = None
if total is not None:
self.total = total
if items is not None:
self.items = items
@property
def total(self):
"""Gets the total of this ListValueListResponse.
        Number of reference tables
:return: The total of this ListValueListResponse.
:rtype: int
"""
return self._total
@total.setter
def total(self, total):
"""Sets the total of this ListValueListResponse.
        Number of reference tables
:param total: The total of this ListValueListResponse.
:type: int
"""
self._total = total
@property
def items(self):
"""Gets the items of this ListValueListResponse.
        List of reference tables
:return: The items of this ListValueListResponse.
:rtype: list[ValueList]
"""
return self._items
@items.setter
def items(self, items):
"""Sets the items of this ListValueListResponse.
        List of reference tables
:param items: The items of this ListValueListResponse.
:type: list[ValueList]
"""
self._items = items
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
import simplejson as json
return json.dumps(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ListValueListResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
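
# Illustrative sketch: the model is normally populated by the WAF client from
# a JSON response, but it can also be constructed directly (ValueList is
# assumed to be importable from the same SDK package):
#
#   resp = ListValueListResponse(total=0, items=[])
#   print(resp.to_str())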
| [
"[email protected]"
] | |
8f39586d925238cc0f8aaf1533ed11e6bba15271 | 88b4b883c1a262b5f9ca2c97bf1835d6d73d9f0b | /src/api/python/hce/app/UrlNormalize.py | d2cfe91a54afb21b110f70ed0b308b85d4c67660 | [] | no_license | hce-project/hce-bundle | 2f93dc219d717b9983c4bb534884e4a4b95e9b7b | 856a6df2acccd67d7af640ed09f05b2c99895f2e | refs/heads/master | 2021-09-07T22:55:20.964266 | 2018-03-02T12:00:42 | 2018-03-02T12:00:42 | 104,993,955 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,593 | py | # coding: utf-8
"""
HCE project, Python bindings, Distributed Tasks Manager application.
UrlNormalize Class content main functional of support the URL_NORMALIZE properties.
@package: app
@file UrlNormalize.py
@author Alexander Vybornyh <[email protected]>
@link: http://hierarchical-cluster-engine.com/
@copyright: Copyright © 2013-2017 IOIX Ukraine
@license: http://hierarchical-cluster-engine.com/license/
@since: 0.1
"""
import re
import app.Consts as APP_CONSTS
import app.Utils as Utils
class UrlNormalize(object):
# Constants used in class
PROPERTY_OPTIONS_MASK = 'mask'
PROPERTY_OPTIONS_REPLACE = 'replace'
# Constants of error messages
ERROR_MSG_FAILED_REPLACE = "Operation replace failed. Error: %s"
# Initialization
def __init__(self):
pass
## get normalize mask
#
# @param siteProperties - site properties
# @param defaultValue - default value
# @return normalize mask
@staticmethod
def getNormalizeMask(siteProperties, defaultValue=Utils.UrlNormalizator.NORM_DEFAULT):
# variable for result
ret = defaultValue
if siteProperties is not None and isinstance(siteProperties, dict) and APP_CONSTS.URL_NORMALIZE in siteProperties and \
isinstance(siteProperties[APP_CONSTS.URL_NORMALIZE], dict) and UrlNormalize.PROPERTY_OPTIONS_MASK in siteProperties[APP_CONSTS.URL_NORMALIZE]:
ret = int(siteProperties[APP_CONSTS.URL_NORMALIZE][UrlNormalize.PROPERTY_OPTIONS_MASK])
return ret
# # execute normalization url string use base url
#
# @param siteProperties - site properties
# @param base - base url string
# @param url - url string
# @param supportProtocols - support protocol list
# @param log - logger instance
# @return already normalized url string or None - in case of bad result normalization
@staticmethod
def execute(siteProperties, base, url, supportProtocols=None, log=None):
# check site property for exist replace rule
if siteProperties is not None and isinstance(siteProperties, dict) and APP_CONSTS.URL_NORMALIZE in siteProperties:
if log is not None:
log.info("!!! siteProperties['%s']: '%s', type: %s", str(APP_CONSTS.URL_NORMALIZE), str(siteProperties[APP_CONSTS.URL_NORMALIZE]),
str(type(siteProperties[APP_CONSTS.URL_NORMALIZE])))
replaceList = []
propertyDict = {}
if isinstance(siteProperties[APP_CONSTS.URL_NORMALIZE], basestring):
propertyDict = Utils.jsonLoadsSafe(jsonString=siteProperties[APP_CONSTS.URL_NORMALIZE], default=propertyDict, log=log)
if isinstance(propertyDict, dict) and UrlNormalize.PROPERTY_OPTIONS_REPLACE in propertyDict:
replaceList = propertyDict[UrlNormalize.PROPERTY_OPTIONS_REPLACE]
if log is not None:
log.debug("!!! replaceList: %s", str(replaceList))
if isinstance(replaceList, list):
for replaceElem in replaceList:
if isinstance(replaceElem, dict):
for pattern, repl in replaceElem.items():
try:
if log is not None:
log.debug("!!! pattern: %s, url: %s", str(pattern), str(url))
url = re.sub(pattern=pattern, repl=repl, string=url, flags=re.U + re.I)
if log is not None:
log.debug("!!! res url: %s", str(url))
except Exception, err:
if log is not None:
log.error(UrlNormalize.ERROR_MSG_FAILED_REPLACE, str(err))
return Utils.urlNormalization(base=base, url=url, supportProtocols=supportProtocols, log=log)
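
# Illustrative sketch (hypothetical property values): a site property whose
# URL_NORMALIZE "replace" rule strips a tracking query parameter before the
# generic normalization in Utils.urlNormalization runs.
#
#   props = {APP_CONSTS.URL_NORMALIZE:
#            '{"replace": [{"[?&]utm_source=[^&]*": ""}]}'}
#   UrlNormalize.execute(siteProperties=props,
#                        base='http://example.com/',
#                        url='http://example.com/page?utm_source=feed')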
| [
"bgv@bgv-d9"
] | bgv@bgv-d9 |
3745a3536c649d6903183d1fd0fc4de53df98f5c | c9a222631e4a0b827ee4efbd4e362d00b7cc6d48 | /demo/画方格/rose.py | 837c42354e6f408ee364e1570706b3fe5d0bab0e | [] | no_license | enticejin/python | d86b1727048bae24bce0fedc911953a20d11947c | 09dea6c62e6be8389fb23f472a1f02896a74c696 | refs/heads/master | 2023-03-12T18:00:06.322335 | 2021-11-09T01:38:13 | 2021-11-09T01:38:13 | 234,876,815 | 3 | 1 | null | 2023-03-04T01:24:08 | 2020-01-19T09:54:07 | Python | UTF-8 | Python | false | false | 1,437 | py | from turtle import *
import time
setup(1000,800,0,0)
speed(0)
penup()
seth(90)
fd(340)
seth(0)
pendown()
speed(5)
begin_fill()
fillcolor('red')
circle(50,30)
for i in range(10):
fd(1)
left(10)
circle(40,40)
for i in range(6):
fd(1)
left(3)
circle(80,40)
for i in range(20):
fd(0.5)
left(5)
circle(80,45)
for i in range(10):
fd(2)
left(1)
circle(80,25)
for i in range(20):
fd(1)
left(4)
circle(50,50)
time.sleep(0.1)
circle(120,55)
speed(0)
seth(-90)
fd(70)
right(150)
fd(20)
left(140)
circle(140,90)
left(30)
circle(160,100)
left(130)
fd(25)
penup()
right(150)
circle(40,80)
pendown()
left(115)
fd(60)
penup()
left(180)
fd(60)
pendown()
end_fill()
right(120)
circle(-50,50)
circle(-20,90)
speed(1)
fd(75)
speed(0)
circle(90,110)
penup()
left(162)
fd(185)
left(170)
pendown()
circle(200,10)
circle(100,40)
circle(-52,115)
left(20)
circle(100,20)
circle(300,20)
speed(1)
fd(250)
penup()
speed(0)
left(180)
fd(250)
circle(-300,7)
right(80)
circle(200,5)
pendown()
left(60)
begin_fill()
fillcolor('green')
circle(-80,100)
right(90)
fd(10)
left(20)
circle(-63,127)
end_fill()
penup()
left(50)
fd(20)
left(180)
pendown()
circle(200,25)
penup()
right(150)
fd(180)
right(40)
pendown()
begin_fill()
fillcolor('green')
circle(-100,80)
right(150)
fd(10)
left(60)
circle(-80,98)
end_fill()
penup()
left(60)
fd(13)
left(180)
pendown()
speed(1)
circle(-200,23)
exitonclick() | [
"[email protected]"
] | |
9c77a2d055cd61fac338aee65656356b09aa4f57 | 323f618462f939621a4287a781986d995f83b5f2 | /controllers/application/sublime.py | c6e1faa80c7b8969174f2ab793d36fcf4e948a93 | [] | no_license | almamuncsit/virtual-assistant | 3f08e433bd4d9c985a00bfdc2b31bf27b3d8ce9e | 4ea31dc8667e397724229d8383bbfbb0e50ab626 | refs/heads/master | 2023-04-03T18:07:26.775071 | 2021-04-08T17:29:47 | 2021-04-08T17:29:47 | 325,242,463 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 57 | py | import os
def sublime() -> None:
os.system('subl')
| [
"[email protected]"
] | |
353284559c12cf179d807a4ba9ac588bd8f495d5 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2533/59137/270093.py | f1c8a2c861bf977ab43894cd1ef420e8d96bcd4a | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 221 | py | def s1():
array = list(eval(input()))
ans = []
for n in array:
if n % 2 == 0:
ans.append(n)
for n in array:
if n % 2 == 1:
ans.append(n)
print(ans)
s1() | [
"[email protected]"
] | |
f142571b2aee06f9277b92ef1709a27a21e74f6a | f6290b7b8ffb263b7f0d252a67e2c6320a4c1143 | /Recursion/rat_in_a_maze.py | f7cc926fed7bed41572c85590758f250caa32735 | [] | no_license | datAnir/GeekForGeeks-Problems | b45b0ae80053da8a1b47a2af06e688081574ef80 | c71f11d0349ed3850dfaa9c7a078ee70f67e46a1 | refs/heads/master | 2023-05-29T15:21:59.680793 | 2020-12-15T04:55:01 | 2020-12-15T04:55:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,655 | py | '''
https://practice.geeksforgeeks.org/problems/rat-in-a-maze-problem/1
Consider a rat placed at (0, 0) in a square matrix of order N*N. It has to reach the destination at (n-1, n-1).
Find all possible paths that the rat can take to reach from source to destination. The directions in which the rat can move are 'U'(up), 'D'(down), 'L' (left), 'R' (right).
Value 0 at a cell in the matrix represents that it is blocked and cannot be crossed while value 1 at a cell in the matrix represents that it can be travelled through.
Expected Time Complexity: O((N^2)^4).
Expected Auxiliary Space: O(L*X), L = length of the path, X = number of paths.
Input:
3
4
1 0 0 0 1 1 0 1 0 1 0 0 0 1 1 1
4
1 0 0 0 1 1 0 1 1 1 0 0 0 1 1 1
2
1 0 1 0
Output:
DRDDRR
DDRDRR DRDDRR
-1
'''
# if row or col goes out of bound or arr[r][c] = -1(visited) or arr[r][c] = 0(blocked), then base cond hit so return
# if we reach at bottom right corner, then print path and return
# else change arr[i][j] = -1(visited) and call in all 4 directions
# after processing all 4 directions, make arr[i][j] = 1 again so that same cell can be used by other path
def ratMaze(arr, i, j, path, ans):
if i < 0 or j < 0 or i >= len(arr) or j >= len(arr) or arr[i][j] <= 0:
return
if i == len(arr)-1 and j == len(arr)-1:
ans.append(path)
return
arr[i][j] = -1
ratMaze(arr, i-1, j, path + 'U', ans)
ratMaze(arr, i+1, j, path + 'D', ans)
ratMaze(arr, i, j-1, path + 'L', ans)
ratMaze(arr, i, j+1, path + 'R', ans)
arr[i][j] = 1
def findPath(arr, n):
ans = []
ratMaze(arr, 0, 0, '', ans)
return ' '.join(sorted(ans)) | [
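
# Illustrative driver (not part of the original GFG stub): reproduces the
# first sample input from the docstring above.
if __name__ == '__main__':
    maze = [[1, 0, 0, 0],
            [1, 1, 0, 1],
            [0, 1, 0, 0],
            [0, 1, 1, 1]]
    print(findPath(maze, 4))  # expected output: DRDDRR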
"[email protected]"
] | |
2109ddff14d449f6eecc234e4308e94c0f0b2bd5 | de8b832a3c804837300b9974dc0151d9294fa573 | /handCraft/covarep-master/distribution/py_package/covarep_py/for_redistribution_files_only/covarep_py/__init__.py | 98fa699b65065c6e6d1c946ea4c35f8f7f57c47f | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | YuanGongND/Deep_Speech_Visualization | fcff2ac93e5adffd707b98eb7591f50fe77c1274 | 73a79e3596d9a5ee338eafb9a87b227696de25d1 | refs/heads/master | 2021-07-19T23:00:36.294817 | 2017-10-28T01:04:59 | 2017-10-28T01:04:59 | 105,332,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,524 | py | #Copyright 2015 MathWorks, Inc.
# This template is used to generate an __init__.py file for a particular deployable package.
""" Package for executing deployed MATLAB functions """
from __future__ import print_function
import atexit
import glob
import importlib
import os
import os.path
import pdb
import platform
import sys
import weakref
class _PathInitializer(object):
PLATFORM_DICT = {'Windows': ['PATH','dll',''], 'Linux': ['LD_LIBRARY_PATH','so','libmw'], 'Darwin': ['DYLD_LIBRARY_PATH','dylib','libmw']}
SUPPORTED_PYTHON_VERSIONS = ['2_7', '3_3', '3_4']
RUNTIME_MAJOR_VERSION = '9'
RUNTIME_MINOR_VERSION = '0'
PACKAGE_NAME = 'covarep_py'
def set_interpreter_version(self):
"""Make sure the interpreter version is supported."""
ver = sys.version_info
version = '{0}_{1}'.format(ver[0], ver[1])
if version in _PathInitializer.SUPPORTED_PYTHON_VERSIONS:
self.interpreter_version = version
else:
            raise EnvironmentError("Python {0} is not supported.".format(version))
def __init__(self):
"""Initialize the variables."""
self.arch = ''
self.is_linux = False
self.is_mac = False
self.is_windows = False
self.mr_handle = None
self.ml_handle = None
self.system = ''
self.cppext_handle = None
# path to the folder that stores Python extensions and shared libraries
self.bin_dir = ''
# path to the folder that stores pure Python matlab_pysdk.runtime code (_runtime_dir)
self.runtime_dir = ''
# path to the folder that stores the pure Python matlab mlarray code used for type conversion
self.ml_dir = ''
self.set_interpreter_version()
self.get_platform_info()
this_folder = os.path.dirname(os.path.realpath(__file__))
self.path_file_name = os.path.join(this_folder, 'paths.{0}.txt'.format(self.arch))
self.instances_of_this_package = set([])
def read_path_file(self):
"""Look for a file that lists items to add to path. If present, read it and add the paths."""
filtered_lines = []
if os.path.isfile(self.path_file_name):
pth_file = open(self.path_file_name, 'r')
lines = pth_file.readlines()
for line in lines:
stripped_line = line.strip()
if stripped_line and stripped_line[0] != '#':
filtered_lines.append(stripped_line)
length = len(filtered_lines)
if length == 3:
(bin_dir, runtime_dir, ml_dir) = filtered_lines
if (not os.path.isdir(bin_dir)) or (not os.path.isdir(runtime_dir)) or (not os.path.isdir(ml_dir)):
return False
else:
(self.bin_dir, self.runtime_dir, self.ml_dir) = (bin_dir, runtime_dir, ml_dir)
return True
else:
return False
def write_path_file(self):
"""Write a file that lists items to add to path. If present, it will be overwritten."""
existing_contents = ''
if os.path.isfile(self.path_file_name):
path_file = open(self.path_file_name, 'r')
existing_contents = path_file.readlines()
path_file.close()
path_file = open(self.path_file_name, 'w')
if self.system == 'Windows':
print('# bin dir: added to both OS path and system path', file=path_file)
else:
print('# bin dir: added to system path', file=path_file)
print(self.bin_dir, file=path_file)
print('', file=path_file)
print('# runtime dir: added to system path', file=path_file)
print(self.runtime_dir, file=path_file)
print('', file=path_file)
print('# matlab (mlarray) dir: added to system path', file=path_file)
print(self.ml_dir, file=path_file)
print('', file=path_file)
if existing_contents:
print(existing_contents, file=path_file)
path_file.close()
def get_platform_info(self):
"""Ask Python for the platform and architecture."""
# This will return 'Windows', 'Linux', or 'Darwin' (for Mac).
self.system = platform.system()
if not self.system in _PathInitializer.PLATFORM_DICT:
raise RuntimeError('{0} is not a supported platform.'.format(self.system))
else:
# path_var is the OS-dependent name of the path variable ('PATH', 'LD_LIBRARY_PATH', "DYLD_LIBRARY_PATH')
(self.path_var, self.ext, self.lib_prefix) = _PathInitializer.PLATFORM_DICT[self.system]
if self.system == 'Windows':
self.is_windows = True
bit_str = platform.architecture()[0]
if bit_str == '64bit':
self.arch = 'win64'
elif bit_str == '32bit':
self.arch = 'win32'
else:
raise RuntimeError('{0} is not supported.'.format(bit_str))
elif self.system == 'Linux':
self.is_linux = True
self.arch = 'glnxa64'
elif self.system == 'Darwin':
self.is_mac = True
self.arch = 'maci64'
else:
raise RuntimeError('Operating system {0} is not supported.'.format(self.system))
def get_paths_from_os(self):
"""
Look through the system path for a file whose name contains a runtime version
corresponding to the one with which this package was produced.
"""
# Concatenates the pieces into a string. The double parentheses are necessary.
if self.system == 'Windows':
file_to_find = ''.join((self.lib_prefix, 'mclmcrrt',
_PathInitializer.RUNTIME_MAJOR_VERSION, '_', _PathInitializer.RUNTIME_MINOR_VERSION, '.', self.ext))
elif self.system == 'Linux':
file_to_find = ''.join((self.lib_prefix, 'mclmcrrt', '.', self.ext, '.',
_PathInitializer.RUNTIME_MAJOR_VERSION, '.', _PathInitializer.RUNTIME_MINOR_VERSION))
elif self.system == 'Darwin':
file_to_find = ''.join((self.lib_prefix, 'mclmcrrt', '.',
_PathInitializer.RUNTIME_MAJOR_VERSION, '.', _PathInitializer.RUNTIME_MINOR_VERSION,
'.', self.ext))
else:
raise RuntimeError('Operating system {0} is not supported.'.format(self.system))
path_elements = []
if self.path_var in os.environ:
path_elements = os.environ[self.path_var].split(os.pathsep)
if not path_elements:
friendly_os_name = self.system
if friendly_os_name == 'Darwin':
friendly_os_name = 'Mac'
raise RuntimeError('On {0}, you must set the environment variable "{1}" to a non-empty string. {2}'.format(
friendly_os_name, self.path_var, 'For more details, see the package documentation.'))
path_found = ''
for elem in path_elements:
filename = os.path.join(elem, file_to_find)
if (os.path.isfile(filename)):
path_found = elem
break
if not path_found:
raise RuntimeError('Could not find an appropriate directory for MATLAB or the MATLAB runtime in {0}. Details: {1}'.format(
self.path_var, file_to_find))
# The last nonblank part of the path should be the arch (e.g., "win64").
path_components = path_found.split(os.sep)
if path_components[-1]:
last_path_component = path_components[-1]
else:
# The directory name ended with a slash, so the last item in the list was an empty string. Go back one more.
last_path_component = path_components[-2]
if last_path_component != self.arch:
raise RuntimeError('To call deployed MATLAB code on a {0} machine, you must run a {0} version of Python. Details: {1}'.format(
last_path_component, path_found))
matlabroot = os.path.dirname(os.path.dirname(os.path.normpath(path_found)))
bin_dir = os.path.join(matlabroot, 'bin', self.arch)
runtime_dir = os.path.join(matlabroot, 'toolbox', 'compiler_sdk', 'pysdk_py')
ml_dir = os.path.join(runtime_dir, 'mlarray_dist')
if not os.path.isdir(bin_dir):
raise RuntimeError('Could not find the directory {0}'.format(bin_dir))
if not os.path.isdir(runtime_dir):
raise RuntimeError('Could not find the directory {0}'.format(runtime_dir))
if not os.path.isdir(ml_dir):
raise RuntimeError('Could not find the directory {0}'.format(ml_dir))
(self.bin_dir, self.runtime_dir, self.ml_dir) = (bin_dir, runtime_dir, ml_dir)
def update_paths(self):
"""Update the OS and Python paths."""
#For Windows, add the bin_dir to the OS path. This is unnecessary
#for Linux and Mac, where the OS can find this information via rpath.
if self.is_windows:
os.environ[self.path_var] = self.bin_dir + os.pathsep + os.environ[self.path_var]
#Add all paths to the Python path.
sys.path.insert(0, self.bin_dir)
sys.path.insert(0, self.runtime_dir)
sys.path.insert(0, self.ml_dir)
def import_matlab_pysdk_runtime(self):
"""Import matlab_pysdk.runtime. Must be done after update_paths() and import_cppext() are called."""
try:
self.mr_handle = importlib.import_module('matlab_pysdk.runtime')
except Exception as e:
raise e
if self.mr_handle._runtime_major_version:
if (self.mr_handle._runtime_major_version != _PathInitializer.RUNTIME_MAJOR_VERSION) or (
self.mr_handle._runtime_minor_version != _PathInitializer.RUNTIME_MINOR_VERSION):
raise RuntimeError('Runtime version of package ({0}.{1}) does not match runtime version of previously loaded package ({2}.{3})'.format(
_PathInitializer.RUNTIME_MAJOR_VERSION, _PathInitializer.RUNTIME_MINOR_VERSION,
self.mr_handle._runtime_major_version, self.mr_handle._runtime_minor_version))
else:
self.mr_handle._runtime_major_version = _PathInitializer.RUNTIME_MAJOR_VERSION
self.mr_handle._runtime_minor_version = _PathInitializer.RUNTIME_MINOR_VERSION
self.mr_handle._cppext_handle = self.cppext_handle
def import_matlab(self):
"""Import the matlab package. Must be done after Python system path contains what it needs to."""
try:
self.ml_handle = importlib.import_module('matlab')
except Exception as e:
raise e
def initialize_package(self):
package_handle = self.mr_handle.DeployablePackage(self, self.PACKAGE_NAME, __file__)
self.instances_of_this_package.add(weakref.ref(package_handle))
package_handle.initialize()
return package_handle
def initialize_runtime(self, option_list):
if not self.cppext_handle:
raise RuntimeError('Cannot call initialize_application before import_cppext.')
if self.is_mac:
ignored_option_found = False
for option in option_list:
if option in ('-nodisplay', '-nojvm'):
ignored_option_found = True
break
if ignored_option_found:
print('WARNING: Options "-nodisplay" and "-nojvm" are ignored on Mac.')
print('They must be passed to mwpython in order to take effect.')
self.cppext_handle.initializeApplication(option_list)
def terminate_runtime(self):
if not self.cppext_handle:
raise RuntimeError('Cannot call terminate_application before import_cppext.')
self.cppext_handle.terminateApplication()
def import_cppext(self):
self.cppext_handle = importlib.import_module("matlabruntimeforpython" + self.interpreter_version)
try:
_pir = _PathInitializer()
_pir.get_paths_from_os()
_pir.update_paths()
_pir.import_cppext()
_pir.import_matlab_pysdk_runtime()
_pir.import_matlab()
except Exception as e:
print("Exception caught during initialization of Python interface. Details: {0}".format(e))
raise
# We let the program exit normally.
def initialize():
"""
Initialize package and return a handle.
Initialize a package consisting of one or more deployed MATLAB functions. The return
value is used as a handle on which any of the functions can be executed. To wait
for all graphical figures to close before continuing, call wait_for_figures_to_close()
on the handle. To close the package, call terminate(), quit() or exit() (which are
synonymous) on the handle. The terminate() function is executed automatically when the
script or session ends.
Returns
handle - used to execute deployed MATLAB functions and to call terminate()
"""
return _pir.initialize_package()
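
# Illustrative usage sketch; "my_function" is a hypothetical deployed MATLAB
# function -- the real callable names depend on what was compiled into this
# package:
#
#   import covarep_py
#   pkg = covarep_py.initialize()
#   result = pkg.my_function(...)
#   pkg.terminate()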
def initialize_runtime(option_list):
"""
Initialize runtime with a list of startup options.
Initialize the MATLAB Runtime with a list of startup options that will affect
all packages opened within the script or session. If it is not called
explicitly, it will be executed automatically, with an empty list of options,
by the first call to initialize(). Do not call initialize_runtime() after
calling initialize().
There is no corresponding terminate_runtime() call. The runtime is terminated
automatically when the script or session ends.
Parameters
option_list - Python list of options; valid options are:
-nodisplay (suppresses display functionality; Linux only)
-nojvm (disables the Java Virtual Machine)
"""
if option_list:
if not isinstance(option_list, list) and not isinstance(option_list, tuple):
raise SyntaxError('initialize_runtime takes a list or tuple of strings.')
_pir.initialize_runtime(option_list)
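
# Illustrative call using the options documented above; it must run before
# the first initialize():
#
#   import covarep_py
#   covarep_py.initialize_runtime(['-nojvm'])
#   pkg = covarep_py.initialize()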
# terminate_runtime() is intentionally omitted. Instead, when running interactively,
# the user should call exit(). When running a script, the runtime will automatically be
# terminated when the script ends.
@atexit.register
def __exit_packages():
for package in _pir.instances_of_this_package:
if package() is not None:
package().terminate()
| [
"[email protected]"
] | |
5bbcbb82c02d8c1cfa203245472a07dafc8af5ca | 81f7f4a65a068ed2483b537f6675a5f46235af88 | /inplace_activity_stream/urls.py | b6cd2b3e850c994e4c20377bdb3795bf119cb6be | [
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | ebrelsford/django-inplace-activity-stream | f561c7cf0c7180426d3ea9cd11abba1cb6744e60 | a495e42ffdc37d5e800f71ab97ed6975a1849224 | refs/heads/master | 2020-12-24T13:16:17.119724 | 2017-05-09T14:09:09 | 2017-05-09T14:09:09 | 11,783,325 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 320 | py | from django.conf.urls import url
from .views import PlaceActivityFeed, PlaceActivityListView
urlpatterns = [
url(r'^feeds/all/$',
PlaceActivityFeed(),
name='activitystream_feed',
),
url(r'^',
PlaceActivityListView.as_view(),
name='activitystream_activity_list'
),
]
| [
"[email protected]"
] | |
70e69736bdd3c3f577144a9714bd7664633f07ff | 0d0afd1dce972b4748ce8faccd992c019794ad9e | /integra/aspseg_personalizacao/wizards/asp_relatorio.py | 9cbd795322404460d5cca2bd91f68067742147dd | [] | no_license | danimaribeiro/odoo-erp | e2ca2cfe3629fbedf413e85f7c3c0453fd16941e | d12577bf7f5266b571cbedeb930720d653320e96 | refs/heads/master | 2020-01-23T21:32:16.149716 | 2016-11-05T15:35:40 | 2016-11-05T15:35:40 | 67,892,809 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,105 | py | # -*- encoding: utf-8 -*-
import os
from osv import orm, fields, osv
from pybrasil.data import parse_datetime, formata_data
import base64
from integra_rh.models.hr_payslip_input import primeiro_ultimo_dia_mes, mes_passado
from relatorio import *
from finan.wizard.relatorio import *
import csv
from pybrasil.base import DicionarioBrasil
DIR_ATUAL = os.path.abspath(os.path.dirname(__file__))
JASPER_BASE_DIR = os.path.join(DIR_ATUAL, '../../reports/base/')
class asp_relatorio(osv.osv_memory):
_name = 'asp.relatorio'
_description = 'asp.relatorio'
_rec_name = 'nome'
_columns = {
'data_inicial': fields.date(u'Data inicial'),
'data_final': fields.date(u'Data final'),
'nome': fields.char(u'Nome do arquivo', 120, readonly=True),
'arquivo': fields.binary(u'Arquivo', readonly=True),
'nome_csv': fields.char(u'Nome do arquivo CSV', 120, readonly=True),
'arquivo_csv': fields.binary(u'Arquivo CSV', readonly=True),
'location_id': fields.many2one('stock.location', u'Local do Estoque'),
'partner_id': fields.many2one('res.partner', u'Cliente'),
'company_id': fields.many2one('res.company', u'Empresa'),
'municipio_id': fields.many2one('sped.municipio', u'Cidade'),
'estado_id': fields.many2one('sped.estado', u'Estado'),
'user_id': fields.many2one('res.users', u'Representate'),
}
_defaults = {
'data_inicial': fields.date.today,
'data_final': fields.date.today,
}
def gera_estoque_minimo(self, cr, uid, ids, context={}):
if not ids:
return {}
for rel_obj in self.browse(cr, uid, ids):
location_id = rel_obj.location_id.id
sql = """select * from (
select
pc.nome_completo,
pp.default_code,
pt.name,
pp.variants,
coalesce((select sw.product_min_qty
from stock_warehouse_orderpoint sw
where sw.product_id = pp.id), 0) as minimo_stock,
coalesce((select
sum(
case
when es.tipo = 'S' THEN
es.quantidade * -1
else
es.quantidade
end ) as quantidade
from estoque_entrada_saida es
where es.product_id = pp.id
and es.location_id = sm.location_id ), 0) as quantidade,
coalesce((select distinct
sum(sol.quantidade) as quantidade_venda
from sale_order_line sol
where sol.product_id = pp.id
and state = 'Confirmed'), 0) as quantidade_venda,
coalesce((select distinct
sum(pol.product_qty) as pedido
from purchase_order_line pol
where pol.product_id = pp.id
and state = 'confirmed'), 0) as pedido
from estoque_entrada_saida sm
join product_product pp on pp.id = sm.product_id
join product_template pt on pt.id = pp.product_tmpl_id
join product_category pc on pc.id = pt.categ_id
where sm.location_id = """ + str(location_id) + """
order by
pc.id , pt.name)
as a
where
minimo_stock > 0
;"""
print(sql)
cr.execute(sql)
dados = cr.fetchall()
if len(dados) == 0:
raise osv.except_osv(u'Atenção', u'Não há dados para gerar o relatório, com base nos parâmetros informados!')
linhas = []
for categoria, codigo, produto, variants, minimo_stock, quantidade, quantidade_venda, pedido in dados:
linha = DicionarioBrasil()
linha['categoria'] = categoria
linha['codigo'] = codigo
linha['produto'] = produto
linha['variants'] = variants
linha['minimo_stock'] = minimo_stock
linha['quantidade'] = quantidade
linha['quantidade_venda'] = quantidade_venda
linha['pedido'] = pedido
linhas.append(linha)
rel = FinanRelatorioAutomaticoRetrato()
rel.title = u'Produtos para Comprar'
rel.colunas = [
['codigo' , 'C', 10, u'Código', False],
['produto' , 'C', 50, u'Descrição', False],
['variants', 'C', 15, u'Fornecedor', False],
['quantidade' , 'F', 10, u'Estoque.', True],
['minimo_stock' , 'F', 10, u'Minimo.', True],
['quantidade_venda' , 'I', 10, u'Venda', True],
['pedido' , 'I', 10, u'Compra', True],
]
rel.monta_detalhe_automatico(rel.colunas)
rel.grupos = [
['categoria', u'Categoria', False],
]
rel.monta_grupos(rel.grupos)
location_obj = self.pool.get('stock.location').browse(cr, uid, location_id)
rel.band_page_header.elements[-1].text = u'Local: ' + location_obj.name
pdf = gera_relatorio(rel, linhas)
dados = {
'nome': 'produto_para_compra.pdf',
'arquivo': base64.encodestring(pdf),
}
rel_obj.write(dados)
return True
def gera_relatorio_clientes(self, cr, uid, ids, context={}):
if not ids:
return {}
for rel_obj in self.browse(cr, uid, ids):
sql = """select
rp.name,
cli.name as cliente,
coalesce(cli.email_nfe,'') as email,
coalesce(cli.bairro,'') as bairro,
coalesce(cli.fone,'') as fone,
coalesce(cli.endereco,'') ||', nº' || coalesce(cli.numero,'') as endereco,
case
when cli.active = true then
'Ativo'
else
'Inativo'
end as ativo,
coalesce(u.name, 'Sem representante') as representante,
coalesce(spu.nome, 'Sem municipio') as municipio,
coalesce(se.nome, 'Sem estado') as estado
from res_partner cli
left join res_users u on u.id = cli.user_id
left join sped_municipio spu on spu.id = cli.municipio_id
left join sped_estado se on se.id = spu.estado_id
left join res_company c on c.id = cli.company_id
left join res_partner rp on rp.id = c.partner_id
where
cli.customer = true
"""
if rel_obj.company_id:
sql += """
and c.id = """ + str(rel_obj.company_id.id)
if rel_obj.partner_id:
sql += """
and cli.id = """ + str(rel_obj.partner_id.id)
if rel_obj.municipio_id:
sql += """
and spu.id = """ + str(rel_obj.municipio_id.id)
if rel_obj.estado_id:
sql += """
and se.id = """ + str(rel_obj.estado_id.id)
if rel_obj.user_id:
sql += """
and u.id = """ + str(rel_obj.user_id.id)
sql += """
order by
rp.name, se.nome, u.name, spu.nome, cli.name
;"""
print(sql)
cr.execute(sql)
dados = cr.fetchall()
if len(dados) == 0:
raise osv.except_osv(u'Atenção', u'Não há dados para gerar o relatório, com base nos parâmetros informados!')
linhas = []
for empresa, cliente, email, bairro, fone, endereco, ativo, representante, municipio, estado in dados:
linha = DicionarioBrasil()
linha['empresa'] = empresa
linha['cliente'] = cliente
linha['email'] = email
linha['bairro'] = bairro
linha['fone'] = fone
linha['endereco'] = endereco
linha['ativo'] = ativo
linha['representante'] = representante
linha['municipio'] = municipio
linha['estado'] = estado
linhas.append(linha)
rel = FinanRelatorioAutomaticoRetrato()
rel.title = u'Clientes por Estado'
rel.colunas = [
                ['cliente' , 'C', 40, u'Cliente', False],
['municipio','C', 20, u'Cidade', False],
['bairro' , 'C', 15, u'Bairro', False],
['email', 'C', 25, u'Email', False],
['fone' , 'C', 15, u'Fone', False],
['endereco' , 'C', 30, u'Endereco', False],
                ['ativo' , 'C', 8, u'Situação', False],
]
rel.monta_detalhe_automatico(rel.colunas)
rel.grupos = [
['empresa', u'Empresa', False],
['estado', u'Estado', False],
['representante', u'Representate', False],
]
rel.monta_grupos(rel.grupos)
if rel_obj.company_id:
company_id = rel_obj.company_id.id
else:
company_id = 1
company_obj = self.pool.get('res.company').browse(cr, uid, company_id)
rel.band_page_header.elements[-1].text = u'Empresa: ' + company_obj.partner_id.name
pdf = gera_relatorio(rel, linhas)
dados = {
'nome': u'clientes_por_estado.pdf',
'arquivo': base64.encodestring(pdf),
}
rel_obj.write(dados)
return True
asp_relatorio()
| [
"[email protected]"
] | |
8cb1198739853e29689237999b1e3c3375af0e06 | 31e00afe8f782bd214f8e32949be928a51e5de39 | /CreditCalculator/Credit Calculator/task/creditcalc/creditcalc.py | a56e2de2966fd6bf16c2ab712fd91ba2d6ad1650 | [] | no_license | akocur/education | 65e2a0640bab5e9939c5692333fa2f500c9feb0b | d0a890861cd83dcc61ff2af6cfbb75c157fbaf02 | refs/heads/master | 2023-05-24T02:38:04.695665 | 2020-10-26T14:09:07 | 2020-10-26T14:09:07 | 269,410,671 | 0 | 0 | null | 2023-05-22T22:47:45 | 2020-06-04T16:32:11 | HTML | UTF-8 | Python | false | false | 3,761 | py | from math import log, ceil, floor
import argparse
class CreditCalculator:
def __init__(self, type_calc='annuity', payment=0, periods=0, interest=0, principal=0):
self.type = type_calc
self.payment = payment
self.periods = periods
self.interest = interest
self.principal = principal
self.interest_rate = self.interest / 1200
self.total_payments = 0
def __repr__(self):
return f'''
type: {self.type}
payment: {self.payment}
periods: {self.periods}
interest: {self.interest}
principal: {self.principal}
interest_rate: {self.interest_rate}
total_payments: {self.total_payments}
'''
def overpayment(self):
print(f'\nOverpayment = {self.total_payments - self.principal}')
def are_errors(self):
if self.type not in ['annuity', 'diff']:
return True
if self.interest <= 0 or self.payment < 0 or self.periods < 0 or self.principal < 0:
return True
if self.type == 'annuity':
if self.payment == self.periods == self.principal == 0:
return True
else:
if self.payment:
return True
def calculate(self):
if self.are_errors():
print('Incorrect parameters')
return
if self.type == 'annuity':
if self.principal == 0:
self.principal = floor(self.payment * (pow(1 + self.interest_rate, self.periods) - 1) /
(self.interest_rate * pow(1 + self.interest_rate, self.periods)))
print(f'Your credit principal = {self.principal}!')
elif self.payment == 0:
self.payment = ceil(self.principal * self.interest_rate * pow(1 + self.interest_rate, self.periods) /
(pow(1 + self.interest_rate, self.periods) - 1))
print(f'Your annuity payment = {self.payment}!')
elif self.periods == 0:
self.periods = ceil(log(self.payment / (self.payment - self.interest_rate * self.principal),
1 + self.interest_rate))
n_years = self.periods // 12
n_month = self.periods % 12
n_years_str = '' if n_years < 1 else f'{n_years} year{"s" if n_years > 1 else ""}'
and_str = ' and ' if n_years > 0 and n_month > 0 else ''
n_month_str = '' if n_month < 1 else f'{n_month} month{"s" if n_month > 1 else ""}'
print(f'You need {n_years_str}{and_str}{n_month_str} to repay this credit!')
self.total_payments = self.payment * self.periods
elif self.type == 'diff':
sum_payment = 0
for month in range(1, self.periods + 1):
payment = ceil(self.principal / self.periods + self.interest_rate * (self.principal - self.principal *
(month - 1) / self.periods))
print(f'Month {month}: paid out {payment}')
sum_payment += payment
self.total_payments = sum_payment
self.overpayment()
parser_args = argparse.ArgumentParser()
parser_args.add_argument('--type')
parser_args.add_argument('--payment', type=int, default=0)
parser_args.add_argument('--principal', type=int, default=0)
parser_args.add_argument('--periods', type=int, default=0)
parser_args.add_argument('--interest', type=float, default=0)
args = parser_args.parse_args()
credit_calc = CreditCalculator(args.type, args.payment, args.periods, args.interest, args.principal)
credit_calc.calculate()
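
# Illustrative invocation (hypothetical figures): a 1,000,000 principal
# repaid over 60 months at 10% annual interest:
#
#   python creditcalc.py --type=annuity --principal=1000000 --periods=60 --interest=10
#
# prints the monthly annuity payment and the resulting overpayment.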
| [
"[email protected]"
] | |
63702ccdfa0d0fe8c64cf9c7db3880461a484a25 | f3498e863e6500508f486added078aa462239de2 | /devel/lib/python2.7/dist-packages/mavros_msgs/__init__.py | 7e8803c1ed66e694312bfbf8a0b6d0cd5d62e556 | [] | no_license | luiscaiza/catkin_ws | 82bf39d25c3e732a4a3e5408b81de7992373cb87 | ac038d7623b27217309adbbd38ace6e0bb528d61 | refs/heads/master | 2020-04-16T04:41:09.004587 | 2019-01-15T19:17:28 | 2019-01-15T19:17:28 | 165,276,961 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 99 | py | /home/luis/catkin_ws/devel/.private/mavros_msgs/lib/python2.7/dist-packages/mavros_msgs/__init__.py | [
"[email protected]"
] | |
5b68f9839dd7d14e82c6f535fb0febdf0d995910 | 3bae1ed6460064f997264091aca0f37ac31c1a77 | /extensions/logserver/ScribedCommand.py | 3d234d75264afabac9b90bffa6679d4182ed3010 | [] | no_license | racktivity/ext-pylabs-core | 04d96b80ac1942754257d59e91460c3a141f0a32 | 53d349fa6bee0ccead29afd6676979b44c109a61 | refs/heads/master | 2021-01-22T10:33:18.523799 | 2017-06-08T09:09:28 | 2017-06-08T09:09:28 | 54,314,984 | 0 | 0 | null | 2017-06-08T09:09:29 | 2016-03-20T11:55:01 | Python | UTF-8 | Python | false | false | 4,544 | py |
from pylabs import q
from pylabs.inifile import IniFile
from pylabs.baseclasses.CommandWrapper import CommandWrapper
from pylabs.enumerators import AppStatusType
import time
class ScribedCommand(CommandWrapper):
"""
A basic ScribedCommandWrapper to start/stop/restart the Scribe server
"""
# def _getPidFile(self):
# return q.system.fs.joinPaths(q.dirs.pidDir, "scribed.pid")
def _getScribedBinary(self):
return q.system.fs.joinPaths(q.dirs.binDir,"scribed")
def _getScribeCTRLBinary(self):
return q.system.fs.joinPaths(q.dirs.binDir, "scribe_ctrl")
def _getDefaultConfigFile(self):
return q.system.fs.joinPaths(q.dirs.cfgDir, 'scribe_logserver.conf')
def _getPort(self):
serverIniFile = IniFile(q.system.fs.joinPaths(q.dirs.cfgDir, 'qconfig', 'logservermain.cfg'))
return serverIniFile.getValue('main', 'port')
def _getStatus(self, port):
        #@todo: use the status command instead of the version command to get the server status once the status problem is resolved
command = "%(SCRIBECTRLCommand)s version %(port)s" % {"SCRIBECTRLCommand":self._getScribeCTRLBinary(), "port":port}
exitCode, output = q.system.process.execute(command, dieOnNonZeroExitCode = False, outputToStdout = False)
        # status command returns 2 if scribe is alive, else returns 3 (?)
        if exitCode:
return AppStatusType.HALTED
return AppStatusType.RUNNING
def start(self, configFile = None, timeout = 5):
"""
Start Scribe Server
@param configFile: configuration file for describing the different stores
@type configFile: string
"""
port = self._getPort()
if self._getStatus(port) == AppStatusType.RUNNING:
q.console.echo('Scribe Server on port %s already running'%port)
return
if not configFile:
configFile = self._getDefaultConfigFile()
q.logger.log('Starting scribe server with port %s using config file %s'%(port, configFile), 5)
command = "%(SCRIBEDCommand)s -p %(port)s -c %(configFile)s 2> %(scribeout)s&" % {"SCRIBEDCommand":self._getScribedBinary(), "port": port, "configFile":configFile, 'scribeout': q.system.fs.joinPaths(q.dirs.logDir, 'logserver.out')}
exitCode, output = q.system.process.execute(command, dieOnNonZeroExitCode = False, outputToStdout = True)
t = timeout
started = False
while t>0:
if q.system.process.checkProcess('bin/scribed') == 0:
started = True
break
t = t - 1
time.sleep(1)
if not started:
q.logger.log("Scribe could not be started in %d seconds" % timeout, 8)
raise RuntimeError("Scribe could not be started in %d seconds" % timeout)
q.logger.log('Scribe server on port %s and config file %s started Successfully'%(port, configFile), 3)
q.console.echo("Scribe started successfully.")
def stop(self):
"""
Stop Scribe Server
"""
port = self._getPort()
if self._getStatus(port) == AppStatusType.HALTED:
q.console.echo('Scribe Server on port %s is not running'%port)
return
command = "%(SCRIBECTRLCommand)s stop %(port)s" % {"SCRIBECTRLCommand":self._getScribeCTRLBinary(), "port":port}
exitCode, output = q.system.process.execute(command, dieOnNonZeroExitCode = False, outputToStdout = True)
if exitCode and output:
raise RuntimeError("Scribe could not be stopped. Reason: %s"%output)
q.console.echo("Scribe stopped successfully")
def restart(self):
"""
Restart Scribe Server
"""
self.stop()
self.start()
def getStatus(self):
"""
Check the live status of the scribe server
"""
return self._getStatus(self._getPort())
def getDetailedStatus(self):
"""
Used the status command to get detailed status of the scribe server
"""
command = "%(SCRIBECTRLCommand)s status %(port)s" % {"SCRIBECTRLCommand":self._getScribeCTRLBinary(), "port":self._getPort()}
exitCode, output = q.system.process.execute(command, dieOnNonZeroExitCode = False, outputToStdout = False)
#status command returns 2 if scribe is alive else returns 3 ?????
if exitCode == 3:
return AppStatusType.HALTED
return AppStatusType.RUNNING
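
# Illustrative usage sketch (assumes a pylabs environment where this wrapper
# is instantiated; methods as defined above):
#
#   cmd = ScribedCommand()
#   cmd.start()                # uses cfgDir/scribe_logserver.conf by default
#   print(cmd.getStatus())     # AppStatusType.RUNNING or AppStatusType.HALTED
#   cmd.stop()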
| [
"devnull@localhost"
] | devnull@localhost |
370c92a901face3a9787726692e591f2249350aa | b5921afe6ea5cd8b3dcfc83147ab5893134a93d0 | /tl/plugs/timeline/klacht.py | 1834e62f30873b8c7ab9090c22771b0cad15963c | [
"LicenseRef-scancode-other-permissive"
] | permissive | techdragon/tl | aaeb46e18849c04ad436e0e786401621a4be82ee | 6aba8aeafbc92cabdfd7bec11964f7c3f9cb835d | refs/heads/master | 2021-01-17T16:13:18.636457 | 2012-11-02T10:08:10 | 2012-11-02T10:08:10 | 9,296,808 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,237 | py | # tl/plugs/timeline/klacht.py
#
#
""" het timeline klachten command. """
## tl imports
from tl.utils.name import stripname
from tl.lib.datadir import getdatadir
from tl.lib.persist import TimedPersist, PersistCollection
from tl.lib.commands import cmnds
from tl.lib.examples import examples
from tl.id import get_uid, get_id
## basic imports
import logging
import time
import os
## getklachtdir function
def getklachtdir(username):
return os.path.join(getdatadir(), "timeline", stripname(username), "klacht")
## Klacht class
class Klacht(TimedPersist):
def __init__(self, username, klacht, default={}, ddir=None, origin=None, *args, **kwargs):
TimedPersist.__init__(self, username, default=default, ddir=getklachtdir(username), *args, **kwargs)
self.data.klacht = self.data.klacht or klacht or "geen text gegeven"
self.data.username = self.data.username or username or "anon"
self.data.uid = self.data.uid or get_uid(username)
self.data.origin = self.data.origin or origin or get_id()
class Klachten(PersistCollection): pass
## complaint command
def handle_klacht(bot, event):
if not event.rest: event.reply("waar wil je over klagen?") ; return
k = Klacht(event.user.data.name, event.rest)
k.save()
event.reply("klacht is genoteerd op %s" % time.ctime(k.data.created))
cmnds.add("klacht", handle_klacht, ["OPER", "USER", "GUEST"])
examples.add(
"klacht",
"met het klacht commando kan je laten registeren wat je intiept, een overzicht kan je krijgen door het klachten commando te geven",
"klacht die GGZ NHN is maar een rukkerig zooitje proviteurs die betaalt krijgen om nee te zeggen"
)
def handle_klachten(bot, event):
klachten = Klachten(getklachtdir(event.user.data.name))
result = []
for k in klachten.dosort(): result.append("%s - %s" % (k.data.klacht, time.ctime(k.data.created)))
if result: event.reply("klachten van %s: " % event.user.data.name, result, dot="indent", nosort=True)
else: event.reply("ik kan nog geen klachten vinden voor %s" % event.uid)
cmnds.add("klachten", handle_klachten, ["OPER", "USER", "GUEST"])
examples.add("klachten", "laat alle klachten zien", "klachten")
| [
"[email protected]"
] | |
4a1a45cb117ab58f4f8273b78fa8fba8fabe3cd5 | 2ffdd45472fc20497123bffc3c9b94d9fe8c9bc8 | /venv/Lib/site-packages/pip/_internal/network/auth.py | db016f2af2fd5d56a211d566cafa9d00e4118b0b | [] | no_license | mbea-int/expense-tracker-app | fca02a45623e24ed20d201f69c9a892161141e0c | 47db2c98ed93efcac5330ced2b98d2ca365e6017 | refs/heads/master | 2023-05-10T14:29:04.935218 | 2021-06-04T15:10:00 | 2021-06-04T15:10:00 | 373,816,157 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,512 | py | """Network Authentication Helpers
Contains interface (MultiDomainBasicAuth) and associated glue code for
providing credentials in the context of network requests.
"""
import logging
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.utils import get_netrc_auth
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._internal.utils.misc import (
ask,
ask_input,
ask_password,
remove_auth_from_url,
split_auth_netloc_from_url,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Dict, Optional, Tuple, List, Any
from pip._internal.vcs.versioncontrol import AuthInfo
from pip._vendor.requests.models import Response, Request
Credentials = Tuple[str, str, str]
logger = logging.getLogger(__name__)
try:
import keyring # noqa
except ImportError:
keyring = None
except Exception as exc:
logger.warning("Keyring is skipped due to an exception: %s", str(exc))
keyring = None
def get_keyring_auth(url, username):
# type: (str, str) -> Optional[AuthInfo]
"""Return the tuple auth for a given url from keyring."""
if not url or not keyring:
return None
try:
try:
get_credential = keyring.get_credential
except AttributeError:
pass
else:
logger.debug("Getting credentials from keyring for %s", url)
cred = get_credential(url, username)
if cred is not None:
return cred.username, cred.password
return None
if username:
logger.debug("Getting password from keyring for %s", url)
password = keyring.get_password(url, username)
if password:
return username, password
except Exception as exc:
logger.warning("Keyring is skipped due to an exception: %s", str(exc))
return None
class MultiDomainBasicAuth(AuthBase):
def __init__(self, prompting=True, index_urls=None):
# type: (bool, Optional[List[str]]) -> None
self.prompting = prompting
self.index_urls = index_urls
self.passwords = {} # type: Dict[str, AuthInfo]
# When the user is prompted to enter credentials and keyring is
# available, we will offer to save them. If the user accepts,
# this value is set to the credentials they entered. After the
# request authenticates, the caller should call
# ``save_credentials`` to save these.
self._credentials_to_save = None # type: Optional[Credentials]
def _get_index_url(self, url):
# type: (str) -> Optional[str]
"""Return the original index URL matching the requested URL.
Cached or dynamically generated credentials may work against
the original index URL rather than just the netloc.
The provided url should have had its username and password
removed already. If the original index url had credentials then
they will be included in the return value.
Returns None if no matching index was found, or if --no-index
was specified by the user.
"""
if not url or not self.index_urls:
return None
for u in self.index_urls:
prefix = remove_auth_from_url(u).rstrip("/") + "/"
if url.startswith(prefix):
return u
return None
def _get_new_credentials(self, original_url, allow_netrc=True, allow_keyring=True):
# type: (str, bool, bool) -> AuthInfo
"""Find and return credentials for the specified URL."""
# Split the credentials and netloc from the url.
url, netloc, url_user_password = split_auth_netloc_from_url(original_url)
# Start with the credentials embedded in the url
username, password = url_user_password
if username is not None and password is not None:
logger.debug("Found credentials in url for %s", netloc)
return url_user_password
# Find a matching index url for this request
index_url = self._get_index_url(url)
if index_url:
# Split the credentials from the url.
index_info = split_auth_netloc_from_url(index_url)
if index_info:
index_url, _, index_url_user_password = index_info
logger.debug("Found index url %s", index_url)
# If an index URL was found, try its embedded credentials
if index_url and index_url_user_password[0] is not None:
username, password = index_url_user_password
if username is not None and password is not None:
logger.debug("Found credentials in index url for %s", netloc)
return index_url_user_password
# Get creds from netrc if we still don't have them
if allow_netrc:
netrc_auth = get_netrc_auth(original_url)
if netrc_auth:
logger.debug("Found credentials in netrc for %s", netloc)
return netrc_auth
# If we don't have a password and keyring is available, use it.
if allow_keyring:
# The index url is more specific than the netloc, so try it first
kr_auth = get_keyring_auth(index_url, username) or get_keyring_auth(
netloc, username
)
if kr_auth:
logger.debug("Found credentials in keyring for %s", netloc)
return kr_auth
return username, password
def _get_url_and_credentials(self, original_url):
# type: (str) -> Tuple[str, Optional[str], Optional[str]]
"""Return the credentials to use for the provided URL.
If allowed, netrc and keyring may be used to obtain the
correct credentials.
Returns (url_without_credentials, username, password). Note
that even if the original URL contains credentials, this
function may return a different username and password.
"""
url, netloc, _ = split_auth_netloc_from_url(original_url)
# Use any stored credentials that we have for this netloc
username, password = self.passwords.get(netloc, (None, None))
if username is None and password is None:
# No stored credentials. Acquire new credentials without prompting
# the user. (e.g. from netrc, keyring, or the URL itself)
username, password = self._get_new_credentials(original_url)
if username is not None or password is not None:
# Convert the username and password if they're None, so that
# this netloc will show up as "cached" in the conditional above.
# Further, HTTPBasicAuth doesn't accept None, so it makes sense to
# cache the value that is going to be used.
username = username or ""
password = password or ""
# Store any acquired credentials.
self.passwords[netloc] = (username, password)
assert (
# Credentials were found
(username is not None and password is not None)
or
# Credentials were not found
(username is None and password is None)
), "Could not load credentials from url: {}".format(original_url)
return url, username, password
def __call__(self, req):
# type: (Request) -> Request
# Get credentials for this request
url, username, password = self._get_url_and_credentials(req.url)
# Set the url of the request to the url without any credentials
req.url = url
if username is not None and password is not None:
# Send the basic auth with this request
req = HTTPBasicAuth(username, password)(req)
# Attach a hook to handle 401 responses
req.register_hook("response", self.handle_401)
return req
# Factored out to allow for easy patching in tests
def _prompt_for_password(self, netloc):
# type: (str) -> Tuple[Optional[str], Optional[str], bool]
username = ask_input("User for {}: ".format(netloc))
if not username:
return None, None, False
auth = get_keyring_auth(netloc, username)
if auth and auth[0] is not None and auth[1] is not None:
return auth[0], auth[1], False
password = ask_password("Password: ")
return username, password, True
# Factored out to allow for easy patching in tests
def _should_save_password_to_keyring(self):
# type: () -> bool
if not keyring:
return False
return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
def handle_401(self, resp, **kwargs):
# type: (Response, **Any) -> Response
# We only care about 401 responses, anything else we want to just
# pass through the actual response
if resp.status_code != 401:
return resp
# We are not able to prompt the user so simply return the response
if not self.prompting:
return resp
parsed = urllib_parse.urlparse(resp.url)
# Prompt the user for a new username and password
username, password, save = self._prompt_for_password(parsed.netloc)
# Store the new username and password to use for future requests
self._credentials_to_save = None
if username is not None and password is not None:
self.passwords[parsed.netloc] = (username, password)
# Prompt to save the password to keyring
if save and self._should_save_password_to_keyring():
self._credentials_to_save = (parsed.netloc, username, password)
# Consume content and release the original connection to allow our new
# request to reuse the same one.
resp.content
resp.raw.release_conn()
# Add our new username and password to the request
req = HTTPBasicAuth(username or "", password or "")(resp.request)
req.register_hook("response", self.warn_on_401)
# On successful request, save the credentials that were used to
# keyring. (Note that if the user responded "no" above, this member
# is not set and nothing will be saved.)
if self._credentials_to_save:
req.register_hook("response", self.save_credentials)
# Send our new request
new_resp = resp.connection.send(req, **kwargs)
new_resp.history.append(resp)
return new_resp
def warn_on_401(self, resp, **kwargs):
# type: (Response, **Any) -> None
"""Response callback to warn about incorrect credentials."""
if resp.status_code == 401:
logger.warning(
"401 Error, Credentials not correct for %s", resp.request.url
)
def save_credentials(self, resp, **kwargs):
# type: (Response, **Any) -> None
"""Response callback to save credentials on success."""
assert keyring is not None, "should never reach here without keyring"
if not keyring:
return
creds = self._credentials_to_save
self._credentials_to_save = None
if creds and resp.status_code < 400:
try:
logger.info("Saving credentials to keyring")
keyring.set_password(*creds)
except Exception:
logger.exception("Failed to save credentials")
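
# Illustrative sketch (pip-internal class, shown only to clarify the
# requests.AuthBase contract; not a supported public API):
#
#   session = requests.Session()
#   session.auth = MultiDomainBasicAuth(index_urls=["https://pypi.org/simple"])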
| [
"[email protected]"
] | |
c830aa73b9ef83c3a14fb5861a563fa13de4758f | f2a5311fdca8d71535565e1ec3fc2b79e55ab7aa | /main2d.py | c35349151b55013dad39d01b3ed3cb0a83619153 | [] | no_license | xzxzmmnn/pytorch-convcnp | 59bd9036cea88479862a622749408fd73f9d132b | 31340d5cf4b537a240075f93c8d4aff6f10d8931 | refs/heads/master | 2020-12-28T11:53:13.337832 | 2020-01-01T07:58:27 | 2020-01-01T07:58:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,307 | py | import argparse
import torch
import torch.optim as optim
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter
import torchvision.transforms as tf
from torchvision.datasets import MNIST, CIFAR10
from fastprogress import master_bar, progress_bar
from convcnp import ConvCNP2d, channel_last
from visualize import plot_all_2d, convert_tfboard
def train(model, dataloader, optimizer):
model.train()
avg_loss = 0
for index, (I, _) in enumerate(progress_bar(dataloader, parent=args.mb)):
I = I.to(args.device)
optimizer.zero_grad()
pred_dist = model(I)
        loss = -pred_dist.log_prob(channel_last(I)).sum(-1).mean()
loss.backward()
optimizer.step()
avg_loss -= loss.item() * I.size(0)
if index % 10 == 0:
args.mb.child.comment = 'loss={:.3f}'.format(loss.item())
return avg_loss / len(dataloader.dataset)
def validate(model, dataloader):
model.eval()
    I, _ = next(iter(dataloader))
I = I.to(args.device)
with torch.no_grad():
Mc, f, dist = model.complete(I)
likelihood = dist.log_prob(channel_last(I)).sum(-1).mean()
        rmse = (I - f).pow(2).mean().sqrt()
image = plot_all_2d(I, Mc, f)
image = convert_tfboard(image)
return likelihood, rmse, image
def main():
if args.dataset == 'mnist':
trainset = MNIST('~/data/mnist', train=True, transform=tf.ToTensor())
testset = MNIST('~/data/mnist', train=False, transform=tf.ToTensor())
cnp = ConvCNP2d(channel=1)
elif args.dataset == 'cifar10':
trainset = CIFAR10('~/data/cifar10', train=True, transform=tf.ToTensor())
testset = CIFAR10('~/data/cifar10', train=False, transform=tf.ToTensor())
cnp = ConvCNP2d(channel=3)
trainloader = DataLoader(trainset, batch_size=args.batch_size, shuffle=True, num_workers=8)
testloader = DataLoader(testset, batch_size=16, shuffle=True)
cnp = cnp.to(args.device)
optimizer = optim.Adam(cnp.parameters(), lr=args.learning_rate)
args.mb = master_bar(range(1, args.epochs + 1))
for epoch in args.mb:
avg_train_loss = train(cnp, trainloader, optimizer)
valid_ll, rmse, image = validate(cnp, testloader)
args.writer.add_scalar('train/likelihood', avg_train_loss, epoch)
args.writer.add_scalar('validate/likelihood', valid_ll, epoch)
args.writer.add_scalar('validate/rmse', rmse, epoch)
args.writer.add_image('validate/image', image, epoch)
torch.save(cnp.state_dict(), filename)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--batch-size', '-B', type=int, default=16)
parser.add_argument('--learning-rate', '-LR', type=float, default=5e-4)
parser.add_argument('--epochs', '-E', type=int, default=100)
parser.add_argument('--dataset', '-D', type=str, default='mnist', choices=['mnist', 'cifar10'])
parser.add_argument('--logging', default=False, action='store_true')
args = parser.parse_args()
filename = 'convcnp2d_{}.pth.gz'.format(args.dataset)
if torch.cuda.is_available():
args.device = torch.device('cuda')
else:
args.device = torch.device('cpu')
args.writer = SummaryWriter()
main()
args.writer.close() | [
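
# Illustrative invocation using the flags defined above (example values):
#
#   python main2d.py -D cifar10 -B 32 -E 50
#
# trains ConvCNP2d on CIFAR-10 and logs likelihood, RMSE and completion
# images to TensorBoard.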
"[email protected]"
] | |
74ecd4a6a16f90612dffbb77095aa1099cf71add | 033b29b6b1538d10e060e5734a1d7488a3fa03b4 | /attic/objects/cards.py | f7891263e43957b288de677edd09d2b095312859 | [
"MIT"
] | permissive | yuechuanx/fluent-python-code-and-notes | f99967416abc9c46be50d95f822b2ef3609f2d2d | 2ae19fff8e1d292c6e8d163c99ca63e07259499c | refs/heads/master | 2023-08-09T22:14:22.985987 | 2022-08-28T09:06:32 | 2022-08-28T09:06:32 | 229,009,764 | 2 | 0 | MIT | 2023-07-20T15:11:59 | 2019-12-19T08:30:28 | Jupyter Notebook | UTF-8 | Python | false | false | 1,422 | py | """
Spadille is the nickname for the Ace of Spades in some games
(see `Webster 1913`_)
>>> beer_card = Card('7', Suite.diamonds)
>>> beer_card
Card('7', Suite.diamonds)
>>> spadille = Card('A', Suite.spades, long_rank='Ace')
>>> spadille
Card('A', Suite.spades)
>>> print(spadille)
Ace of spades
>>> bytes(spadille)
b'A\\x01'
>>> charles = Card('K', Suite.hearts)
>>> bytes(charles)
b'K\\x04'
>>> big_cassino = Card('10', Suite.diamonds)
>>> bytes(big_cassino)
b'T\\x02'
.. _Webster 1913: http://machaut.uchicago.edu/cgi-bin/WEBSTER.sh?WORD=spadille
"""
from enum import Enum
Suite = Enum('Suite', 'spades diamonds clubs hearts')
class Card:
def __init__(self, rank, suite, *, long_rank=None):
self.rank = rank
if long_rank is None:
self.long_rank = self.rank
else:
self.long_rank = long_rank
self.suite = suite
def __str__(self):
return '{long_rank} of {suite.name}'.format(**self.__dict__)
def __repr__(self):
constructor = '{cls.__name__}({args})'
args = '{0.rank!r}, Suite.{0.suite.name}'.format(self)
return constructor.format(cls=self.__class__, args=args)
    def __bytes__(self):
        # '10' is the only two-character rank, so encode it as 'T'; every
        # card then packs into two bytes: rank byte + suite value byte.
        if self.rank == '10':
            rank_byte = b'T'
        else:
            rank_byte = bytes([ord(self.rank)])
        return rank_byte + bytes([self.suite.value])
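# Quick sanity check of the two-byte encoding (mirrors the doctests above):
#
#   >>> bytes(Card('Q', Suite.clubs))
#   b'Q\x03'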
| [
"[email protected]"
] | |
690d6bd978706e9964ef58092234dfc22adba345 | 866dee1b3d01b863c31332ec81330d1b5ef5c6fa | /openquake.hazardlib/openquake/hazardlib/gsim/somerville_2009.py | 3b4ea58c733f11df1149419112d9a6f14b6fa79c | [
"MIT",
"AGPL-3.0-only"
] | permissive | rainzhop/ConvNetQuake | 3e2e1a040952bd5d6346905b83f39889c6a2e51a | a3e6de3f7992eac72f1b9883fec36b8c7fdefd48 | refs/heads/master | 2020-08-07T16:41:03.778293 | 2019-11-01T01:49:00 | 2019-11-01T01:49:00 | 213,527,701 | 0 | 0 | MIT | 2019-10-08T02:08:00 | 2019-10-08T02:08:00 | null | UTF-8 | Python | false | false | 9,716 | py | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2013-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Module exports :class:`SomervilleEtAl2009NonCratonic`,
:class:`SomervilleEtAl2009YilgarnCraton`
"""
from __future__ import division
import numpy as np
from openquake.hazardlib.gsim.base import GMPE, CoeffsTable
from openquake.hazardlib import const
from openquake.hazardlib.imt import PGA, PGV, SA
class SomervilleEtAl2009NonCratonic(GMPE):
"""
Implements GMPE developed by P. Somerville, R. Graves, N. Collins, S. G.
    Song, S. Ni, and P. Cummins for Non-Cratonic Australia, published in "Source
and Ground Motion Models for Australian Earthquakes", Report to Geoscience
Australia (2009). Document available at:
http://www.ga.gov.au/cedda/publications/193?yp=2009
"""
#: The supported tectonic region type is stable continental region
DEFINED_FOR_TECTONIC_REGION_TYPE = const.TRT.STABLE_CONTINENTAL
#: The supported intensity measure types are PGA, PGV, and SA, see table
#: 3
DEFINED_FOR_INTENSITY_MEASURE_TYPES = set([
PGA,
PGV,
SA
])
#: The supported intensity measure component is set to 'average
#: horizontal', however the original paper does not report this information
DEFINED_FOR_INTENSITY_MEASURE_COMPONENT = const.IMC.AVERAGE_HORIZONTAL
#: The supported standard deviations is total, see tables 3
DEFINED_FOR_STANDARD_DEVIATION_TYPES = set([
const.StdDev.TOTAL
])
#: no site parameters are defined, the GMPE is calibrated for Vs30 = 865
#: m/s
REQUIRES_SITES_PARAMETERS = set()
#: The required rupture parameter is magnitude, see table 2
REQUIRES_RUPTURE_PARAMETERS = set(('mag', ))
#: The required distance parameter is 'Joyner-Boore' distance, see table 2
REQUIRES_DISTANCES = set(('rjb', ))
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
Implement equations as defined in table 2.
"""
assert all(stddev_type in self.DEFINED_FOR_STANDARD_DEVIATION_TYPES
for stddev_type in stddev_types)
C = self.COEFFS[imt]
mean = self._compute_mean(C, rup.mag, dists.rjb)
stddevs = self._get_stddevs(C, stddev_types, dists.rjb.shape[0])
return mean, stddevs
def _compute_mean(self, C, mag, rjb):
"""
Compute mean value, see table 2.
"""
m1 = 6.4
r1 = 50.
h = 6.
R = np.sqrt(rjb ** 2 + h ** 2)
R1 = np.sqrt(r1 ** 2 + h ** 2)
less_r1 = rjb < r1
ge_r1 = rjb >= r1
mean = (C['c1'] + C['c4'] * (mag - m1) * np.log(R) + C['c5'] * rjb +
C['c8'] * (8.5 - mag) ** 2)
mean[less_r1] += C['c3'] * np.log(R[less_r1])
mean[ge_r1] += (C['c3'] * np.log(R1) +
C['c6'] * (np.log(R[ge_r1]) - np.log(R1)))
if mag < m1:
mean += C['c2'] * (mag - m1)
else:
mean += C['c7'] * (mag - m1)
return mean
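    # A sketch of the functional form implemented above (paraphrased from the
    # code, not quoted verbatim from the report), with R = sqrt(rjb**2 + h**2),
    # R1 = sqrt(r1**2 + h**2), m1 = 6.4, r1 = 50 and h = 6:
    #
    #   ln(y) = c1 + c*(M - m1) + c3*ln(min(R, R1)) + c4*(M - m1)*ln(R)
    #           + c5*rjb + c6*max(ln(R) - ln(R1), 0) + c8*(8.5 - M)**2
    #
    # where c is c2 below the magnitude hinge m1 and c7 above it.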
def _get_stddevs(self, C, stddev_types, num_sites):
"""
Return total standard deviation.
"""
stddevs = []
for _ in stddev_types:
stddevs.append(np.zeros(num_sites) + C['sigma'])
return stddevs
#: Coefficients taken from table 3
COEFFS = CoeffsTable(sa_damping=5, table="""\
IMT c1 c2 c3 c4 c5 c6 c7 c8 sigma
pgv 5.07090 0.52780 -0.85740 0.17700 -0.00501 -0.61190 0.80660 -0.03800 0.6417
pga 1.03780 -0.03970 -0.79430 0.14450 -0.00618 -0.72540 -0.03590 -0.09730 0.5685
0.010 1.05360 -0.04190 -0.79390 0.14450 -0.00619 -0.72660 -0.03940 -0.09740 0.5684
0.020 1.05680 -0.03920 -0.79680 0.14550 -0.00617 -0.73230 -0.03930 -0.09600 0.5684
0.030 1.13530 -0.04790 -0.80920 0.15000 -0.00610 -0.76410 -0.05710 -0.09210 0.5681
0.040 1.30000 -0.07020 -0.83150 0.15920 -0.00599 -0.82850 -0.09810 -0.08530 0.5676
0.050 1.47680 -0.09310 -0.83330 0.15600 -0.00606 -0.86740 -0.12740 -0.09130 0.5670
0.075 1.70220 -0.05160 -0.80720 0.14560 -0.00655 -0.87690 -0.10970 -0.08690 0.5663
0.100 1.65720 0.15080 -0.77590 0.13100 -0.00708 -0.77830 0.01690 -0.05980 0.5659
0.150 1.94440 -0.09620 -0.75000 0.11670 -0.00698 -0.69490 -0.13320 -0.12530 0.5659
0.200 1.82720 -0.06230 -0.73430 0.11940 -0.00677 -0.64380 -0.09570 -0.11920 0.5669
0.250 1.74380 -0.02530 -0.72480 0.11950 -0.00646 -0.63740 -0.06250 -0.11650 0.5678
0.3003 1.80560 -0.27020 -0.73190 0.13490 -0.00606 -0.66440 -0.17470 -0.14340 0.5708
0.400 1.88750 -0.37820 -0.70580 0.09960 -0.00589 -0.58770 -0.24420 -0.21890 0.5697
0.500 2.03760 -0.79590 -0.69730 0.11470 -0.00565 -0.59990 -0.48670 -0.29690 0.5739
0.750 1.93060 -0.80280 -0.74510 0.11220 -0.00503 -0.59460 -0.50120 -0.34990 0.5876
1.000 1.60380 -0.47800 -0.86950 0.07320 -0.00569 -0.41590 0.06360 -0.33730 0.6269
1.4993 0.47740 0.90960 -1.02440 0.11060 -0.00652 -0.19000 1.09610 -0.10660 0.7517
2.000 -0.25810 1.37770 -1.01000 0.10310 -0.00539 -0.27340 1.50330 -0.04530 0.8036
3.0003 -0.96360 1.14690 -0.88530 0.10380 -0.00478 -0.40420 1.54130 -0.11020 0.8219
4.000 -1.46140 1.07950 -0.80490 0.10960 -0.00395 -0.46040 1.41960 -0.14700 0.8212
5.000 -1.61160 0.74860 -0.78100 0.09650 -0.00307 -0.46490 1.24090 -0.22170 0.8240
7.5019 -2.35310 0.35190 -0.64340 0.09590 -0.00138 -0.68260 0.92880 -0.31230 0.7957
10.000 -3.26140 0.69730 -0.62760 0.12920 -0.00155 -0.61980 1.01050 -0.24550 0.7602
""")
class SomervilleEtAl2009YilgarnCraton(SomervilleEtAl2009NonCratonic):
"""
Implements GMPE developed by P. Somerville, R. Graves, N. Collins, S. G.
Song, S. Ni, and P. Cummins for Yilgarn Craton published in "Source
and Ground Motion Models for Australian Earthquakes", Report to Geoscience
Australia (2009). Document available at:
http://www.ga.gov.au/cedda/publications/193?yp=2009
Extends
:class:`openquake.hazardlib.gsim.somerville_2009.SomervilleEtAl2009NonCratonic`
because the same functional form is used, only the coefficents differ.
"""
#: Coefficients taken from table 4
COEFFS = CoeffsTable(sa_damping=5, table="""\
IMT c1 c2 c3 c4 c5 c6 c7 c8 sigma
pgv 5.23440 1.58530 -1.01540 0.21400 -0.00341 -0.91610 1.12980 0.14810 0.6606
pga 1.54560 1.45650 -1.11510 0.16640 -0.00567 -1.04900 1.05530 0.20000 0.5513
0.010 1.55510 1.46380 -1.11460 0.16620 -0.00568 -1.04840 1.05850 0.20140 0.5512
0.020 2.33800 1.38060 -1.22970 0.18010 -0.00467 -1.39850 0.95990 0.20130 0.5510
0.030 2.48090 1.37540 -1.17620 0.17120 -0.00542 -1.38720 0.96930 0.19280 0.5508
0.040 2.31450 1.60250 -1.12600 0.17150 -0.00629 -1.27910 1.07040 0.23560 0.5509
0.050 2.26860 1.55840 -1.07340 0.14710 -0.00709 -1.08910 1.10750 0.20670 0.5510
0.075 1.97070 1.68030 -1.01540 0.14560 -0.00737 -0.91930 1.18290 0.22170 0.5514
0.100 1.71030 1.75070 -0.99330 0.13820 -0.00746 -0.78140 1.29390 0.23790 0.5529
0.150 1.52310 1.69160 -0.96310 0.13330 -0.00713 -0.67330 1.22430 0.21020 0.5544
0.200 1.36830 1.57940 -0.94720 0.13640 -0.00677 -0.62690 1.17760 0.18950 0.5558
0.250 1.40180 1.28940 -0.94410 0.14360 -0.00617 -0.67070 1.05610 0.14590 0.5583
0.3003 1.45000 1.04630 -0.94880 0.14760 -0.00581 -0.68700 0.94040 0.11040 0.5602
0.400 1.44150 0.92820 -0.91830 0.11320 -0.00576 -0.59520 0.86280 0.04060 0.5614
0.500 1.40380 0.69160 -0.91010 0.13480 -0.00557 -0.62390 0.71230 0.00620 0.5636
0.750 1.50840 0.75800 -0.99010 0.11260 -0.00458 -0.69040 0.68590 -0.05630 0.5878
1.000 2.10630 0.38180 -1.08680 0.07950 -0.00406 -0.90340 0.61850 -0.18250 0.6817
1.4993 2.55790 -0.84270 -0.81810 0.07650 -0.00220 -1.35320 -0.25440 -0.46660 0.8514
2.000 2.39600 -1.39950 -0.70440 0.06770 -0.00366 -0.90860 -0.64320 -0.59600 0.8646
3.0003 0.96040 -0.46120 -0.70450 0.06450 -0.00429 -0.51190 -0.16430 -0.46310 0.8424
4.000 0.12190 -0.06980 -0.75910 0.08490 -0.00374 -0.41450 0.12350 -0.39250 0.8225
5.000 -0.84240 0.53160 -0.79600 0.10330 -0.00180 -0.62130 0.53680 -0.27570 0.8088
7.5019 -1.92260 0.63760 -0.81900 0.14550 -0.00066 -0.75740 0.69020 -0.23290 0.7808
10.000 -2.60330 0.59060 -0.80940 0.16090 -0.00106 -0.68550 0.70350 -0.22910 0.7624
""")
| [
"[email protected]"
] | |
8e227b0301c820f7de88c13b8aa9fea0299ee35c | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /lex-models_read_1/intent-version_get.py | 3042cea737aa3ed08aa65f3330a3dc1253199319 | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,430 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import execute_one_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/lex-models/get-intent-versions.html
if __name__ == '__main__':
"""
create-intent-version : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/lex-models/create-intent-version.html
delete-intent-version : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/lex-models/delete-intent-version.html
"""
parameter_display_string = """
# name : The name of the intent for which versions should be returned.
"""
add_option_dict = {}
#######################################################################
# setting option use
# ex: add_option_dict["setting_matching_parameter"] = "--owners"
# ex: add_option_dict["setting_key"] = "owner_id"
#######################################################################
# single parameter
# ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
#######################################################################
# parameter display string
add_option_dict["parameter_display_string"] = parameter_display_string
execute_one_parameter("lex-models", "get-intent-versions", "name", add_option_dict) | [
"[email protected]"
] | |
f6e55ac4950c328a97974e006332ebc0b2d7779d | 9a4de72aab094c87cfee62380e7f2613545eecfb | /monitor/permissions.py | 4a5ca730834d35cef0d5a306a239cd832e172907 | [] | no_license | jamesduan/asset | ed75765c30a5288aaf4f6c56bbf2c9a059105f29 | f71cb623b5ba376309cb728ad5c291ced2ee8bfc | refs/heads/master | 2021-01-10T00:06:41.120678 | 2017-05-27T11:40:48 | 2017-05-27T11:40:48 | 92,730,581 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 473 | py | from rest_framework import permissions
from assetv2.settingsapi import GROUP_ID
SAFE_METHOD = ['GET']
class EventPermission(permissions.BasePermission):
def has_permission(self, request, view):
return True
class AlarmPermission(permissions.BasePermission):
def has_permission(self, request, view):
if request.user is None:
return False
        if request.user.is_superuser or request.method in SAFE_METHOD:
            return True
        return False
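# Typical DRF wiring (illustrative): set `permission_classes = [AlarmPermission]`
# on a view or viewset so has_permission() above runs for each request.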
| [
"[email protected]"
] | |
93017e77935b215f3fe77019853de5c8710fb5da | 5ab0a217ac64a4e73d7ccff834a73eecdae323c5 | /chps3-5/5.09.2.py | 07db151d59c6c3f2ae3361eb9a29d95d73b0929d | [] | no_license | bolducp/My-Think-Python-Solutions | d90ea5c485e418f4a6b547fdd9c1d4c8adfe72b9 | 6c411af5a46ee167b8e4a3449aa4b18705bf1df5 | refs/heads/master | 2021-01-22T05:19:53.987082 | 2015-09-15T15:53:31 | 2015-09-15T15:53:31 | 41,259,726 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | def print_it():
print "hello"
def do_n(a_function, n):
    # Call a_function n times, recursively; the base case is n <= 0.
    if n <= 0:
        return
    a_function()
    do_n(a_function, n - 1)
do_n(print_it, 10) | [
"[email protected]"
] | |
5870f222472143b75927e57686601703315c9e5f | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/3/ht8.py | 395581a5a9a0c57ec6f9e5fe99c8d47a05db9215 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    # Print the tokens found between a pair of bare quote tokens; a pair of
    # quotes with nothing in between prints an empty line.
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # data to print
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            print
def main(fileName):
with open(fileName) as f:
for line in f:
            data = line.split()
            if not data:  # skip blank lines instead of crashing on data[0]
                continue
            if data[0] == 'hT8':
printFunction(data[1:])
else:
print 'ERROR'
return
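# Example input file (format inferred from main/printFunction above):
#
#   hT8 " hello world "
#   hT8 " "
#
# A line whose first token is not 'hT8' prints ERROR and stops processing.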
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
d8bbd86b7eb2a6e33c42f85a09768c3508e5f1d1 | b8ca7b39498b371405b1036dc58d18ab260a3bdc | /app/core/aries_rfcs/features/feature_0036_issue_credential/feature.py | e6a5f5619f49ecac5e242412fb189cbd29696296 | [] | no_license | Sirius-social/Indy-Agent | 73cbb9126648a8e7d97e60fd7add3f7f72c25b1f | 8e26d824c74d7e41739ac16acacf4b12181cfea1 | refs/heads/master | 2022-12-10T18:33:15.324450 | 2021-09-19T22:39:21 | 2021-09-19T22:39:21 | 197,164,640 | 0 | 1 | null | 2022-12-08T05:53:33 | 2019-07-16T09:36:58 | Rust | UTF-8 | Python | false | false | 43,576 | py | import json
import uuid
import logging
import base64
from collections import UserDict
from typing import List
from django.conf import settings
from django.utils.timezone import timedelta, now
import core.indy_sdk_utils as indy_sdk_utils
import core.codec
import core.const
from core.models import update_cred_def_meta, update_issuer_schema
from core.base import WireMessageFeature, FeatureMeta, EndpointTransport, WriteOnlyChannel
from core.messages.message import Message
from core.messages.errors import ValidationException as MessageValidationException
from core.serializer.json_serializer import JSONSerializer as Serializer
from core.wallet import WalletAgent, InvokableStateMachineMeta, WalletConnection
from state_machines.base import BaseStateMachine, MachineIsDone
from core.wallet import WalletOperationError
from core.aries_rfcs.features.feature_0015_acks.feature import AckMessage
from transport.const import WIRED_CONTENT_TYPES
from .statuses import *
from .errors import *
class ProposedAttrib(UserDict):
def __init__(self, name: str, value: str, mime_type: str=None, **kwargs):
super().__init__()
self.data['name'] = name
if mime_type:
self.data['mime-type'] = mime_type
self.data['value'] = value
def to_json(self):
return self.data
class AttribTranslation(UserDict):
def __init__(self, attrib_name: str, translation: str, **kwargs):
super().__init__()
self.data['attrib_name'] = attrib_name
self.data['translation'] = translation
def to_json(self):
return self.data
class Context:
def __init__(self):
self.my_did = None
self.their_did = None
self.my_ver_key = None
self.their_verkey = None
self.their_endpoint = None
self.routing_keys = None
class IssueCredentialProtocol(WireMessageFeature, metaclass=FeatureMeta):
"""https://github.com/hyperledger/aries-rfcs/tree/master/features/0036-issue-credential"""
DEF_LOCALE = 'en'
FAMILY_NAME = "issue-credential"
VERSION = "1.1"
FAMILY = "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/" + FAMILY_NAME + "/" + VERSION
"""Messages"""
# potential Holder to Issuer (optional). Tells what the Holder hopes to receive.
PROPOSE_CREDENTIAL = FAMILY + "/propose-credential"
# Issuer to potential Holder (optional for some credential implementations; required for Hyperledger Indy).
# Tells what the Issuer intends to issue, and possibly, the price the Issuer expects to be paid.
OFFER_CREDENTIAL = FAMILY + "/offer-credential"
# Potential Holder to Issuer. If neither of the previous message types is used,
# this is the message that begins the protocol.
REQUEST_CREDENTIAL = FAMILY + "/request-credential"
# Issuer to new Holder. Attachment payload contains the actual credential.
ISSUE_CREDENTIAL = FAMILY + "/issue-credential"
# Problem reports
PROBLEM_REPORT = FAMILY + "/problem_report"
# Ack
CREDENTIAL_ACK = FAMILY + "/ack"
CREDENTIAL_PREVIEW_TYPE = "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/issue-credential/1.0/credential-preview"
CREDENTIAL_TRANSLATION_TYPE = "https://github.com/Sirius-social/agent/tree/master/messages/credential-translation"
ISSUER_SCHEMA_TYPE = "https://github.com/Sirius-social/agent/tree/master/messages/issuer-schema"
CREDENTIAL_TRANSLATION_ID = "credential-translation"
ISSUER_SCHEMA_ID = "issuer-schema"
# Problem reports
PROPOSE_NOT_ACCEPTED = "propose_not_accepted"
OFFER_PROCESSING_ERROR = 'offer_processing_error'
REQUEST_NOT_ACCEPTED = "request_not_accepted"
ISSUE_PROCESSING_ERROR = 'issue_processing_error'
RESPONSE_FOR_UNKNOWN_REQUEST = "response_for_unknown_request"
# internal usage definitions
MESSAGE_CONTENT_TYPE = 'application/json'
WIRED_CONTENT_TYPE = WIRED_CONTENT_TYPES[0]
CMD_START = 'start'
CMD_STOP = 'stop'
STATE_MACHINE_TTL = 60 # 60 sec
@classmethod
async def handle(cls, agent_name: str, wire_message: bytes, my_label: str = None, my_endpoint: str = None) -> bool:
unpacked = await WalletAgent.unpack_message(agent_name, wire_message)
kwargs = json.loads(unpacked['message'])
message = Message(**kwargs)
if message.get('@type', None) is None:
return False
if not cls.endorsement(message):
return False
for protocol_version in ["1.1", "1.0"]:
type_issue_credential = cls.set_protocol_version(cls.ISSUE_CREDENTIAL, protocol_version)
type_offer_credential = cls.set_protocol_version(cls.OFFER_CREDENTIAL, protocol_version)
type_request_credential = cls.set_protocol_version(cls.REQUEST_CREDENTIAL, protocol_version)
type_credential_ack = cls.set_protocol_version(cls.CREDENTIAL_ACK, protocol_version)
state_machine_id = cls.get_state_machine_id(unpacked['sender_verkey'])
if message.type in [type_issue_credential, type_offer_credential]:
machine_class = IssueCredentialProtocol.HolderSateMachine
if message.type == type_offer_credential:
await WalletAgent.start_state_machine(
status=IssueCredentialStatus.Null, ttl=IssueCredentialProtocol.STATE_MACHINE_TTL,
agent_name=agent_name, machine_class=machine_class, machine_id=state_machine_id,
protocol_version=protocol_version
)
await WalletAgent.invoke_state_machine(
agent_name=agent_name, id_=state_machine_id,
content_type=cls.WIRED_CONTENT_TYPE, data=wire_message
)
return True
elif message.type in [type_request_credential, AckMessage.ACK, type_credential_ack]:
await WalletAgent.invoke_state_machine(
agent_name=agent_name, id_=state_machine_id,
content_type=cls.WIRED_CONTENT_TYPE, data=wire_message
)
return True
return False
@staticmethod
def get_state_machine_id(key: str):
return 'issue=cred:' + key
@staticmethod
def set_protocol_version(msg_type: str, version: str):
parts = msg_type.split('/')
if len(parts) < 4:
raise RuntimeError('Unexpected message type structure "%s"' % msg_type)
parts[2] = version
return '/'.join(parts)
@classmethod
def endorsement(cls, msg: Message) -> bool:
if msg.type == AckMessage.ACK:
return True
family_prefix = "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/" + cls.FAMILY_NAME
for version in ["1.1", "1.0"]:
family = family_prefix + "/" + version
if family in msg.type:
return True
return False
@classmethod
def build_problem_report_for_connections(cls, problem_code, problem_str, thread_id: str = None) -> Message:
initialized = {
"@type": "{}/problem_report".format(cls.FAMILY),
"problem-code": problem_code,
"explain": problem_str
}
if thread_id:
initialized['~thread'] = {Message.THREAD_ID: thread_id, Message.SENDER_ORDER: 0}
return Message(initialized)
@staticmethod
async def send_problem_report(
wallet: WalletConnection, problem_code: str, problem_str: str, context: Context, thread_id: str=None
):
err_msg = IssueCredentialProtocol.build_problem_report_for_connections(
problem_code,
problem_str,
thread_id
)
try:
wire_message = await wallet.pack_message(
Serializer.serialize(err_msg).decode('utf-8'),
context.their_verkey,
context.my_ver_key
)
except Exception as e:
logging.exception(str(e))
raise
else:
transport = EndpointTransport(address=context.their_endpoint)
await transport.send_wire_message(wire_message)
return err_msg
@classmethod
async def validate_common_message_blocks(cls, msg: Message, problem_code: str, context: Context):
try:
msg.validate_common_blocks()
return True, None
except MessageValidationException as e:
logging.exception('Validation error while parsing message: %s' % msg.as_json())
if context.their_did:
err_msg = cls.build_problem_report_for_connections(
problem_code,
str(e.exception),
thread_id=msg.id
)
return False, err_msg
else:
return False, None
except Exception as e:
logging.exception('Validation error while parsing message: %s' % str(e))
return False, None
@classmethod
async def send_message_to_agent(cls, to_did: str, msg: Message, wallet: WalletConnection):
their_did = to_did
pairwise_info = await wallet.get_pairwise(their_did)
pairwise_meta = pairwise_info['metadata']
my_did = pairwise_info['my_did']
their_endpoint = pairwise_meta['their_endpoint']
their_vk = pairwise_meta['their_vk']
my_vk = await wallet.key_for_local_did(my_did)
await cls.send_message_to_endpoint_and_key(their_vk, their_endpoint, msg, wallet, my_vk)
@staticmethod
async def send_message_to_endpoint_and_key(their_ver_key: str, their_endpoint: str, msg: Message,
wallet: WalletConnection, my_ver_key: str = None):
# If my_ver_key is omitted, anon-crypt is used inside pack.
try:
wire_message = await wallet.pack_message(
Serializer.serialize(msg).decode('utf-8'),
their_ver_key,
my_ver_key
)
except Exception as e:
logging.exception(str(e))
raise
else:
transport = EndpointTransport(address=their_endpoint)
await transport.send_wire_message(wire_message)
@staticmethod
async def unpack_agent_message(wire_msg_bytes, wallet: WalletConnection):
print('===== 0036 unpack_agent_message ======')
if isinstance(wire_msg_bytes, str):
wire_msg_bytes = bytes(wire_msg_bytes, 'utf-8')
unpacked = await wallet.unpack_message(wire_msg_bytes)
print('unpacked: \n' + json.dumps(unpacked, indent=2, sort_keys=True))
from_key = None
from_did = None
their_endpoint = None
context = Context()
if 'sender_verkey' in unpacked:
from_key = unpacked['sender_verkey']
from_did = await indy_sdk_utils.did_for_key(wallet, unpacked['sender_verkey'])
pairwise_info = await wallet.get_pairwise(from_did)
pairwise_meta = pairwise_info['metadata']
their_endpoint = pairwise_meta['their_endpoint']
to_key = unpacked['recipient_verkey']
to_did = await indy_sdk_utils.did_for_key(wallet, unpacked['recipient_verkey'])
msg = Serializer.deserialize(unpacked['message'])
print('from_did: ' + str(from_did))
print('to_did: ' + str(to_did))
print('to_key: ' + str(to_key))
print('from_key: ' + str(from_key))
print('their_endpoint: ' + str(their_endpoint))
context.their_did = from_did
context.my_did = to_did
context.my_ver_key = to_key
context.their_verkey = from_key
context.their_endpoint = their_endpoint
print('===========')
return msg, context
@classmethod
def propose_credential(
cls,
comment: str=None, locale: str=DEF_LOCALE, proposal_attrib: List[ProposedAttrib]=None, schema_id: str=None,
schema_name: str=None, schema_version: str=None, schema_issuer_did: str=None, cred_def_id: str=None,
issuer_did: str=None, proposal_attrib_translation: List[AttribTranslation]=None
):
data = {
'@type': cls.PROPOSE_CREDENTIAL,
'~l10n': {"locale": locale},
}
if comment:
data['comment'] = comment
if schema_id:
data['schema_id'] = schema_id
if schema_name:
data['schema_name'] = schema_name
if schema_version:
data['schema_version'] = schema_version
if schema_issuer_did:
data['schema_issuer_did'] = schema_issuer_did
if cred_def_id:
data['cred_def_id'] = cred_def_id
if issuer_did:
data['issuer_did'] = issuer_did
if proposal_attrib:
data['credential_proposal'] = {
"@type": cls.CREDENTIAL_PREVIEW_TYPE,
"attributes": [attrib.to_json() for attrib in proposal_attrib]
}
if proposal_attrib_translation:
data['~attach'] = [
{
"@type": cls.CREDENTIAL_TRANSLATION_TYPE,
"id": cls.CREDENTIAL_TRANSLATION_ID,
'~l10n': {"locale": locale},
"mime-type": "application/json",
"data": {
"json": [trans.to_json() for trans in proposal_attrib_translation]
}
}
]
return Message(data)
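    # A minimal usage sketch for propose_credential (all values illustrative,
    # not taken from the protocol spec):
    #
    #   msg = IssueCredentialProtocol.propose_credential(
    #       comment='Diploma credential',
    #       schema_name='diploma', schema_version='1.0',
    #       proposal_attrib=[ProposedAttrib('first_name', 'Alice')],
    #       proposal_attrib_translation=[AttribTranslation('first_name', 'First name')],
    #   )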
class IssuerStateMachine(BaseStateMachine, metaclass=InvokableStateMachineMeta):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.status = IssueCredentialStatus.Null
self.ack_message_id = None
self.cred_def_id = None
self.rev_reg_id = None
self.values_buffer = None
self.cred_offer_buffer = None
self.to = None
self.comment = None
self.locale = None
self.blob_storage_reader_handle = None
self.log_channel_name = None
self.cred_id = None
self.__log_channel = None
self.protocol_version = None
self.expires_time = None
@classmethod
async def start_issuing(
cls, agent_name: str, pass_phrase: str, to: str, cred_def_id: str, cred_def: dict,
values: dict, issuer_schema: dict=None, rev_reg_id: str=None,
preview: List[ProposedAttrib]=None, translation: List[AttribTranslation]=None,
comment: str=None, locale: str=None, cred_id: str=None, ttl: int=None
):
machine_class = IssueCredentialProtocol.IssuerStateMachine
log_channel_name = 'cred-issuing-log/' + uuid.uuid4().hex
to_verkey = await WalletAgent.key_for_local_did(
agent_name, pass_phrase, to
)
if not to_verkey:
raise RuntimeError('Unknown pairwise for DID: %s' % str(to))
state_machine_id = IssueCredentialProtocol.get_state_machine_id(to_verkey)
ttl = ttl or IssueCredentialProtocol.STATE_MACHINE_TTL
expires_time = now() + timedelta(seconds=ttl)
await WalletAgent.start_state_machine(
agent_name=agent_name, machine_class=machine_class, machine_id=state_machine_id,
status=IssueCredentialStatus.Null, ttl=ttl,
expires_time=expires_time.strftime('%Y-%m-%dT%H:%M:%S') + '+0000',
to=to, cred_def_id=cred_def_id, rev_reg_id=rev_reg_id, log_channel_name=log_channel_name,
cred_id=cred_id
)
data = dict(
command=IssueCredentialProtocol.CMD_START,
comment=comment,
locale=locale,
values=values,
issuer_schema=issuer_schema,
cred_def=cred_def,
preview=[p.to_json() for p in preview] if preview else None,
translation=[t.to_json() for t in translation] if translation else None
)
await WalletAgent.invoke_state_machine(
agent_name=agent_name,
id_=state_machine_id,
content_type=IssueCredentialProtocol.MESSAGE_CONTENT_TYPE,
data=data
)
return log_channel_name
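        # Hedged usage sketch for kicking off issuing from agent-side code
        # (argument values below are illustrative assumptions only):
        #
        #   log_channel = await IssuerStateMachine.start_issuing(
        #       agent_name='issuer', pass_phrase='secret', to=their_did,
        #       cred_def_id=cred_def_id, cred_def=cred_def_body,
        #       values={'first_name': 'Alice'},
        #   )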
@classmethod
async def stop_issuing(cls, agent_name: str, pass_phrase: str, to: str):
to_verkey = await WalletAgent.key_for_local_did(
agent_name, pass_phrase, to
)
if not to_verkey:
raise RuntimeError('Unknown pairwise for DID: %s' % str(to))
state_machine_id = IssueCredentialProtocol.get_state_machine_id(to_verkey)
data = dict(
command=IssueCredentialProtocol.CMD_STOP,
)
await WalletAgent.invoke_state_machine(
agent_name=agent_name,
id_=state_machine_id,
content_type=IssueCredentialProtocol.MESSAGE_CONTENT_TYPE,
data=data
)
await WalletAgent.kill_state_machine(
agent_name=agent_name,
pass_phrase=pass_phrase,
id_=state_machine_id
)
async def handle(self, content_type, data):
try:
if content_type == IssueCredentialProtocol.MESSAGE_CONTENT_TYPE:
command = str(data.get('command', None))
if command == IssueCredentialProtocol.CMD_START:
if self.status == IssueCredentialStatus.Null:
# Store Context
comment = data.get('comment', None)
locale = data.get('locale', None) or IssueCredentialProtocol.DEF_LOCALE
values = data.get('values')
cred_def = data.get('cred_def')
await update_cred_def_meta(cred_def['id'], cred_def)
preview = data.get('preview', None)
issuer_schema = data.get('issuer_schema', None)
if issuer_schema:
await update_issuer_schema(issuer_schema['id'], issuer_schema)
preview = [ProposedAttrib(**item) for item in preview] if preview else None
translation = data.get('translation', None)
translation = [AttribTranslation(**item) for item in translation] if translation else None
self.values_buffer = json.dumps(values)
# Call Indy
offer = await self.get_wallet().issuer_create_credential_offer(self.cred_def_id)
self.cred_offer_buffer = json.dumps(offer)
await self.__log(event='Build offer with Indy lib', details=offer)
payload = dict(**offer, **cred_def)
await self.__log(event='Payload', details=payload)
# Build Aries message
id_suffix = uuid.uuid4().hex
data = {
"@type": IssueCredentialProtocol.OFFER_CREDENTIAL,
'~l10n': {"locale": locale},
"offers~attach": [
{
"@id": 'libindy-cred-offer-' + id_suffix,
"mime-type": "application/json",
"data": {
"base64": base64.b64encode(json.dumps(payload).encode()).decode()
}
}
]
}
if comment:
data['comment'] = comment
data['~l10n'] = {"locale": locale}
self.comment = comment
self.locale = locale
if preview:
data["credential_preview"] = {
"@type": IssueCredentialProtocol.CREDENTIAL_PREVIEW_TYPE,
"attributes": [attrib.to_json() for attrib in preview]
}
if translation or issuer_schema:
data['~attach'] = []
if translation:
data['~attach'].append(
{
"@type": IssueCredentialProtocol.CREDENTIAL_TRANSLATION_TYPE,
"id": IssueCredentialProtocol.CREDENTIAL_TRANSLATION_ID,
'~l10n': {"locale": locale},
"mime-type": "application/json",
"data": {
"json": [trans.to_json() for trans in translation]
}
}
)
if issuer_schema:
data['~attach'].append(
{
"@type": IssueCredentialProtocol.ISSUER_SCHEMA_TYPE,
"id": IssueCredentialProtocol.ISSUER_SCHEMA_ID,
"mime-type": "application/json",
"data": {
"json": issuer_schema
}
}
)
if self.expires_time:
data['~timing'] = {
"expires_time": self.expires_time
}
message_offer = Message(data)
await IssueCredentialProtocol.send_message_to_agent(self.to, message_offer, self.get_wallet())
self.status = IssueCredentialStatus.OfferCredential
await self.__log(event='Send Offer message', details=data)
else:
raise RuntimeError('Unexpected command %s' % command)
elif command == IssueCredentialProtocol.CMD_STOP:
if self.to and self.status != IssueCredentialStatus.Null:
err_msg = IssueCredentialProtocol.build_problem_report_for_connections(
problem_code=IssueCredentialProtocol.ISSUE_PROCESSING_ERROR,
                                problem_str='Actor unexpectedly stopped issuing',
)
await IssueCredentialProtocol.send_message_to_agent(self.to, err_msg, self.get_wallet())
                            await self.__log('Actor unexpectedly stopped issuing')
await self.done()
else:
raise RuntimeError('Unknown command: %s' % command)
elif content_type in WIRED_CONTENT_TYPES:
msg, context = await IssueCredentialProtocol.unpack_agent_message(data, self.get_wallet())
success, err_msg = await IssueCredentialProtocol.validate_common_message_blocks(
msg,
IssueCredentialProtocol.REQUEST_NOT_ACCEPTED,
context
)
if not success:
if err_msg:
await IssueCredentialProtocol.send_message_to_agent(context.their_did, err_msg, self.get_wallet())
if msg.type == IssueCredentialProtocol.REQUEST_CREDENTIAL:
if self.status == IssueCredentialStatus.OfferCredential:
await self.__log('Received credential request', msg.to_dict())
# Issue credential
cred_offer = json.loads(self.cred_offer_buffer)
cred_request = msg.to_dict().get('requests~attach', None)
cred_values = json.loads(self.values_buffer)
encoded_cred_values = dict()
for key, value in cred_values.items():
encoded_cred_values[key] = dict(raw=str(value), encoded=core.codec.encode(value))
if cred_request:
if isinstance(cred_request, list):
cred_request = cred_request[0]
cred_request_body = cred_request.get('data').get('base64')
cred_request_body = base64.b64decode(cred_request_body)
cred_request_body = json.loads(cred_request_body.decode())
ret = await self.get_wallet().issuer_create_credential(
cred_offer=cred_offer,
cred_req=cred_request_body,
cred_values=encoded_cred_values,
rev_reg_id=self.rev_reg_id,
blob_storage_reader_handle=self.blob_storage_reader_handle
)
cred, cred_revoc_id, revoc_reg_delta = ret
await self.__log(
'Issue Credentials atrifacts',
dict(cred=cred, cred_revoc_id=cred_revoc_id, revoc_reg_delta=revoc_reg_delta)
)
if self.cred_id:
message_id = self.cred_id
else:
message_id = 'libindy-cred-' + uuid.uuid4().hex
data = {
"@type": IssueCredentialProtocol.ISSUE_CREDENTIAL,
"~please_ack": {"message_id": message_id},
"credentials~attach": [
{
"@id": message_id,
"mime-type": "application/json",
"~thread": {Message.THREAD_ID: msg.id, Message.SENDER_ORDER: 0},
"data": {
"base64": base64.b64encode(json.dumps(cred).encode()).decode()
}
}
]
}
self.ack_message_id = message_id
if self.comment:
                                    data['comment'] = self.comment
data['~l10n'] = {"locale": self.locale}
issue_message = Message(data)
await IssueCredentialProtocol.send_message_to_agent(
self.to, issue_message, self.get_wallet()
)
self.status = IssueCredentialStatus.IssueCredential
await self.__log(event='Issue credential', details=data)
else:
await self.__send_problem_report(
problem_code=IssueCredentialProtocol.REQUEST_NOT_ACCEPTED,
problem_str='Impossible state machine state',
context=context,
thread_id=msg.id
)
raise ImpossibleStatus
elif msg.type == AckMessage.ACK or msg.type == IssueCredentialProtocol.CREDENTIAL_ACK:
if self.status == IssueCredentialStatus.IssueCredential:
await self.__log('Received ACK', msg.to_dict())
await self.done()
else:
"""Nothing to do
await self.__send_problem_report(
problem_code=IssueCredentialProtocol.REQUEST_NOT_ACCEPTED,
problem_str='Impossible state machine state',
context=context,
thread_id=msg.id
)
raise ImpossibleStatus
"""
logging.warning('Impossible state machine state')
elif msg.type == IssueCredentialProtocol.PROBLEM_REPORT:
await self.__log('Received problem report', msg.to_dict())
await self.done()
else:
raise RuntimeError('Unsupported content_type "%s"' % content_type)
except Exception as e:
if not isinstance(e, MachineIsDone):
logging.exception('Base machine terminated with exception')
await self.done()
        async def done(self):
            # Log completion once, then close the log channel if it was opened.
            await self.__log('Done')
            if self.__log_channel is not None:
                await self.__log_channel.close()
            await super().done()
async def __send_problem_report(self, problem_code: str, problem_str: str, context: Context, thread_id: str = None):
err_msg = await IssueCredentialProtocol.send_problem_report(
self.get_wallet(),
problem_code,
problem_str,
context,
thread_id
)
await self.__log('Send report problem', err_msg.to_dict())
async def __log(self, event: str, details: dict=None):
event_message = '%s (%s)' % (event, self.get_id())
await self.get_wallet().log(message=event_message, details=details)
if self.__log_channel is None:
self.__log_channel = await WriteOnlyChannel.create(self.log_channel_name)
if not self.__log_channel.is_closed:
await self.__log_channel.write([event_message, details])
class HolderSateMachine(BaseStateMachine, metaclass=InvokableStateMachineMeta):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.status = IssueCredentialStatus.Null
self.to = None
self.cred_metadata = None
self.comment = None
self.cred_def_buffer = None
self.rev_reg_def = None
self.cred_def_id = None
self.protocol_version = None
async def handle(self, content_type, data):
try:
msg_type_offer_credential = IssueCredentialProtocol.set_protocol_version(
IssueCredentialProtocol.OFFER_CREDENTIAL, self.protocol_version or IssueCredentialProtocol.VERSION
)
msg_type_issue_credential = IssueCredentialProtocol.set_protocol_version(
IssueCredentialProtocol.ISSUE_CREDENTIAL, self.protocol_version or IssueCredentialProtocol.VERSION
)
if content_type in WIRED_CONTENT_TYPES:
msg, context = await IssueCredentialProtocol.unpack_agent_message(data, self.get_wallet())
self.to = context.their_did
success, err_msg = await IssueCredentialProtocol.validate_common_message_blocks(
msg,
IssueCredentialProtocol.REQUEST_NOT_ACCEPTED,
context
)
if not success:
if err_msg:
await IssueCredentialProtocol.send_message_to_agent(context.their_did, err_msg, self.get_wallet())
else:
raise RuntimeError('Unsupported content_type "%s"' % content_type)
if msg.type == msg_type_offer_credential:
if self.status == IssueCredentialStatus.Null:
await self.__log('Received credential offer', msg.to_dict())
offer, offer_body, cred_def_body = await self.__validate_cred_offer(msg, context)
self.cred_def_id = offer_body['cred_def_id']
link_secret_name = settings.INDY['WALLET_SETTINGS']['PROVER_MASTER_SECRET_NAME']
try:
await self.get_wallet().prover_create_master_secret(link_secret_name)
except WalletOperationError as e:
if 'duplicate' in e.error_message.lower():
# nothing to do
pass
else:
raise e
# Create Credential request
self.cred_def_buffer = json.dumps(cred_def_body)
cred_request, metadata = await self.get_wallet().prover_create_credential_req(
prover_did=context.my_did,
cred_offer=offer_body,
cred_def=cred_def_body,
master_secret_id=link_secret_name
)
await self.__log(
'Cred request artifacts',
dict(cred_request=cred_request, metadata=metadata)
)
self.cred_metadata = json.dumps(metadata)
# Build request
data = {
"@type": IssueCredentialProtocol.REQUEST_CREDENTIAL,
"~thread": {Message.THREAD_ID: msg.id, Message.SENDER_ORDER: 0},
"requests~attach": [
{
"@id": uuid.uuid4().hex,
"mime-type": "application/json",
"data": {
"base64": base64.b64encode(json.dumps(cred_request).encode()).decode()
}
},
]
}
if self.comment:
data['comment'] = self.comment
message_request = Message(data)
await IssueCredentialProtocol.send_message_to_agent(self.to, message_request, self.get_wallet())
await self.__log('Send credential request', message_request.to_dict())
self.status = IssueCredentialStatus.RequestCredential
else:
await self.__send_problem_report(
problem_code=IssueCredentialProtocol.OFFER_PROCESSING_ERROR,
problem_str='Impossible state machine state',
context=context,
thread_id=msg.id
)
raise ImpossibleStatus
elif msg.type == msg_type_issue_credential:
if self.status == IssueCredentialStatus.RequestCredential:
await self.__log('Received Issue credential', msg.to_dict())
cred_attaches = msg.to_dict().get('credentials~attach', None)
if isinstance(cred_attaches, dict):
cred_attaches = [cred_attaches]
for cred_attach in cred_attaches:
cred_body = cred_attach.get('data').get('base64')
cred_body = base64.b64decode(cred_body)
cred_body = json.loads(cred_body.decode())
cred_def = json.loads(self.cred_def_buffer)
cred_id = cred_attach.get('@id', None)
# Store credential
cred_older = await self.get_wallet().prover_get_credential(cred_id)
if cred_older:
# Delete older credential
await self.get_wallet().prover_delete_credential(cred_id)
cred_id = await self.get_wallet().prover_store_credential(
cred_req_metadata=json.loads(self.cred_metadata),
cred=cred_body,
cred_def=cred_def,
rev_reg_def=self.rev_reg_def,
cred_id=cred_id
)
await self.__log('Store credential with id: %s' % str(cred_id), cred_body)
ack_message_id = msg.to_dict().get('~please_ack', {}).get('message_id', None)
if not ack_message_id:
ack_message_id = msg.id
ack = AckMessage.build(ack_message_id)
# Cardea back-compatibility
ack['@type'] = IssueCredentialProtocol.CREDENTIAL_ACK
await IssueCredentialProtocol.send_message_to_agent(self.to, ack, self.get_wallet())
await self.__log('Send ACK', ack.to_dict())
await self.done()
else:
await self.__send_problem_report(
problem_code=IssueCredentialProtocol.ISSUE_PROCESSING_ERROR,
problem_str='Impossible state machine state',
context=context,
thread_id=msg.id
)
raise ImpossibleStatus
elif msg.type == IssueCredentialProtocol.PROBLEM_REPORT:
await self.__log('Received problem report', msg.to_dict())
await self.done()
else:
await self.__send_problem_report(
problem_code=IssueCredentialProtocol.RESPONSE_FOR_UNKNOWN_REQUEST,
problem_str='Unknown message type',
context=context,
thread_id=msg.id
)
except Exception as e:
if not isinstance(e, MachineIsDone):
logging.exception('Base machine terminated with exception')
await self.done()
async def done(self):
await self.__log('Done')
await super().done()
async def __validate_cred_offer(self, msg: Message, context: Context):
offer_attaches = msg.to_dict().get('offers~attach', None)
if isinstance(offer_attaches, dict):
offer_attaches = [offer_attaches]
if (not type(offer_attaches) is list) or (type(offer_attaches) is list and len(offer_attaches) == 0):
await self.__send_problem_report(
problem_code=IssueCredentialProtocol.OFFER_PROCESSING_ERROR,
problem_str='Expected offer~attach must contains credOffer and credDef',
context=context,
thread_id=msg.id
)
await self.done()
offer = offer_attaches[0]
offer_body = None
cred_def_body = None
for attach in offer_attaches:
raw_base64 = attach.get('data', {}).get('base64', None)
if raw_base64:
payload = json.loads(base64.b64decode(raw_base64).decode())
offer_fields = ['key_correctness_proof', 'nonce', 'schema_id', 'cred_def_id']
cred_def_fields = ['value', 'type', 'ver', 'schemaId', 'id', 'tag']
if all([field in payload.keys() for field in offer_fields]): # check if cred offer content
offer_body = {attr: val for attr, val in payload.items() if attr in offer_fields}
if all([field in payload.keys() for field in cred_def_fields]): # check if cred def content
cred_def_body = {attr: val for attr, val in payload.items() if attr in cred_def_fields}
if not offer_body:
await self.__send_problem_report(
problem_code=IssueCredentialProtocol.OFFER_PROCESSING_ERROR,
problem_str='Expected offer~attach must contains Payload with offer',
context=context,
thread_id=msg.id
)
await self.done()
if cred_def_body:
cred_def_id = cred_def_body['id']
await indy_sdk_utils.store_cred_def(self.get_wallet(), cred_def_id, cred_def_body)
else:
await self.__send_problem_report(
problem_code=IssueCredentialProtocol.OFFER_PROCESSING_ERROR,
problem_str='Expected offer~attach must contains Payload with cred_def data',
context=context,
thread_id=msg.id
)
await self.done()
attaches = msg.to_dict().get('~attach', None)
if attaches:
if isinstance(attaches, dict):
attaches = [attaches]
for attach in attaches:
if attach.get('@type', None) == IssueCredentialProtocol.ISSUER_SCHEMA_TYPE:
issuer_schema_body = attach['data']['json']
issuer_schema_id = issuer_schema_body['id']
await indy_sdk_utils.store_issuer_schema(self.get_wallet(), issuer_schema_id, issuer_schema_body)
return offer, offer_body, cred_def_body
async def __send_problem_report(self, problem_code: str, problem_str: str, context: Context, thread_id: str=None):
err_msg = await IssueCredentialProtocol.send_problem_report(
self.get_wallet(),
problem_code,
problem_str,
context,
thread_id
)
await self.__log('Send report problem', err_msg.to_dict())
async def __log(self, event: str, details: dict = None):
event_message = '%s (%s)' % (event, self.get_id())
await self.get_wallet().log(message=event_message, details=details)
| [
"[email protected]"
] | |
6f0edfece6342887493bb3cea706740038aa981b | 9b8e2992a38f591032997b5ced290fe1acc3ad94 | /assignment.py | 9f83cc955c406a0296f5a3b75f6f088b6468147c | [] | no_license | girishdhegde/aps-2020 | c694443c10d0d572c8022dad5a6ce735462aaa51 | fb43d8817ba16ff78f93a8257409d77dbc82ced8 | refs/heads/master | 2021-08-08T04:49:18.876187 | 2021-01-02T04:46:20 | 2021-01-02T04:46:20 | 236,218,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | cost = [[3, 2, 7],
[5, 1, 3],
[2, 7, 2]]
n = 3
dp = [float('inf') for i in range(2**n)]
dp[0] = 0
def count_set_bits(mask):
    # Kernighan's trick: each iteration clears the lowest set bit.
    cnt = 0
    while mask != 0:
        cnt += 1
        mask &= (mask - 1)
    return cnt
print(count_set_bits(7))
# Standard bitmask DP for min-cost assignment: dp[mask] is the cheapest way
# to assign the jobs in `mask` to the first popcount(mask) workers
# (completing the loop that was left commented out).
for mask in range(2 ** n):
    k = count_set_bits(mask)      # index of the next worker to assign
    for j in range(n):            # try giving job j to worker k
        if not mask & (1 << j):
            nxt = mask | (1 << j)
            dp[nxt] = min(dp[nxt], dp[mask] + cost[k][j])
print(dp[(1 << n) - 1])  # minimum total assignment cost | [
"[email protected]"
] | |
d0ffc2ce73a4358770fcbd0d3aea0a21813f5eeb | 005f02cb534bbf91fe634fcf401441e1179365c8 | /10-Django Level 2/10.2-projektDy/projektDy/wsgi.py | 18fe901d2208426855c0e488e7e4b4babad70dda | [] | no_license | Ruxhino-B/django-deployment-example | 220a39a456871a1bf42a64fd5b945731056fc7b9 | e19713ac1e11af202152ad20d7c3c94891a77e83 | refs/heads/master | 2020-04-18T02:21:10.505691 | 2020-01-06T14:18:18 | 2020-01-06T14:25:25 | 167,159,223 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | py | """
WSGI config for projektDy project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'projektDy.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
03829e363688475ccad8963022ab6bfa1f2ae6ee | b47f5ca0a51cf59427b7bd12e9c85064a1e13e03 | /easyci/commands/watch.py | 4969cb582bbd2f55f16519477a4ad48cafb03c24 | [
"MIT"
] | permissive | naphatkrit/easyci | a490b57e601bcad6d2022834809dd60cb0902e0c | 7aee8d7694fe4e2da42ce35b0f700bc840c8b95f | refs/heads/master | 2016-09-02T01:14:28.505230 | 2015-09-09T00:26:25 | 2015-09-09T00:26:25 | 41,396,486 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 653 | py | import click
from watchdog.observers import Observer
from easyci.file_system_events.tests_event_handler import TestsEventHandler
from easyci.utils import decorators
@click.command()
@click.pass_context
@decorators.print_markers
def watch(ctx):
"""Watch the directory for changes. Automatically run tests.
"""
vcs = ctx.obj['vcs']
event_handler = TestsEventHandler(vcs)
observer = Observer()
observer.schedule(event_handler, vcs.path, recursive=True)
observer.start()
click.echo('Watching directory `{path}`. Use ctrl-c to stop.'.format(path=vcs.path))
    while observer.is_alive():  # isAlive() was removed from threading in Python 3.9
        observer.join(timeout=1)
| [
"[email protected]"
] | |
2fe01f1669e635156e429787ec8e0f24864e4090 | bd2fb6aa0e25dcc3f6c1511007f15f63d0d9fb55 | /tests/functions/folding/test_addmod_mulmod.py | 50aa1d78ec53446c84cb634c1d65af92aea219fc | [
"Apache-2.0"
] | permissive | andrelfpinto/vyper | 4b26a88686518eca3a829c172dd01dcd34b242e4 | d9b73846aa14a6019faa4126ec7608acd05e480d | refs/heads/master | 2022-11-08T06:38:59.104585 | 2020-06-23T01:13:50 | 2020-06-23T01:13:50 | 274,272,650 | 0 | 0 | Apache-2.0 | 2020-06-23T00:39:05 | 2020-06-23T00:39:04 | null | UTF-8 | Python | false | false | 868 | py | import pytest
from hypothesis import assume, given, settings
from hypothesis import strategies as st
from vyper import ast as vy_ast
from vyper import functions as vy_fn
st_uint256 = st.integers(min_value=0, max_value=2 ** 256 - 1)
@pytest.mark.fuzzing
@settings(max_examples=50, deadline=1000)
@given(a=st_uint256, b=st_uint256, c=st_uint256)
@pytest.mark.parametrize('fn_name', ['uint256_addmod', 'uint256_mulmod'])
def test_modmath(get_contract, a, b, c, fn_name):
assume(c > 0)
source = f"""
@public
def foo(a: uint256, b: uint256, c: uint256) -> uint256:
return {fn_name}(a, b, c)
"""
contract = get_contract(source)
vyper_ast = vy_ast.parse_to_ast(f"{fn_name}({a}, {b}, {c})")
old_node = vyper_ast.body[0].value
new_node = vy_fn.DISPATCH_TABLE[fn_name].evaluate(old_node)
assert contract.foo(a, b, c) == new_node.value
| [
"[email protected]"
] | |
08b5a295b6ce6b6b955d67a08448ad20ee18b133 | bad62c2b0dfad33197db55b44efeec0bab405634 | /sdk/formrecognizer/azure-ai-formrecognizer/tests/test_frc_identity_documents_async.py | d90abd8f45f9a581b5f6a733d8ccab64198aad68 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | test-repo-billy/azure-sdk-for-python | 20c5a2486456e02456de17515704cb064ff19833 | cece86a8548cb5f575e5419864d631673be0a244 | refs/heads/master | 2022-10-25T02:28:39.022559 | 2022-10-18T06:05:46 | 2022-10-18T06:05:46 | 182,325,031 | 0 | 0 | MIT | 2019-07-25T22:28:52 | 2019-04-19T20:59:15 | Python | UTF-8 | Python | false | false | 8,085 | py | # coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import pytest
import functools
from io import BytesIO
from devtools_testutils.aio import recorded_by_proxy_async
from azure.core.exceptions import ServiceRequestError
from azure.core.credentials import AzureKeyCredential
from azure.ai.formrecognizer._generated.v2_1.models import AnalyzeOperationResult
from azure.ai.formrecognizer._response_handlers import prepare_prebuilt_models
from azure.ai.formrecognizer.aio import FormRecognizerClient
from azure.ai.formrecognizer import FormRecognizerApiVersion
from asynctestcase import AsyncFormRecognizerTest
from preparers import FormRecognizerPreparer
from preparers import GlobalClientPreparer as _GlobalClientPreparer
FormRecognizerClientPreparer = functools.partial(_GlobalClientPreparer, FormRecognizerClient)
class TestIdDocumentsAsync(AsyncFormRecognizerTest):
@FormRecognizerPreparer()
async def test_identity_document_bad_endpoint(self, **kwargs):
formrecognizer_test_api_key = kwargs.get("formrecognizer_test_api_key", None)
with open(self.identity_document_license_jpg, "rb") as fd:
my_file = fd.read()
with pytest.raises(ServiceRequestError):
client = FormRecognizerClient("http://notreal.azure.com", AzureKeyCredential(formrecognizer_test_api_key))
async with client:
poller = await client.begin_recognize_identity_documents(my_file)
@FormRecognizerPreparer()
@FormRecognizerClientPreparer()
async def test_damaged_file_bytes_fails_autodetect_content_type(self, **kwargs):
client = kwargs.pop("client")
damaged_pdf = b"\x50\x44\x46\x55\x55\x55" # doesn't match any magic file numbers
with pytest.raises(ValueError):
async with client:
poller = await client.begin_recognize_identity_documents(
damaged_pdf
)
@FormRecognizerPreparer()
@FormRecognizerClientPreparer()
async def test_damaged_file_bytes_io_fails_autodetect(self, **kwargs):
client = kwargs.pop("client")
damaged_pdf = BytesIO(b"\x50\x44\x46\x55\x55\x55") # doesn't match any magic file numbers
with pytest.raises(ValueError):
async with client:
poller = await client.begin_recognize_identity_documents(
damaged_pdf
)
@FormRecognizerPreparer()
@FormRecognizerClientPreparer()
async def test_passing_bad_content_type_param_passed(self, **kwargs):
client = kwargs.pop("client")
with open(self.identity_document_license_jpg, "rb") as fd:
my_file = fd.read()
with pytest.raises(ValueError):
async with client:
poller = await client.begin_recognize_identity_documents(
my_file,
content_type="application/jpeg"
)
@FormRecognizerPreparer()
@FormRecognizerClientPreparer()
async def test_auto_detect_unsupported_stream_content(self, **kwargs):
client = kwargs.pop("client")
with open(self.unsupported_content_py, "rb") as fd:
my_file = fd.read()
with pytest.raises(ValueError):
async with client:
poller = await client.begin_recognize_identity_documents(
my_file
)
@FormRecognizerPreparer()
@FormRecognizerClientPreparer()
@recorded_by_proxy_async
async def test_identity_document_stream_transform_jpg(self, client):
responses = []
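        # The raw-response hook below deserializes the service payload itself
        # and keeps both the raw AnalyzeOperationResult and the transformed
        # model, so the test can compare the transform against the wire data.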
def callback(raw_response, _, headers):
analyze_result = client._deserialize(AnalyzeOperationResult, raw_response)
extracted_id_document = prepare_prebuilt_models(analyze_result)
responses.append(analyze_result)
responses.append(extracted_id_document)
with open(self.identity_document_license_jpg, "rb") as fd:
my_file = fd.read()
async with client:
poller = await client.begin_recognize_identity_documents(
identity_document=my_file,
include_field_elements=True,
cls=callback
)
result = await poller.result()
raw_response = responses[0]
returned_model = responses[1]
id_document = returned_model[0]
actual = raw_response.analyze_result.document_results[0].fields
read_results = raw_response.analyze_result.read_results
document_results = raw_response.analyze_result.document_results
page_results = raw_response.analyze_result.page_results
self.assertFormFieldsTransformCorrect(id_document.fields, actual, read_results)
# check page range
assert id_document.page_range.first_page_number == document_results[0].page_range[0]
assert id_document.page_range.last_page_number == document_results[0].page_range[1]
# Check page metadata
self.assertFormPagesTransformCorrect(id_document.pages, read_results, page_results)
@FormRecognizerPreparer()
@FormRecognizerClientPreparer()
@recorded_by_proxy_async
async def test_identity_document_jpg_include_field_elements(self, client):
with open(self.identity_document_license_jpg, "rb") as fd:
id_document = fd.read()
async with client:
poller = await client.begin_recognize_identity_documents(id_document, include_field_elements=True)
result = await poller.result()
assert len(result) == 1
id_document = result[0]
self.assertFormPagesHasValues(id_document.pages)
for field in id_document.fields.values():
if field.name == "CountryRegion":
assert field.value == "USA"
continue
elif field.name == "Region":
assert field.value == "Washington"
else:
self.assertFieldElementsHasValues(field.value_data.field_elements, id_document.page_range.first_page_number)
@pytest.mark.live_test_only
@FormRecognizerPreparer()
@FormRecognizerClientPreparer()
async def test_identity_document_continuation_token(self, **kwargs):
client = kwargs.pop("client")
with open(self.identity_document_license_jpg, "rb") as fd:
id_document = fd.read()
async with client:
initial_poller = await client.begin_recognize_identity_documents(id_document)
cont_token = initial_poller.continuation_token()
poller = await client.begin_recognize_identity_documents(None, continuation_token=cont_token)
result = await poller.result()
assert result is not None
await initial_poller.wait() # necessary so azure-devtools doesn't throw assertion error
@FormRecognizerPreparer()
@FormRecognizerClientPreparer(client_kwargs={"api_version": FormRecognizerApiVersion.V2_0})
async def test_identity_document_v2(self, **kwargs):
client = kwargs.pop("client")
with open(self.identity_document_license_jpg, "rb") as fd:
id_document = fd.read()
with pytest.raises(ValueError) as e:
async with client:
await client.begin_recognize_identity_documents(id_document)
assert "Method 'begin_recognize_identity_documents' is only available for API version V2_1 and up" in str(e.value)
@FormRecognizerPreparer()
@FormRecognizerClientPreparer()
@recorded_by_proxy_async
async def test_pages_kwarg_specified(self, client):
with open(self.identity_document_license_jpg, "rb") as fd:
id_document = fd.read()
async with client:
poller = await client.begin_recognize_identity_documents(id_document, pages=["1"])
assert '1' == poller._polling_method._initial_response.http_response.request.query['pages']
result = await poller.result()
assert result
| [
"[email protected]"
] | |
e1d99bfb0a5b950802d604a203ed618117937fb1 | 55e13562203f2f24338a5e1f8bb543becf8df171 | /lighttpd/upload.py | eabb90c5e76f9a50fe642e503391fc9342dd1fe7 | [] | no_license | jvanz/container-images | 79cd1dd4ade141d733ec6923f1157c15159369ab | ff228722dcccb318def64d9bf485dc43ccafa0d8 | refs/heads/master | 2020-08-31T05:24:44.258927 | 2019-11-25T17:42:09 | 2019-11-25T17:42:09 | 218,603,819 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 547 | py | #!/usr/bin/python3
import cgi
import cgi
import cgitb
import os
form = cgi.FieldStorage()
msg = "Failed to upload file!"
# FieldStorage raises KeyError for a missing field, so guard the lookup.
filefield = form["file"] if "file" in form else None
if filefield is not None and filefield.filename:
    # Use only the basename so a crafted filename cannot escape /tmp.
    safe_name = os.path.basename(filefield.filename)
    with open(f"/tmp/{safe_name}", "wb") as f:
        f.write(filefield.file.read())
    msg = "File uploaded!"
elif filefield is not None:
    msg = f"Cannot find file field. {filefield.name} = {filefield.filename}"
print("Content-Type: text/html")
print()
print("<html>")
print("<body>")
print(f"<H1>{msg}</H1>")
print("</body>")
print("</html>")
| [
"[email protected]"
] | |
3a5d04a7a9e08a83b8fe983908ca0ffb84378af0 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_006/ch73_2020_06_08_22_39_00_045794.py | 1fffb73594f4be691ffc6cda3e85a35745493c0c | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | def remove_vogais(palavra):
vogais=["a", "e", "i", "o", "u"]
for i in palavra:
if i in vogais:
nova=palavra.replace(i, " ")
return nova
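# Illustrative check (not part of the original exercise):
#   remove_vogais("paralelepipedo")  ->  "prllppd"
#   remove_vogais("xyz")             ->  "xyz"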
| [
"[email protected]"
] | |
19e87d181cb3cd97cab210a0b8d5c7b162ce09f7 | 35f9def6e6d327d3a4a4f2959024eab96f199f09 | /developer/lab/ipython/PyTorch/PyTorch/test/test_optim.py | 54ae721f4c794e0b5146601035d2f47b86481cd5 | [
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0",
"BSD-2-Clause",
"CAL-1.0-Combined-Work-Exception",
"CAL-1.0",
"MIT",
"CC-BY-SA-4.0",
"LicenseRef-scancode-free-unknown"
] | permissive | arXiv-research/DevLab-III-1 | ec10aef27e1ca75f206fea11014da8784752e454 | c50cd2b9154c83c3db5e4a11b9e8874f7fb8afa2 | refs/heads/main | 2023-04-16T19:24:58.758519 | 2021-04-28T20:21:23 | 2021-04-28T20:21:23 | 362,599,929 | 2 | 0 | MIT | 2021-04-28T20:36:11 | 2021-04-28T20:36:11 | null | UTF-8 | Python | false | false | 94,444 | py | import warnings
import math
import unittest
import functools
from copy import deepcopy
import torch
from torch._six import inf
import torch.optim as optim
import torch.optim._multi_tensor as optim_mt
import torch.nn.functional as F
from torch.optim import SGD
from torch.autograd import Variable
from torch import sparse
from torch.optim.lr_scheduler import LambdaLR, MultiplicativeLR, StepLR, \
MultiStepLR, ExponentialLR, CosineAnnealingLR, ReduceLROnPlateau, \
_LRScheduler, CyclicLR, CosineAnnealingWarmRestarts, OneCycleLR
from torch.optim.swa_utils import AveragedModel, SWALR, update_bn
from torch.testing._internal.common_utils import TestCase, run_tests, TEST_WITH_UBSAN, load_tests, \
skipIfRocm
# load_tests from common_utils is used to automatically filter tests for
# sharding on sandcastle. This line silences flake warnings
load_tests = load_tests
def rosenbrock(tensor):
x, y = tensor
return (1 - x) ** 2 + 100 * (y - x ** 2) ** 2
def drosenbrock(tensor):
x, y = tensor
return torch.tensor((-400 * x * (y - x ** 2) - 2 * (1 - x), 200 * (y - x ** 2)))
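# The analytic gradient above follows from
#   f(x, y) = (1 - x)**2 + 100 * (y - x**2)**2
#   df/dx = -400 * x * (y - x**2) - 2 * (1 - x)
#   df/dy = 200 * (y - x**2)
# A quick central-difference sanity check of drosenbrock (illustrative only,
# not used by the tests below):
def _check_drosenbrock(point=(1.5, 1.5), eps=1e-6):
    t = torch.tensor(point, dtype=torch.float64)
    num = torch.zeros(2, dtype=torch.float64)
    for k in range(2):
        d = torch.zeros(2, dtype=torch.float64)
        d[k] = eps
        num[k] = (rosenbrock(t + d) - rosenbrock(t - d)) / (2 * eps)
    return torch.allclose(num, drosenbrock(t).double(), atol=1e-3)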
class TestOptim(TestCase):
exact_dtype = True
def _test_rosenbrock_sparse(self, constructor, scheduler_constructors=None,
sparse_only=False):
if scheduler_constructors is None:
scheduler_constructors = []
params_t = torch.tensor([1.5, 1.5])
params = Variable(params_t, requires_grad=True)
optimizer = constructor([params])
schedulers = []
for scheduler_constructor in scheduler_constructors:
schedulers.append(scheduler_constructor(optimizer))
if not sparse_only:
params_c = Variable(params_t.clone(), requires_grad=True)
optimizer_c = constructor([params_c])
solution = torch.tensor([1, 1])
initial_dist = params.data.dist(solution)
def eval(params, sparse_grad, w):
# Depending on w, provide only the x or y gradient
optimizer.zero_grad()
loss = rosenbrock(params)
loss.backward()
grad = drosenbrock(params.data)
# NB: We torture test the optimizer by returning an
# uncoalesced sparse tensor
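            # "Uncoalesced" means duplicate indices are allowed; coalescing
            # sums their values, so the two halves built below (v/4 and
            # v - v/4 at the same index) still add up to the true gradient
            # component.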
if w:
i = torch.LongTensor([[0, 0]])
x = grad[0]
v = torch.tensor([x / 4., x - x / 4.])
else:
i = torch.LongTensor([[1, 1]])
y = grad[1]
v = torch.tensor([y - y / 4., y / 4.])
x = sparse.DoubleTensor(i, v, torch.Size([2])).to(dtype=v.dtype)
with torch.no_grad():
if sparse_grad:
params.grad = x
else:
params.grad = x.to_dense()
return loss
for i in range(2000):
# Do cyclic coordinate descent
w = i % 2
optimizer.step(functools.partial(eval, params, True, w))
for scheduler in schedulers:
if isinstance(scheduler, ReduceLROnPlateau):
scheduler.step(rosenbrock(params))
else:
scheduler.step()
if not sparse_only:
optimizer_c.step(functools.partial(eval, params_c, False, w))
self.assertEqual(params.data, params_c.data)
self.assertLessEqual(params.data.dist(solution), initial_dist)
def _test_basic_cases_template(self, weight, bias, input, constructor, scheduler_constructors):
weight = Variable(weight, requires_grad=True)
bias = Variable(bias, requires_grad=True)
input = Variable(input)
optimizer = constructor(weight, bias)
schedulers = []
for scheduler_constructor in scheduler_constructors:
schedulers.append(scheduler_constructor(optimizer))
# to check if the optimizer can be printed as a string
optimizer.__repr__()
def fn():
optimizer.zero_grad()
y = weight.mv(input)
if y.is_cuda and bias.is_cuda and y.get_device() != bias.get_device():
y = y.cuda(bias.get_device())
loss = (y + bias).pow(2).sum()
loss.backward()
return loss
initial_value = fn().item()
for _i in range(200):
for scheduler in schedulers:
if isinstance(scheduler, ReduceLROnPlateau):
val_loss = fn()
scheduler.step(val_loss)
else:
scheduler.step()
optimizer.step(fn)
self.assertLess(fn().item(), initial_value)
def _test_state_dict(self, weight, bias, input, constructor):
weight = Variable(weight, requires_grad=True)
bias = Variable(bias, requires_grad=True)
input = Variable(input)
def fn_base(optimizer, weight, bias):
optimizer.zero_grad()
i = input_cuda if weight.is_cuda else input
loss = (weight.mv(i) + bias).pow(2).sum()
loss.backward()
return loss
optimizer = constructor(weight, bias)
fn = functools.partial(fn_base, optimizer, weight, bias)
# Prime the optimizer
for _i in range(20):
optimizer.step(fn)
# Clone the weights and construct new optimizer for them
weight_c = Variable(weight.data.clone(), requires_grad=True)
bias_c = Variable(bias.data.clone(), requires_grad=True)
optimizer_c = constructor(weight_c, bias_c)
fn_c = functools.partial(fn_base, optimizer_c, weight_c, bias_c)
# Load state dict
state_dict = deepcopy(optimizer.state_dict())
state_dict_c = deepcopy(optimizer.state_dict())
optimizer_c.load_state_dict(state_dict_c)
# Run both optimizations in parallel
for _i in range(20):
optimizer.step(fn)
optimizer_c.step(fn_c)
self.assertEqual(weight, weight_c)
self.assertEqual(bias, bias_c)
# Make sure state dict wasn't modified
self.assertEqual(state_dict, state_dict_c)
# Make sure state dict is deterministic with equal but not identical parameters
self.assertEqual(optimizer.state_dict(), optimizer_c.state_dict())
# Make sure repeated parameters have identical representation in state dict
optimizer_c.param_groups.extend(optimizer_c.param_groups)
self.assertEqual(optimizer.state_dict()['param_groups'][-1],
optimizer_c.state_dict()['param_groups'][-1])
# Check that state dict can be loaded even when we cast parameters
# to a different type and move to a different device.
if not torch.cuda.is_available():
return
input_cuda = Variable(input.data.float().cuda())
weight_cuda = Variable(weight.data.float().cuda(), requires_grad=True)
bias_cuda = Variable(bias.data.float().cuda(), requires_grad=True)
optimizer_cuda = constructor(weight_cuda, bias_cuda)
fn_cuda = functools.partial(fn_base, optimizer_cuda, weight_cuda, bias_cuda)
state_dict = deepcopy(optimizer.state_dict())
state_dict_c = deepcopy(optimizer.state_dict())
optimizer_cuda.load_state_dict(state_dict_c)
# Make sure state dict wasn't modified
self.assertEqual(state_dict, state_dict_c)
for _i in range(20):
optimizer.step(fn)
optimizer_cuda.step(fn_cuda)
self.assertEqual(weight, weight_cuda)
self.assertEqual(bias, bias_cuda)
# validate deepcopy() copies all public attributes
def getPublicAttr(obj):
return set(k for k in obj.__dict__ if not k.startswith('_'))
self.assertEqual(getPublicAttr(optimizer), getPublicAttr(deepcopy(optimizer)))
def _test_basic_cases(self, constructor, scheduler_constructors=None,
ignore_multidevice=False):
if scheduler_constructors is None:
scheduler_constructors = []
self._test_state_dict(
torch.randn(10, 5),
torch.randn(10),
torch.randn(5),
constructor
)
self._test_basic_cases_template(
torch.randn(10, 5),
torch.randn(10),
torch.randn(5),
constructor,
scheduler_constructors
)
# non-contiguous parameters
self._test_basic_cases_template(
torch.randn(10, 5, 2)[..., 0],
torch.randn(10, 2)[..., 0],
torch.randn(5),
constructor,
scheduler_constructors
)
# CUDA
if not torch.cuda.is_available():
return
self._test_basic_cases_template(
torch.randn(10, 5).cuda(),
torch.randn(10).cuda(),
torch.randn(5).cuda(),
constructor,
scheduler_constructors
)
# Multi-GPU
if not torch.cuda.device_count() > 1 or ignore_multidevice:
return
self._test_basic_cases_template(
torch.randn(10, 5).cuda(0),
torch.randn(10).cuda(1),
torch.randn(5).cuda(0),
constructor,
scheduler_constructors
)
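    # The helpers below build the per-parameter-group dict form that the
    # optimizer constructors accept in place of a flat parameter list.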
def _build_params_dict(self, weight, bias, **kwargs):
return [{'params': [weight]}, dict(params=[bias], **kwargs)]
def _build_params_dict_single(self, weight, bias, **kwargs):
return [dict(params=bias, **kwargs)]
def test_sgd(self):
for optimizer in [optim.SGD, optim_mt.SGD]:
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-2),
lr=1e-3)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict_single(weight, bias, lr=1e-2),
lr=1e-3)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict_single(weight, bias, lr=1e-2))
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3),
[lambda opt: StepLR(opt, gamma=0.9, step_size=10)]
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3),
[lambda opt: StepLR(opt, gamma=0.9, step_size=10),
lambda opt: ReduceLROnPlateau(opt)]
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3),
[lambda opt: StepLR(opt, gamma=0.99, step_size=10),
lambda opt: ExponentialLR(opt, gamma=0.99),
lambda opt: ReduceLROnPlateau(opt)]
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3, momentum=1)
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3, momentum=1, weight_decay=1)
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], nesterov=True, lr=1e-3, momentum=1, weight_decay=1)
)
with self.assertRaisesRegex(ValueError, "Invalid momentum value: -0.5"):
optimizer(None, lr=1e-2, momentum=-0.5)
def test_sgd_sparse(self):
for optimizer in [optim.SGD, optim_mt.SGD]:
self._test_rosenbrock_sparse(
lambda params: optimizer(params, lr=5e-3)
)
self._test_rosenbrock_sparse(
lambda params: optimizer(params, lr=0.005),
[lambda opt: StepLR(opt, gamma=0.99999, step_size=300)]
)
def test_multi_tensor_optimizers(self):
if not torch.cuda.is_available():
return
optimizer_pairs_with_flags = [
((optim.Adam, optim._multi_tensor.Adam), dict(weight_decay=1., amsgrad=True)),
((optim.Adam, optim._multi_tensor.Adam), dict(weight_decay=1., amsgrad=False)),
((optim.Adam, optim._multi_tensor.Adam), dict(weight_decay=0., amsgrad=True)),
((optim.Adam, optim._multi_tensor.Adam), dict(weight_decay=0., amsgrad=False)),
((optim.AdamW, optim._multi_tensor.AdamW), dict(weight_decay=1., amsgrad=True)),
((optim.AdamW, optim._multi_tensor.AdamW), dict(weight_decay=1., amsgrad=False)),
((optim.AdamW, optim._multi_tensor.AdamW), dict(weight_decay=0., amsgrad=True)),
((optim.AdamW, optim._multi_tensor.AdamW), dict(weight_decay=0., amsgrad=False)),
((optim.SGD, optim._multi_tensor.SGD), dict(lr=0.2, momentum=1, dampening=0, weight_decay=1, nesterov=True)),
((optim.SGD, optim._multi_tensor.SGD), dict(lr=0.2, momentum=1, dampening=0.5, weight_decay=1, nesterov=False)),
((optim.RMSprop, optim._multi_tensor.RMSprop), dict(weight_decay=1, momentum=1, centered=True)),
((optim.RMSprop, optim._multi_tensor.RMSprop), dict(weight_decay=1, momentum=0, centered=True)),
((optim.RMSprop, optim._multi_tensor.RMSprop), dict(weight_decay=1, momentum=1, centered=False)),
((optim.RMSprop, optim._multi_tensor.RMSprop), dict(weight_decay=0, momentum=1, centered=False)),
((optim.Rprop, optim._multi_tensor.Rprop), dict(lr=1e-2, etas=(0.5, 1.2), step_sizes=(1e-6, 50))),
((optim.ASGD, optim._multi_tensor.ASGD), dict(weight_decay=0)),
((optim.ASGD, optim._multi_tensor.ASGD), dict(weight_decay=1)),
((optim.Adamax, optim._multi_tensor.Adamax), dict(weight_decay=0)),
((optim.Adamax, optim._multi_tensor.Adamax), dict(weight_decay=1)),
((optim.Adadelta, optim._multi_tensor.Adadelta), dict(weight_decay=0)),
((optim.Adadelta, optim._multi_tensor.Adadelta), dict(weight_decay=1)),
]
kIterations = 11
device = 'cuda'
for optimizers, params in optimizer_pairs_with_flags:
res = []
for opt in optimizers:
weight = torch.tensor([[-0.2109, -0.4976], [-0.1413, -0.3420], [-0.2524, 0.6976]],
dtype=torch.float64, device=device, requires_grad=True)
bias = torch.tensor([-0.1085, -0.2979, 0.6892], dtype=torch.float64, device=device, requires_grad=True)
weight2 = torch.tensor([[-0.0508, -0.3941, -0.2843]],
dtype=torch.float64, device=device, requires_grad=True)
bias2 = torch.tensor([-0.0711], dtype=torch.float64, device=device, requires_grad=True)
input = torch.tensor([0.1, 0.2, 0.3, 0.4, 0.5, 0.6], dtype=torch.float64, device=device).reshape(3, 2)
model = torch.nn.Sequential(torch.nn.Linear(2, 3),
torch.nn.Sigmoid(),
torch.nn.Linear(3, 1),
torch.nn.Sigmoid())
model.to(torch.float64).to(device)
pretrained_dict = model.state_dict()
pretrained_dict['0.weight'] = weight
pretrained_dict['0.bias'] = bias
pretrained_dict['2.weight'] = weight2
pretrained_dict['2.bias'] = bias2
model.load_state_dict(pretrained_dict)
optimizer = opt(model.parameters(), **params)
                for i in range(kIterations):
                    optimizer.zero_grad()
                    output = model(input)
                    loss = output.sum()
                    loss.backward()
                    # `iter` is the builtin function, so the original
                    # `if iter == 0` was never true; use the loop index to
                    # drop one gradient on the first iteration and exercise
                    # parameters whose grad is None.
                    if i == 0:
                        model.parameters().__next__().grad = None
                    optimizer.step()
res.append(model.parameters())
for p1, p2 in zip(res[0], res[1]):
self.assertEqual(p1, p2)
def test_adam(self):
for optimizer in [optim.Adam, optim_mt.Adam]:
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-2),
lr=1e-3)
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3, amsgrad=True)
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3, weight_decay=0.1)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-2),
lr=1e-3, amsgrad=True)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-2),
lr=1e-3),
[lambda opt: ExponentialLR(opt, gamma=0.9)]
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3, amsgrad=True),
[lambda opt: ExponentialLR(opt, gamma=0.9),
lambda opt: ReduceLROnPlateau(opt)]
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-2),
lr=1e-3, amsgrad=True),
[lambda opt: StepLR(opt, gamma=0.9, step_size=10),
lambda opt: ReduceLROnPlateau(opt)]
)
with self.assertRaisesRegex(ValueError, "Invalid beta parameter at index 0: 1.0"):
optimizer(None, lr=1e-2, betas=(1.0, 0.0))
with self.assertRaisesRegex(ValueError, "Invalid weight_decay value: -1"):
optimizer(None, lr=1e-2, weight_decay=-1)
def test_adamw(self):
for optimizer in [optim.AdamW, optim_mt.AdamW]:
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-2),
lr=1e-3)
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3, weight_decay=1)
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3, weight_decay=1, amsgrad=True)
)
with self.assertRaisesRegex(ValueError, "Invalid weight_decay value: -1"):
optimizer(None, lr=1e-2, weight_decay=-1)
def test_sparse_adam(self):
self._test_rosenbrock_sparse(
lambda params: optim.SparseAdam(params, lr=4e-2),
[],
True
)
with self.assertRaisesRegex(ValueError, "Invalid beta parameter at index 0: 1.0"):
optim.SparseAdam(None, lr=1e-2, betas=(1.0, 0.0))
with self.assertRaisesRegex(ValueError, "SparseAdam requires dense parameter tensors"):
optim.SparseAdam([torch.zeros(3, layout=torch.sparse_coo)])
with self.assertRaisesRegex(ValueError, "SparseAdam requires dense parameter tensors"):
optim.SparseAdam([{"params": [torch.zeros(3, layout=torch.sparse_coo)]}])
# ROCm precision is too low to pass this test
@skipIfRocm
def test_adadelta(self):
for optimizer in [optim.Adadelta, optim_mt.Adadelta]:
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias])
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, rho=0.95))
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, rho=0.95)),
[lambda opt: StepLR(opt, gamma=0.9, step_size=10),
lambda opt: ReduceLROnPlateau(opt)]
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], weight_decay=1)
)
with self.assertRaisesRegex(ValueError, "Invalid rho value: 1.1"):
optimizer(None, lr=1e-2, rho=1.1)
def test_adagrad(self):
self._test_basic_cases(
lambda weight, bias: optim.Adagrad([weight, bias], lr=1e-1)
)
self._test_basic_cases(
lambda weight, bias: optim.Adagrad([weight, bias], lr=1e-1,
initial_accumulator_value=0.1)
)
self._test_basic_cases(
lambda weight, bias: optim.Adagrad(
self._build_params_dict(weight, bias, lr=1e-2),
lr=1e-1)
)
self._test_basic_cases(
lambda weight, bias: optim.Adagrad(
self._build_params_dict(weight, bias, lr=1e-2),
lr=1e-1),
[lambda opt: ReduceLROnPlateau(opt)]
)
self._test_basic_cases(
lambda weight, bias: optim.Adagrad(
self._build_params_dict(weight, bias, lr=1e-2),
lr=1e-1),
[lambda opt: ReduceLROnPlateau(opt),
lambda opt: ExponentialLR(opt, gamma=0.99)]
)
with self.assertRaisesRegex(ValueError, "Invalid lr_decay value: -0.5"):
optim.Adagrad(None, lr=1e-2, lr_decay=-0.5)
def test_adagrad_sparse(self):
self._test_rosenbrock_sparse(
lambda params: optim.Adagrad(params, lr=1e-1)
)
self._test_rosenbrock_sparse(
lambda params: optim.Adagrad(params, lr=0.1),
[lambda opt: StepLR(opt, gamma=1 - 1e-5, step_size=500),
lambda opt: ReduceLROnPlateau(opt, threshold=1e-4)]
)
def test_adamax(self):
for optimizer in [optim.Adamax, optim_mt.Adamax]:
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-1)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-2),
lr=1e-1)
)
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-1, weight_decay=1)
)
with self.assertRaisesRegex(ValueError, "Invalid beta parameter at index 1: 1.0"):
optimizer(None, lr=1e-2, betas=(0.0, 1.0))
def test_rmsprop(self):
for optimizer in [optim.RMSprop, optim_mt.RMSprop]:
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-2)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-3),
lr=1e-2)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-3),
lr=1e-2, centered=True)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-3),
lr=1e-2, centered=True, momentum=0.1)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-3),
lr=1e-2, momentum=0.1)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-3),
lr=1e-2, momentum=0.1, weight_decay=1)
)
with self.assertRaisesRegex(ValueError, "Invalid momentum value: -1.0"):
optimizer(None, lr=1e-2, momentum=-1.0)
def test_asgd(self):
for optimizer in [optim.ASGD, optim_mt.ASGD]:
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3, t0=100)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-2),
lr=1e-3, t0=100)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-3),
lr=1e-2, weight_decay=1)
)
with self.assertRaisesRegex(ValueError, "Invalid weight_decay value: -0.5"):
optimizer(None, lr=1e-2, weight_decay=-0.5)
def test_rprop(self):
for optimizer in [optim.Rprop, optim_mt.Rprop]:
self._test_basic_cases(
lambda weight, bias: optimizer([weight, bias], lr=1e-3)
)
self._test_basic_cases(
lambda weight, bias: optimizer(
self._build_params_dict(weight, bias, lr=1e-2),
lr=1e-3)
)
with self.assertRaisesRegex(ValueError, "Invalid eta values: 1.0, 0.5"):
optimizer(None, lr=1e-2, etas=(1.0, 0.5))
def test_lbfgs(self):
self._test_basic_cases(
lambda weight, bias: optim.LBFGS([weight, bias]),
ignore_multidevice=True
)
self._test_basic_cases(
lambda weight, bias: optim.LBFGS([weight, bias], line_search_fn="strong_wolfe"),
ignore_multidevice=True
)
@unittest.skipIf(TEST_WITH_UBSAN, "division-by-zero error with UBSAN")
def test_lbfgs_return_type(self):
params = [torch.randn(10, 5), torch.randn(10)]
opt1 = optim.LBFGS(params, 0.01, tolerance_grad=inf)
opt2 = optim.LBFGS(params, 0.01, tolerance_grad=-inf)
def closure():
return torch.tensor([10])
res1 = opt1.step(closure)
res2 = opt2.step(closure)
self.assertEqual(type(res1), type(res2))
def test_invalid_param_type(self):
with self.assertRaises(TypeError):
optim.SGD(Variable(torch.randn(5, 5)), lr=3)
def test_duplicate_params_in_param_group(self):
param = Variable(torch.randn(5, 5))
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
optim.SGD([param, param], lr=0.1)
self.assertEqual(len(w), 1)
self.assertIn('a parameter group with duplicate parameters', str(w[0].message))
def test_no_grad_for_all_params(self):
param = torch.randn(5, 5, requires_grad=False)
optimizer_list = [
optim.Adadelta,
optim.AdamW,
optim.Adam,
optim.Adagrad,
optim.Adamax,
optim.RMSprop,
optim.SGD,
optim.SparseAdam,
optim.ASGD,
]
for optim_ctr in optimizer_list:
opt = optim_ctr([param, param], lr=0.1)
# make sure step can still run even if
# all params have no grad
opt.step()
class SchedulerTestNet(torch.nn.Module):
def __init__(self):
super(SchedulerTestNet, self).__init__()
self.conv1 = torch.nn.Conv2d(1, 1, 1)
self.conv2 = torch.nn.Conv2d(1, 1, 1)
def forward(self, x):
return self.conv2(F.relu(self.conv1(x)))
class LambdaLRTestObject:
def __init__(self, value):
self.value = value
def __call__(self, epoch):
return self.value * epoch
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.__dict__ == other.__dict__
else:
return False
class TestLRScheduler(TestCase):
exact_dtype = True
def setUp(self):
super(TestLRScheduler, self).setUp()
self.net = SchedulerTestNet()
self.opt = SGD(
[{'params': self.net.conv1.parameters()}, {'params': self.net.conv2.parameters(), 'lr': 0.5}],
lr=0.05)
def test_error_when_getlr_has_epoch(self):
class MultiStepLR(torch.optim.lr_scheduler._LRScheduler):
def __init__(self, optimizer, gamma, milestones, last_epoch=-1):
self.init_lr = [group['lr'] for group in optimizer.param_groups]
self.gamma = gamma
self.milestones = milestones
super().__init__(optimizer, last_epoch)
def get_lr(self, step):
global_step = self.last_epoch
gamma_power = ([0] + [i + 1 for i, m in enumerate(self.milestones) if global_step >= m])[-1]
return [init_lr * (self.gamma ** gamma_power) for init_lr in self.init_lr]
optimizer = torch.optim.SGD([torch.rand(1)], lr=1)
with self.assertRaises(TypeError):
scheduler = MultiStepLR(optimizer, gamma=1, milestones=[10, 20])
def test_no_cyclic_references(self):
import gc
param = Variable(torch.empty(10), requires_grad=True)
optim = SGD([param], lr=0.5)
scheduler = LambdaLR(optim, lambda epoch: 1.0)
del scheduler
# Prior to Python 3.7, local variables in a function will be referred by the current frame.
import sys
if sys.version_info < (3, 7):
import inspect
referrers = gc.get_referrers(optim)
self.assertTrue(
len(referrers) == 1 and referrers[0] is inspect.currentframe(),
"Optimizer should contain no cyclic references (except current frame)")
del referrers
else:
self.assertTrue(
len(gc.get_referrers(optim)) == 0,
"Optimizer should contain no cyclic references")
gc.collect()
del optim
self.assertEqual(
gc.collect(), 0, msg="Optimizer should be garbage-collected on __del__")
def test_old_pattern_warning(self):
epochs = 35
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always") # allow any warning to be raised
scheduler = StepLR(self.opt, gamma=0.1, step_size=3)
self.assertTrue(len(ws) == 0, "No warning should be raised")
def old_pattern():
for _ in range(epochs):
scheduler.step()
self.opt.step()
self.assertWarnsRegex(UserWarning, r'how-to-adjust-learning-rate', old_pattern)
def test_old_pattern_warning_with_arg(self):
epochs = 35
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always") # allow any warning to be raised
scheduler = StepLR(self.opt, gamma=0.1, step_size=3)
self.assertTrue(len(ws) == 0, "No warning should be raised")
def old_pattern2():
for _ in range(epochs):
scheduler.step()
self.opt.step()
self.assertWarnsRegex(UserWarning, r'how-to-adjust-learning-rate', old_pattern2)
def test_old_pattern_warning_resuming(self):
epochs = 35
for i, group in enumerate(self.opt.param_groups):
group['initial_lr'] = 0.01
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always") # allow any warning to be raised
scheduler = StepLR(self.opt, gamma=0.1, step_size=3, last_epoch=10)
self.assertTrue(len(ws) == 0, "No warning should be raised")
def old_pattern():
for _ in range(epochs):
scheduler.step()
self.opt.step()
self.assertWarnsRegex(UserWarning, r'how-to-adjust-learning-rate', old_pattern)
def test_old_pattern_warning_resuming_with_arg(self):
epochs = 35
for i, group in enumerate(self.opt.param_groups):
group['initial_lr'] = 0.01
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always") # allow any warning to be raised
scheduler = StepLR(self.opt, gamma=0.1, step_size=3, last_epoch=10)
self.assertTrue(len(ws) == 0, "No warning should be raised")
def old_pattern2():
for _ in range(epochs):
scheduler.step()
self.opt.step()
self.assertWarnsRegex(UserWarning, r'how-to-adjust-learning-rate', old_pattern2)
def test_old_pattern_warning_with_overridden_optim_step(self):
epochs = 35
for i, group in enumerate(self.opt.param_groups):
group['initial_lr'] = 0.01
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always") # allow any warning to be raised
scheduler = StepLR(self.opt, gamma=0.1, step_size=3, last_epoch=10)
self.assertTrue(len(ws) == 0, "No warning should be raised")
# emulate use-case with optimizer.step overridden
import types
old_step = self.opt.step
def new_step(o, *args, **kwargs):
retval = old_step(*args, **kwargs)
return retval
self.opt.step = types.MethodType(new_step, self.opt)
def old_pattern2():
for _ in range(epochs):
scheduler.step()
self.opt.step()
self.assertWarnsRegex(UserWarning, r'how-to-adjust-learning-rate', old_pattern2)
def test_new_pattern_no_warning(self):
epochs = 35
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always") # allow any warning to be raised
scheduler = StepLR(self.opt, gamma=0.1, step_size=3)
self.assertTrue(len(ws) == 0, "No warning should be raised")
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always") # allow any warning to be raised
for _ in range(epochs):
self.opt.step()
scheduler.step()
self.assertTrue(len(ws) == 0, "No warning should be raised")
def test_new_pattern_no_warning_with_arg(self):
epochs = 35
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always") # allow any warning to be raised
scheduler = StepLR(self.opt, gamma=0.1, step_size=3)
self.assertTrue(len(ws) == 0, "No warning should be raised")
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always") # allow any warning to be raised
for _ in range(epochs):
self.opt.step()
scheduler.step()
self.assertTrue(len(ws) == 0, "No warning should be raised")
def test_new_pattern_no_warning_with_overridden_optim_step(self):
epochs = 35
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter("always") # allow any warning to be raised
scheduler = StepLR(self.opt, gamma=0.1, step_size=3)
self.assertTrue(len(ws) == 0, "No warning should be raised")
# emulate use-case with optimizer.step overridden
import types
old_step = self.opt.step
def new_step(o, *args, **kwargs):
retval = old_step(*args, **kwargs)
return retval
self.opt.step = types.MethodType(new_step, self.opt)
def new_pattern():
for e in range(epochs):
self.opt.step()
scheduler.step()
self.assertWarnsRegex(UserWarning, r'`optimizer.step\(\)` has been overridden', new_pattern)
def _test_lr_is_constant_for_constant_epoch(self, scheduler):
l = []
for _ in range(10):
scheduler.step(2)
l.append(self.opt.param_groups[0]['lr'])
self.assertEqual(min(l), max(l))
def test_step_lr_is_constant_for_constant_epoch(self):
scheduler = StepLR(self.opt, 2)
self._test_lr_is_constant_for_constant_epoch(scheduler)
def test_exponential_lr_is_constant_for_constant_epoch(self):
scheduler = ExponentialLR(self.opt, gamma=0.9)
self._test_lr_is_constant_for_constant_epoch(scheduler)
def test_step_lr(self):
        # lr = 0.05     if epoch < 3
        # lr = 0.005    if 3 <= epoch < 6
        # lr = 0.0005   if 6 <= epoch < 9
        # lr = 0.00005  if epoch >= 9
epochs = 10
single_targets = [0.05] * 3 + [0.005] * 3 + [0.0005] * 3 + [0.00005] * 3
targets = [single_targets, [x * epochs for x in single_targets]]
scheduler = StepLR(self.opt, gamma=0.1, step_size=3)
self._test(scheduler, targets, epochs)
def test_get_last_lr_step_lr(self):
from torch.nn import Parameter
epochs = 10
optimizer = torch.optim.SGD([Parameter(torch.randn(2, 2, requires_grad=True))], 0.1)
targets = [[0.1] * 3 + [0.01] * 3 + [0.001] * 3 + [0.0001]]
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, 3, gamma=0.1)
self._test_get_last_lr(scheduler, targets, epochs)
def test_get_last_lr_multi_step_lr(self):
# lr = 0.05 if epoch < 2
# lr = 0.005 if 2 <= epoch < 5
# lr = 0.0005 if 5 <= epoch < 9
# lr = 0.00005 if 9 <= epoch
epochs = 10
single_targets = [0.05] * 2 + [0.005] * 3 + [0.0005] * 4 + [0.00005] * 1
targets = [single_targets, [x * epochs for x in single_targets]]
scheduler = MultiStepLR(self.opt, gamma=0.1, milestones=[2, 5, 9])
self._test_get_last_lr(scheduler, targets, epochs)
def test_multi_step_lr(self):
# lr = 0.05 if epoch < 2
# lr = 0.005 if 2 <= epoch < 5
        # lr = 0.0005 if 5 <= epoch < 9
# lr = 0.00005 if epoch >= 9
epochs = 10
single_targets = [0.05] * 2 + [0.005] * 3 + [0.0005] * 4 + [0.00005] * 3
targets = [single_targets, [x * epochs for x in single_targets]]
scheduler = MultiStepLR(self.opt, gamma=0.1, milestones=[2, 5, 9])
self._test(scheduler, targets, epochs)
def test_multi_step_lr_with_epoch(self):
# lr = 0.05 if epoch < 2
# lr = 0.005 if 2 <= epoch < 5
        # lr = 0.0005 if 5 <= epoch < 9
# lr = 0.00005 if epoch >= 9
epochs = 10
single_targets = [0.05] * 2 + [0.005] * 3 + [0.0005] * 4 + [0.00005] * 3
targets = [single_targets, [x * epochs for x in single_targets]]
scheduler = MultiStepLR(self.opt, gamma=0.1, milestones=[2, 5, 9])
self._test_with_epoch(scheduler, targets, epochs)
def test_exp_lr(self):
epochs = 10
single_targets = [0.05 * (0.9 ** x) for x in range(epochs)]
targets = [single_targets, [x * epochs for x in single_targets]]
scheduler = ExponentialLR(self.opt, gamma=0.9)
self._test(scheduler, targets, epochs)
def test_cos_anneal_lr(self):
epochs = 10
eta_min = 1e-10
single_targets = [eta_min + (0.05 - eta_min) *
(1 + math.cos(math.pi * x / epochs)) / 2
for x in range(epochs)]
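        # i.e. the closed-form schedule eta_t = eta_min +
        # (eta_max - eta_min) * (1 + cos(pi * t / T_max)) / 2 that
        # CosineAnnealingLR implements.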
targets = [single_targets, [x * epochs for x in single_targets]]
scheduler = CosineAnnealingLR(self.opt, T_max=epochs, eta_min=eta_min)
self._test(scheduler, targets, epochs)
def test_closed_form_step_lr(self):
scheduler = StepLR(self.opt, gamma=0.1, step_size=3)
closed_form_scheduler = StepLR(self.opt, gamma=0.1, step_size=3)
self._test_against_closed_form(scheduler, closed_form_scheduler, 20)
def test_closed_form_multi_step_lr(self):
scheduler = MultiStepLR(self.opt, gamma=0.1, milestones=[2, 5, 9])
closed_form_scheduler = MultiStepLR(self.opt, gamma=0.1, milestones=[2, 5, 9])
self._test_against_closed_form(scheduler, closed_form_scheduler, 20)
def test_closed_form_exp_lr(self):
scheduler = ExponentialLR(self.opt, gamma=0.9)
closed_form_scheduler = ExponentialLR(self.opt, gamma=0.9)
self._test_against_closed_form(scheduler, closed_form_scheduler, 20)
def test_closed_form_cos_anneal_lr(self):
eta_min = 1e-10
epochs = 20
T_max = 5
scheduler = CosineAnnealingLR(self.opt, T_max=T_max, eta_min=eta_min)
closed_form_scheduler = CosineAnnealingLR(self.opt, T_max=T_max, eta_min=eta_min)
self._test_against_closed_form(scheduler, closed_form_scheduler, epochs)
def test_reduce_lr_on_plateau1(self):
epochs = 10
for param_group in self.opt.param_groups:
param_group['lr'] = 0.5
targets = [[0.5] * 20]
metrics = [10 - i * 0.0167 for i in range(20)]
scheduler = ReduceLROnPlateau(self.opt, threshold_mode='abs', mode='min',
threshold=0.01, patience=5, cooldown=5)
self._test_reduce_lr_on_plateau(scheduler, targets, metrics, epochs)
def test_reduce_lr_on_plateau2(self):
epochs = 22
for param_group in self.opt.param_groups:
param_group['lr'] = 0.5
targets = [[0.5] * 6 + [0.05] * 7 + [0.005] * 7 + [0.0005] * 2]
metrics = [10 - i * 0.0165 for i in range(22)]
scheduler = ReduceLROnPlateau(self.opt, patience=5, cooldown=0, threshold_mode='abs',
mode='min', threshold=0.1)
self._test_reduce_lr_on_plateau(scheduler, targets, metrics, epochs)
def test_reduce_lr_on_plateau3(self):
epochs = 22
for param_group in self.opt.param_groups:
param_group['lr'] = 0.5
targets = [[0.5] * (2 + 6) + [0.05] * (5 + 6) + [0.005] * 4]
metrics = [-0.8] * 2 + [-0.234] * 20
scheduler = ReduceLROnPlateau(self.opt, mode='max', patience=5, cooldown=5,
threshold_mode='abs')
self._test_reduce_lr_on_plateau(scheduler, targets, metrics, epochs)
def test_reduce_lr_on_plateau4(self):
epochs = 20
for param_group in self.opt.param_groups:
param_group['lr'] = 0.5
targets = [[0.5] * 20]
metrics = [1.5 * (1.025 ** i) for i in range(20)] # 1.025 > 1.1**0.25
scheduler = ReduceLROnPlateau(self.opt, mode='max', patience=3,
threshold_mode='rel', threshold=0.1)
self._test_reduce_lr_on_plateau(scheduler, targets, metrics, epochs)
def test_reduce_lr_on_plateau5(self):
epochs = 20
for param_group in self.opt.param_groups:
param_group['lr'] = 0.5
targets = [[0.5] * 6 + [0.05] * (5 + 6) + [0.005] * 4]
metrics = [1.5 * (1.005 ** i) for i in range(20)]
scheduler = ReduceLROnPlateau(self.opt, mode='max', threshold_mode='rel',
threshold=0.1, patience=5, cooldown=5)
self._test_reduce_lr_on_plateau(scheduler, targets, metrics, epochs)
def test_reduce_lr_on_plateau6(self):
epochs = 20
for param_group in self.opt.param_groups:
param_group['lr'] = 0.5
targets = [[0.5] * 20]
metrics = [1.5 * (0.85 ** i) for i in range(20)]
scheduler = ReduceLROnPlateau(self.opt, mode='min', threshold_mode='rel',
threshold=0.1)
self._test_reduce_lr_on_plateau(scheduler, targets, metrics, epochs)
def test_reduce_lr_on_plateau7(self):
epochs = 20
for param_group in self.opt.param_groups:
param_group['lr'] = 0.5
targets = [[0.5] * 6 + [0.05] * (5 + 6) + [0.005] * 4]
metrics = [1] * 7 + [0.6] + [0.5] * 12
scheduler = ReduceLROnPlateau(self.opt, mode='min', threshold_mode='rel',
threshold=0.1, patience=5, cooldown=5)
self._test_reduce_lr_on_plateau(scheduler, targets, metrics, epochs)
def test_reduce_lr_on_plateau8(self):
epochs = 20
for param_group in self.opt.param_groups:
param_group['lr'] = 0.5
targets = [[0.5] * 6 + [0.4] * 14, [0.5] * 6 + [0.3] * 14]
metrics = [1.5 * (1.005 ** i) for i in range(20)]
scheduler = ReduceLROnPlateau(self.opt, mode='max', threshold_mode='rel', min_lr=[0.4, 0.3],
threshold=0.1, patience=5, cooldown=5)
self._test_reduce_lr_on_plateau(scheduler, targets, metrics, epochs)
def test_compound_step_and_multistep_lr(self):
epochs = 10
schedulers = [None] * 2
schedulers[0] = StepLR(self.opt, gamma=0.1, step_size=3)
schedulers[1] = MultiStepLR(self.opt, gamma=0.1, milestones=[2, 5, 9])
targets = [[0.05] * 2 + [0.005] * 1 + [5e-4] * 2 + [5e-5] + [5e-6] * 3 + [5e-8]]
self._test(schedulers, targets, epochs)
def test_compound_step_and_exp_lr(self):
epochs = 10
schedulers = [None] * 2
single_targets = [0.05 * (0.9 ** x) for x in range(3)]
single_targets += [0.005 * (0.9 ** x) for x in range(3, 6)]
single_targets += [0.0005 * (0.9 ** x) for x in range(6, 9)]
single_targets += [0.00005 * (0.9 ** x) for x in range(9, 12)]
targets = [single_targets, [x * epochs for x in single_targets]]
schedulers[0] = StepLR(self.opt, gamma=0.1, step_size=3)
schedulers[1] = ExponentialLR(self.opt, gamma=0.9)
self._test(schedulers, targets, epochs)
def test_compound_exp_and_multistep_lr(self):
epochs = 10
schedulers = [None] * 2
single_targets = [0.05 * (0.9 ** x) for x in range(2)]
single_targets += [0.005 * (0.9 ** x) for x in range(2, 5)]
single_targets += [0.0005 * (0.9 ** x) for x in range(5, 9)]
single_targets += [0.00005 * (0.9 ** x) for x in range(9, 11)]
targets = [single_targets, [x * epochs for x in single_targets]]
schedulers[0] = MultiStepLR(self.opt, gamma=0.1, milestones=[2, 5, 9])
schedulers[1] = ExponentialLR(self.opt, gamma=0.9)
self._test(schedulers, targets, epochs)
def test_compound_cosanneal_and_step_lr(self):
epochs = 10
eta_min = 1e-10
single_targets = [eta_min + (0.05 - eta_min) *
(1 + math.cos(math.pi * x / epochs)) / 2
for x in range(epochs)]
single_targets = [x * 0.1 ** (i // 3) for i, x in enumerate(single_targets)]
targets = [single_targets, [x * epochs for x in single_targets]]
schedulers = [None] * 2
schedulers[0] = CosineAnnealingLR(self.opt, T_max=epochs, eta_min=eta_min)
schedulers[1] = StepLR(self.opt, gamma=0.1, step_size=3)
self._test(schedulers, targets, epochs)
def test_compound_cosanneal_and_multistep_lr(self):
epochs = 10
eta_min = 1e-10
single_targets = [eta_min + (0.05 - eta_min) *
(1 + math.cos(math.pi * x / epochs)) / 2
for x in range(epochs)]
multipliers = [1] * 2 + [0.1] * 3 + [0.01] * 4 + [0.001]
single_targets = [x * y for x, y in zip(single_targets, multipliers)]
targets = [single_targets, [x * epochs for x in single_targets]]
schedulers = [None] * 2
schedulers[0] = CosineAnnealingLR(self.opt, T_max=epochs, eta_min=eta_min)
schedulers[1] = MultiStepLR(self.opt, gamma=0.1, milestones=[2, 5, 9])
self._test(schedulers, targets, epochs)
def test_compound_cosanneal_and_exp_lr(self):
epochs = 10
eta_min = 1e-10
single_targets = [eta_min + (0.05 - eta_min) *
(1 + math.cos(math.pi * x / epochs)) / 2
for x in range(epochs)]
multipliers = [0.1 ** i for i in range(epochs)]
single_targets = [x * y for x, y in zip(single_targets, multipliers)]
targets = [single_targets, [x * epochs for x in single_targets]]
schedulers = [None] * 2
schedulers[0] = CosineAnnealingLR(self.opt, T_max=epochs, eta_min=eta_min)
schedulers[1] = ExponentialLR(self.opt, gamma=0.1)
self._test(schedulers, targets, epochs)
def test_compound_reduce_lr_on_plateau1(self):
epochs = 10
for param_group in self.opt.param_groups:
param_group['lr'] = 0.5
single_targets = [0.5] * 20
multipliers = [0.1 ** (i // 3) for i in range(20)]
single_targets = [x * y for x, y in zip(multipliers, single_targets)]
targets = [single_targets]
targets = targets[1:] # test runs step before checking lr
metrics = [10 - i * 0.0167 for i in range(20)]
schedulers = [None, None]
schedulers[0] = ReduceLROnPlateau(self.opt, threshold_mode='abs', mode='min',
threshold=0.01, patience=5, cooldown=5)
schedulers[1] = StepLR(self.opt, gamma=0.1, step_size=3)
self._test_reduce_lr_on_plateau(schedulers, targets, metrics, epochs)
def test_compound_reduce_lr_on_plateau2(self):
epochs = 22
for param_group in self.opt.param_groups:
param_group['lr'] = 0.5
single_targets = [0.5] * 6 + [0.05] * 7 + [0.005] * 7 + [0.0005] * 2
multipliers = [1] * 3 + [0.1] * 5 + [0.01] * 4 + [0.001] * 10
single_targets = [x * y for x, y in zip(single_targets, multipliers)]
targets = [single_targets]
targets = targets[1:] # test runs step before checking lr
metrics = [10 - i * 0.0165 for i in range(22)]
schedulers = [None] * 2
schedulers[0] = ReduceLROnPlateau(self.opt, patience=5, cooldown=0, threshold_mode='abs',
mode='min', threshold=0.1)
schedulers[1] = MultiStepLR(self.opt, gamma=0.1, milestones=[3, 8, 12])
self._test_reduce_lr_on_plateau(schedulers, targets, metrics, epochs)
def test_compound_reduce_lr_on_plateau3(self):
epochs = 22
for param_group in self.opt.param_groups:
param_group['lr'] = 0.5
single_targets = [0.5] * (2 + 6) + [0.05] * (5 + 6) + [0.005] * 4
multipliers = [0.1 ** i for i in range(epochs)]
single_targets = [x * y for x, y in zip(multipliers, single_targets)]
targets = [single_targets]
targets = targets[1:] # test runs step before checking lr
metrics = [-0.8] * 2 + [-0.234] * 20
schedulers = [None, None]
schedulers[0] = ReduceLROnPlateau(self.opt, mode='max', patience=5, cooldown=5,
threshold_mode='abs')
schedulers[1] = ExponentialLR(self.opt, gamma=0.1)
self._test_reduce_lr_on_plateau(schedulers, targets, metrics, epochs)
def test_compound_reduce_lr_on_plateau4(self):
epochs = 20
for param_group in self.opt.param_groups:
param_group['lr'] = 0.05
epochs = 10
eta_min = 1e-10
single_targets = [eta_min + (0.05 - eta_min) *
(1 + math.cos(math.pi * x / epochs)) / 2
for x in range(epochs)]
targets = [single_targets]
targets = targets[1:] # test runs step before checking lr
metrics = [1.5 * (1.025 ** i) for i in range(20)] # 1.025 > 1.1**0.25
schedulers = [None, None]
schedulers[0] = ReduceLROnPlateau(self.opt, mode='max', patience=3,
threshold_mode='rel', threshold=0.1)
schedulers[1] = CosineAnnealingLR(self.opt, epochs, eta_min)
self._test_reduce_lr_on_plateau(schedulers, targets, metrics, epochs)
def test_cycle_lr_invalid_mode(self):
with self.assertRaises(ValueError):
scheduler = CyclicLR(self.opt, base_lr=0, max_lr=0, mode="CATS")
def test_cycle_lr_triangular_mode_one_lr(self):
lr_target = [1, 2, 3, 4, 5, 4, 3, 2, 1, 2, 3]
momentum_target = [5, 4, 3, 2, 1, 2, 3, 4, 5, 4, 3]
lr_targets = [lr_target, lr_target]
momentum_targets = [momentum_target, momentum_target]
scheduler = CyclicLR(self.opt, base_lr=1, max_lr=5, step_size_up=4,
cycle_momentum=True, base_momentum=1, max_momentum=5,
mode='triangular')
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, len(lr_target))
def test_cycle_lr_triangular_mode_one_lr_no_momentum(self):
lr_target = [1, 2, 3, 4, 5, 4, 3, 2, 1, 2, 3]
lr_targets = [lr_target, lr_target]
momentum_target = [self.opt.defaults['momentum']] * len(lr_target)
momentum_targets = [momentum_target, momentum_target]
scheduler = CyclicLR(self.opt, base_lr=1, max_lr=5, step_size_up=4,
cycle_momentum=False, mode='triangular')
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, len(lr_target))
def test_cycle_lr_triangular2_mode_one_lr(self):
lr_target = [1, 2, 3, 4, 5, 4, 3, 2, 1, 1.5, 2.0, 2.5, 3.0, 2.5, 2.0, 1.5,
1, 1.25, 1.50, 1.75, 2.00, 1.75]
momentum_target = [5.0, 4.0, 3.0, 2.0, 1.0, 2.0, 3.0, 4.0, 5.0, 4.5, 4.0,
3.5, 3.0, 3.5, 4.0, 4.5, 5.0, 4.75, 4.5, 4.25, 4.0, 4.25]
lr_targets = [lr_target, lr_target]
momentum_targets = [momentum_target, momentum_target]
scheduler = CyclicLR(self.opt, base_lr=1, max_lr=5, step_size_up=4,
cycle_momentum=True, base_momentum=1, max_momentum=5,
mode='triangular2')
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, len(lr_target))
def test_cycle_lr_exp_range_mode_one_lr(self):
base_lr, max_lr = 1, 5
diff_lr = max_lr - base_lr
gamma = 0.9
xs = [0, 0.25, 0.5, 0.75, 1, 0.75, 0.50, 0.25, 0, 0.25, 0.5, 0.75, 1]
lr_target = [base_lr + x * diff_lr * gamma**i for i, x in enumerate(xs)]
momentum_target = [max_lr - x * diff_lr * gamma**i for i, x in enumerate(xs)]
lr_targets = [lr_target, lr_target]
momentum_targets = [momentum_target, momentum_target]
scheduler = CyclicLR(self.opt, base_lr=base_lr,
max_lr=max_lr, step_size_up=4,
cycle_momentum=True, base_momentum=base_lr, max_momentum=max_lr,
mode='exp_range', gamma=gamma)
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, len(lr_target))
def test_cycle_lr_triangular_mode(self):
lr_target_1 = [1, 2, 3, 4, 5, 4, 3, 2, 1, 2, 3]
lr_target_2 = [x + 1 for x in lr_target_1]
lr_targets = [lr_target_1, lr_target_2]
momentum_target_1 = [5, 4, 3, 2, 1, 2, 3, 4, 5, 4, 3]
momentum_target_2 = [x + 1 for x in momentum_target_1]
momentum_targets = [momentum_target_1, momentum_target_2]
scheduler = CyclicLR(self.opt, base_lr=[1, 2], max_lr=[5, 6], step_size_up=4,
cycle_momentum=True, base_momentum=[1, 2], max_momentum=[5, 6],
mode='triangular')
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, len(lr_target_1))
def test_cycle_lr_triangular2_mode(self):
lr_target_1 = [1, 2, 3, 4, 5, 4, 3, 2, 1, 1.5, 2.0, 2.5, 3.0, 2.5, 2.0, 1.5, 1,
1.25, 1.50, 1.75, 2.00, 1.75]
lr_target_2 = [x + 2 for x in lr_target_1]
lr_targets = [lr_target_1, lr_target_2]
momentum_target_1 = [5.0, 4.0, 3.0, 2.0, 1.0, 2.0, 3.0, 4.0, 5.0, 4.5, 4.0, 3.5,
3.0, 3.5, 4.0, 4.5, 5.0, 4.75, 4.5, 4.25, 4.0, 4.25]
momentum_target_2 = [x + 2 for x in momentum_target_1]
momentum_targets = [momentum_target_1, momentum_target_2]
scheduler = CyclicLR(self.opt, base_lr=[1, 3], max_lr=[5, 7], step_size_up=4,
cycle_momentum=True, base_momentum=[1, 3], max_momentum=[5, 7],
mode='triangular2')
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, len(lr_target_1))
def test_cycle_lr_exp_range_mode(self):
base_lr_1, max_lr_1 = 1, 5
base_lr_2, max_lr_2 = 5, 12
diff_lr_1 = max_lr_1 - base_lr_1
diff_lr_2 = max_lr_2 - base_lr_2
gamma = 0.9
xs = [0, 0.25, 0.5, 0.75, 1, 0.75, 0.50, 0.25, 0, 0.25, 0.5, 0.75, 1]
lr_target_1 = [base_lr_1 + x * diff_lr_1 * gamma**i for i, x in enumerate(xs)]
lr_target_2 = [base_lr_2 + x * diff_lr_2 * gamma**i for i, x in enumerate(xs)]
lr_targets = [lr_target_1, lr_target_2]
momentum_target_1 = [max_lr_1 - x * diff_lr_1 * gamma**i for i, x in enumerate(xs)]
momentum_target_2 = [max_lr_2 - x * diff_lr_2 * gamma**i for i, x in enumerate(xs)]
momentum_targets = [momentum_target_1, momentum_target_2]
scheduler = CyclicLR(self.opt, base_lr=[base_lr_1, base_lr_2],
max_lr=[max_lr_1, max_lr_2], step_size_up=4,
cycle_momentum=True, base_momentum=[base_lr_1, base_lr_2],
max_momentum=[max_lr_1, max_lr_2],
mode='exp_range', gamma=gamma)
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, len(lr_target_1))
def test_cycle_lr_triangular_mode_step_size_up_down(self):
lr_target = [1.0, 2.0, 3.0, 4.0, 5.0, 13.0 / 3, 11.0 / 3, 9.0 / 3, 7.0 / 3, 5.0 / 3, 1.0]
lr_targets = [lr_target, lr_target]
momentum_target = [5.0, 4.0, 3.0, 2.0, 1.0, 5.0 / 3, 7.0 / 3, 3.0, 11.0 / 3, 13.0 / 3, 5.0]
momentum_targets = [momentum_target, momentum_target]
scheduler = CyclicLR(self.opt, base_lr=1, max_lr=5,
step_size_up=4,
step_size_down=6,
cycle_momentum=True,
base_momentum=1, max_momentum=5,
mode='triangular')
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, len(lr_target))
def test_cycle_lr_triangular2_mode_step_size_up_down(self):
lr_base_target = ([
1.0, 3.0, 5.0, 13.0 / 3, 11.0 / 3, 9.0 / 3, 7.0 / 3, 5.0 / 3, 1.0, 2.0, 3.0, 8.0 / 3,
7.0 / 3, 6.0 / 3, 5.0 / 3, 4.0 / 3, 1.0, 3.0 / 2, 2.0, 11.0 / 6, 10.0 / 6, 9.0 / 6,
8.0 / 6, 7.0 / 6
])
momentum_base_target = ([
5.0, 3.0, 1.0, 5.0 / 3, 7.0 / 3, 3.0, 11.0 / 3, 13.0 / 3, 5.0, 4.0, 3.0, 10.0 / 3,
11.0 / 3, 4.0, 13.0 / 3, 14.0 / 3, 5.0, 4.5, 4.0, 25.0 / 6, 13.0 / 3, 4.5, 14.0 / 3,
29.0 / 6
])
deltas = [2 * i for i in range(0, 2)]
base_lrs = [1 + delta for delta in deltas]
max_lrs = [5 + delta for delta in deltas]
lr_targets = [[x + delta for x in lr_base_target] for delta in deltas]
momentum_targets = [[x + delta for x in momentum_base_target] for delta in deltas]
scheduler = CyclicLR(
self.opt,
base_lr=base_lrs,
max_lr=max_lrs,
step_size_up=2,
step_size_down=6,
cycle_momentum=True,
base_momentum=base_lrs,
max_momentum=max_lrs,
mode='triangular2')
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, len(lr_base_target))
def test_cycle_lr_exp_range_mode_step_size_up_down(self):
base_lr, max_lr = 1, 5
diff_lr = max_lr - base_lr
gamma = 0.9
xs = ([
0.0, 0.5, 1.0, 5.0 / 6, 4.0 / 6, 3.0 / 6, 2.0 / 6, 1.0 / 6, 0.0, 0.5, 1.0, 5.0 / 6,
4.0 / 6
])
lr_target = [base_lr + x * diff_lr * gamma**i for i, x in enumerate(xs)]
lr_targets = [lr_target, lr_target]
momentum_target = [max_lr - x * diff_lr * gamma**i for i, x in enumerate(xs)]
momentum_targets = [momentum_target, momentum_target]
scheduler = CyclicLR(self.opt, base_lr=base_lr, max_lr=max_lr,
step_size_up=2, step_size_down=6,
cycle_momentum=True, base_momentum=base_lr,
max_momentum=max_lr,
mode='exp_range', gamma=gamma)
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, len(lr_target))
def test_cycle_lr_with_momentumless_optimizer(self):
# Note [Temporarily set optimizer to Adam]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# The TestLRScheduler object carries around an SGD optimizer to avoid having to
# instantiate one for every test. This gets in the way for our very specific case
# in which we need to use Adam (or really any optimizer that doesn't use momentum)
# in order to test that the momentum bug in CyclicLR is fixed (the bug is described
# in more detail in https://github.com/pytorch/pytorch/issues/19003 ).
old_opt = self.opt
self.opt = optim.Adam(
[{'params': self.net.conv1.parameters()}, {'params': self.net.conv2.parameters(), 'lr': 0.5}],
lr=0.05)
lr_target = [1, 2, 3, 4, 5, 4, 3, 2, 1, 2, 3]
lr_targets = [lr_target, lr_target]
momentum_target = [None] * len(lr_target)
momentum_targets = [momentum_target, momentum_target]
scheduler = CyclicLR(self.opt, base_lr=1, max_lr=5, step_size_up=4,
cycle_momentum=False, mode='triangular')
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, len(lr_target))
self.opt = old_opt # set optimizer back to SGD
def test_cycle_lr_cycle_momentum_fail_with_momentumless_optimizer(self):
with self.assertRaises(ValueError):
adam_opt = optim.Adam(self.net.parameters())
scheduler = CyclicLR(adam_opt, base_lr=1, max_lr=5, cycle_momentum=True)
def test_onecycle_lr_invalid_anneal_strategy(self):
with self.assertRaises(ValueError):
scheduler = OneCycleLR(self.opt, max_lr=1e-3, total_steps=10, anneal_strategy="CATS")
def test_onecycle_lr_invalid_pct_start(self):
with self.assertRaises(ValueError):
scheduler = OneCycleLR(self.opt, max_lr=1e-3, total_steps=10, pct_start=1.1)
def test_onecycle_lr_cannot_calculate_total_steps(self):
with self.assertRaises(ValueError):
scheduler = OneCycleLR(self.opt, max_lr=1e-3)
def test_onecycle_lr_linear_annealing(self):
lr_target = [1, 13, 25, 21.5, 18, 14.5, 11, 7.5, 4, 0.5]
momentum_target = [22, 11.5, 1, 4, 7, 10, 13, 16, 19, 22]
lr_targets = [lr_target, lr_target]
momentum_targets = [momentum_target, momentum_target]
scheduler = OneCycleLR(self.opt, max_lr=25, final_div_factor=2, base_momentum=1, max_momentum=22,
total_steps=10, anneal_strategy='linear')
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, 10)
def test_onecycle_lr_linear_annealing_three_phases(self):
lr_target = [1, 9, 17, 25, 17, 9, 1, 0.75, 0.5, 0.25]
momentum_target = [22, 15, 8, 1, 8, 15, 22, 22, 22, 22]
lr_targets = [lr_target, lr_target]
momentum_targets = [momentum_target, momentum_target]
scheduler = OneCycleLR(self.opt, max_lr=25, div_factor=25,
base_momentum=1, max_momentum=22,
total_steps=10, anneal_strategy='linear',
pct_start=0.4, final_div_factor=4,
three_phase=True)
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, 10)
def test_onecycle_lr_cosine_annealing(self):
def annealing_cos(start, end, pct):
cos_out = math.cos(math.pi * pct) + 1
return end + (start - end) / 2.0 * cos_out
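        # annealing_cos interpolates from `start` (pct=0) to `end` (pct=1)
        # along half a cosine; the expected lr/momentum targets below are
        # built from the same shape OneCycleLR uses for its default 'cos'
        # anneal_strategy.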
lr_target = [1, 13, 25, annealing_cos(25, 0.5, 1 / 7.0), annealing_cos(25, 0.5, 2 / 7.0),
annealing_cos(25, 0.5, 3 / 7.0), annealing_cos(25, 0.5, 4 / 7.0), annealing_cos(25, 0.5, 5 / 7.0),
annealing_cos(25, 0.5, 6 / 7.0), 0.5]
momentum_target = [22, 11.5, 1, annealing_cos(1, 22, 1 / 7.0), annealing_cos(1, 22, 2 / 7.0),
annealing_cos(1, 22, 3 / 7.0), annealing_cos(1, 22, 4 / 7.0), annealing_cos(1, 22, 5 / 7.0),
annealing_cos(1, 22, 6 / 7.0), 22]
lr_targets = [lr_target, lr_target]
momentum_targets = [momentum_target, momentum_target]
scheduler = OneCycleLR(self.opt, max_lr=25, final_div_factor=2, base_momentum=1, max_momentum=22,
total_steps=10)
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, 10)
def test_cycle_lr_with_adam(self):
old_opt = self.opt
self.opt = optim.Adam(
[{'params': self.net.conv1.parameters()}, {'params': self.net.conv2.parameters(), 'lr': 0.5}],
lr=0.05)
lr_target = [1, 13, 25, 21.5, 18, 14.5, 11, 7.5, 4, 0.5]
momentum_target = [22, 11.5, 1, 4, 7, 10, 13, 16, 19, 22]
lr_targets = [lr_target, lr_target]
momentum_targets = [momentum_target, momentum_target]
scheduler = OneCycleLR(self.opt, max_lr=25, final_div_factor=2, base_momentum=1, max_momentum=22,
total_steps=10, anneal_strategy='linear')
self._test_cycle_lr(scheduler, lr_targets, momentum_targets, 10, use_beta1=True)
self.opt = old_opt # set optimizer back to SGD
def test_lambda_lr(self):
epochs = 10
self.opt.param_groups[0]['lr'] = 0.05
self.opt.param_groups[1]['lr'] = 0.4
targets = [[0.05 * (0.9 ** x) for x in range(epochs)], [0.4 * (0.8 ** x) for x in range(epochs)]]
scheduler = LambdaLR(self.opt,
lr_lambda=[lambda x1: 0.9 ** x1, lambda x2: 0.8 ** x2])
self._test(scheduler, targets, epochs)
def test_multiplicative_lr(self):
epochs = 10
self.opt.param_groups[0]['lr'] = 0.05
self.opt.param_groups[1]['lr'] = 0.4
targets = [[0.05 * (0.9 ** x) for x in range(epochs)], [0.4 * (0.8 ** x) for x in range(epochs)]]
scheduler = MultiplicativeLR(self.opt, lr_lambda=[lambda x1: 0.9, lambda x2: 0.8])
self._test(scheduler, targets, epochs)
def test_CosineAnnealingWarmRestarts_lr1(self):
iters = 100
eta_min = 1e-10
T_mults = [1, 2, 4]
for T_mult in T_mults:
T_i = 10
T_cur = 0
targets = [[0.05], [0.5]]
scheduler = CosineAnnealingWarmRestarts(self.opt, T_0=T_i, T_mult=T_mult, eta_min=eta_min)
for _ in range(1, iters, 1):
T_cur += 1
if T_cur >= T_i:
T_cur = T_cur - T_i
T_i = int(T_mult) * T_i
targets[0] += [eta_min + (0.05 - eta_min) * (1 + math.cos(math.pi * T_cur / T_i)) / 2]
targets[1] += [eta_min + (0.5 - eta_min) * (1 + math.cos(math.pi * T_cur / T_i)) / 2]
self._test(scheduler, targets, iters)
def test_CosineAnnealingWarmRestarts_lr2(self):
iters = 30
eta_min = 1e-10
T_mults = [1, 2, 4]
for T_mult in T_mults:
T_i = 10
T_cur = 0
targets = [[0.05], [0.5]]
scheduler = CosineAnnealingWarmRestarts(self.opt, T_0=T_i, T_mult=T_mult, eta_min=eta_min)
for _ in torch.arange(0.1, iters, 0.1):
T_cur = round(T_cur + 0.1, 1)
if T_cur >= T_i:
T_cur = T_cur - T_i
T_i = int(T_mult) * T_i
targets[0] += [eta_min + (0.05 - eta_min) * (1 + math.cos(math.pi * T_cur / T_i)) / 2]
targets[1] += [eta_min + (0.5 - eta_min) * (1 + math.cos(math.pi * T_cur / T_i)) / 2]
self._test_CosineAnnealingWarmRestarts(scheduler, targets, iters)
def test_CosineAnnealingWarmRestarts_lr3(self):
epochs_for_T_mults = [[0, 1, 2, 3, 4, 5, 12, 27, 3, 4, 5, 6, 13],
[0, 1, 2, 3, 4, 5, 25, 32, 33, 34, 80, 81, 3],
[0, 0.1, 0.2, 0.3, 1.3, 2.3, 17.5, 18.5, 19.5, 29.5, 30.5, 31.5, 50]]
T_curs_for_T_mults = [[1, 2, 3, 4, 5, 2, 7, 3, 4, 5, 6, 3],
[1, 2, 3, 4, 5, 15, 2, 3, 4, 10, 11, 3],
[0.1, 0.2, 0.3, 1.3, 2.3, 7.5, 8.5, 9.5, 19.5, 20.5, 21.5, 10]]
T_is_for_T_mults = [[10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10],
[10, 10, 10, 10, 10, 20, 40, 40, 40, 80, 80, 10],
[10, 10, 10, 10, 10, 30, 30, 30, 30, 30, 30, 90]]
eta_min = 1e-10
T_mults = [1, 2, 3]
for epochs, T_mult, T_curs, T_is in zip(epochs_for_T_mults, T_mults, T_curs_for_T_mults, T_is_for_T_mults):
targets = [[0.05], [0.5]]
scheduler = CosineAnnealingWarmRestarts(self.opt, T_0=10, T_mult=T_mult, eta_min=eta_min)
for T_cur, T_i in zip(T_curs, T_is):
targets[0] += [eta_min + (0.05 - eta_min) * (1 + math.cos(math.pi * T_cur / T_i)) / 2]
targets[1] += [eta_min + (0.5 - eta_min) * (1 + math.cos(math.pi * T_cur / T_i)) / 2]
self._test_interleaved_CosineAnnealingWarmRestarts(scheduler, targets, epochs)
def test_swalr_no_anneal(self):
epochs, swa_start, swa_lr = 10, 5, 0.01
initial_lrs = [group['lr'] for group in self.opt.param_groups]
targets = [[lr] * (swa_start + 1) + [swa_lr] * (epochs - swa_start - 1)
for lr in initial_lrs]
swa_scheduler = SWALR(self.opt, anneal_epochs=1, swa_lr=swa_lr)
self._test_swalr(swa_scheduler, None, targets, swa_start, epochs)
def test_swalr_cosine_anneal_after_multiplicative(self):
# same swa_lr for different param_groups
epochs, swa_start, swa_lr, anneal_epochs = 15, 5, 0.01, 5
mult_factor = 0.9
scheduler = MultiplicativeLR(self.opt, lr_lambda=lambda epoch: mult_factor)
swa_scheduler = SWALR(self.opt, anneal_epochs=anneal_epochs, swa_lr=swa_lr)
def anneal_coef(t):
if t + 1 >= anneal_epochs:
return 0.
return (1 + math.cos(math.pi * (t + 1) / anneal_epochs)) / 2
initial_lrs = [group['lr'] for group in self.opt.param_groups]
targets_before_swa = [[lr * mult_factor**i for i in range(swa_start + 1)]
for lr in initial_lrs]
swa_epochs = epochs - swa_start - 1
targets = [lrs + [lrs[-1] * anneal_coef(t) + swa_lr * (1 - anneal_coef(t)) for t in range(swa_epochs)]
for lrs in targets_before_swa]
self._test_swalr(swa_scheduler, scheduler, targets, swa_start, epochs)
def test_swalr_linear_anneal_after_multiplicative(self):
# separate swa_lr for different param_groups
epochs, swa_start, swa_lrs, anneal_epochs = 15, 5, [0.01, 0.02], 4
mult_factor = 0.9
scheduler = MultiplicativeLR(self.opt, lr_lambda=lambda epoch: mult_factor)
swa_scheduler = SWALR(self.opt, anneal_epochs=anneal_epochs,
anneal_strategy="linear", swa_lr=swa_lrs)
def anneal_coef(t):
if t + 1 >= anneal_epochs:
return 0.
return 1 - (t + 1) / anneal_epochs
initial_lrs = [group['lr'] for group in self.opt.param_groups]
targets_before_swa = [[lr * mult_factor**i for i in range(swa_start + 1)]
for lr in initial_lrs]
swa_epochs = epochs - swa_start - 1
targets = [lrs + [lrs[-1] * anneal_coef(t) + swa_lr * (1 - anneal_coef(t)) for t in range(swa_epochs)]
for lrs, swa_lr in zip(targets_before_swa, swa_lrs)]
self._test_swalr(swa_scheduler, scheduler, targets, swa_start, epochs)
def _test_swalr(self, swa_scheduler, scheduler, targets, swa_start, epochs):
for epoch in range(epochs):
for param_group, target in zip(self.opt.param_groups, targets):
self.assertEqual(target[epoch], param_group['lr'],
msg='LR is wrong in epoch {}: expected {}, got {}'.format(
epoch, target[epoch], param_group['lr']), atol=1e-5, rtol=0)
if epoch >= swa_start:
swa_scheduler.step()
elif scheduler is not None:
scheduler.step()
def test_swalr_hypers(self):
# Test that SWALR raises errors for incorrect hyper-parameters
with self.assertRaisesRegex(ValueError, "anneal_strategy must"):
swa_scheduler = SWALR(self.opt, anneal_strategy="exponential", swa_lr=1.)
with self.assertRaisesRegex(ValueError, "anneal_epochs must"):
swa_scheduler = SWALR(self.opt, anneal_epochs=-1, swa_lr=1.)
with self.assertRaisesRegex(ValueError, "anneal_epochs must"):
swa_scheduler = SWALR(self.opt, anneal_epochs=1.7, swa_lr=1.)
with self.assertRaisesRegex(ValueError, "swa_lr must"):
swa_scheduler = SWALR(self.opt, swa_lr=[1., 0.1, 0.01])
def test_step_lr_state_dict(self):
self._check_scheduler_state_dict(
lambda: StepLR(self.opt, gamma=0.1, step_size=3),
lambda: StepLR(self.opt, gamma=0.01 / 2, step_size=1))
def test_multi_step_lr_state_dict(self):
self._check_scheduler_state_dict(
lambda: MultiStepLR(self.opt, gamma=0.1, milestones=[2, 5, 9]),
lambda: MultiStepLR(self.opt, gamma=0.01, milestones=[1, 4, 6]))
def test_exp_step_lr_state_dict(self):
self._check_scheduler_state_dict(
lambda: ExponentialLR(self.opt, gamma=0.1),
lambda: ExponentialLR(self.opt, gamma=0.01))
def test_cosine_lr_state_dict(self):
epochs = 10
eta_min = 1e-10
self._check_scheduler_state_dict(
lambda: CosineAnnealingLR(self.opt, T_max=epochs, eta_min=eta_min),
lambda: CosineAnnealingLR(self.opt, T_max=epochs // 2, eta_min=eta_min / 2),
epochs=epochs)
def test_reduce_lr_on_plateau_state_dict(self):
scheduler = ReduceLROnPlateau(self.opt, mode='min', factor=0.1, patience=2)
for score in [1.0, 2.0, 3.0, 4.0, 3.0, 4.0, 5.0, 3.0, 2.0, 1.0]:
scheduler.step(score)
scheduler_copy = ReduceLROnPlateau(self.opt, mode='max', factor=0.5, patience=10)
scheduler_copy.load_state_dict(scheduler.state_dict())
for key in scheduler.__dict__.keys():
if key not in {'optimizer', 'is_better'}:
self.assertEqual(scheduler.__dict__[key], scheduler_copy.__dict__[key])
def test_lambda_lr_state_dict_fn(self):
scheduler = LambdaLR(self.opt, lr_lambda=lambda x: x)
state = scheduler.state_dict()
self.assertIsNone(state['lr_lambdas'][0])
scheduler_copy = LambdaLR(self.opt, lr_lambda=lambda x: x)
scheduler_copy.load_state_dict(state)
for key in scheduler.__dict__.keys():
if key not in {'optimizer', 'lr_lambdas'}:
self.assertEqual(scheduler.__dict__[key], scheduler_copy.__dict__[key])
def test_lambda_lr_state_dict_obj(self):
scheduler = LambdaLR(self.opt, lr_lambda=LambdaLRTestObject(10))
state = scheduler.state_dict()
self.assertIsNotNone(state['lr_lambdas'][0])
scheduler_copy = LambdaLR(self.opt, lr_lambda=LambdaLRTestObject(-1))
scheduler_copy.load_state_dict(state)
for key in scheduler.__dict__.keys():
if key not in {'optimizer'}:
self.assertEqual(scheduler.__dict__[key], scheduler_copy.__dict__[key])
def test_CosineAnnealingWarmRestarts_lr_state_dict(self):
self._check_scheduler_state_dict(
lambda: CosineAnnealingWarmRestarts(self.opt, T_0=10, T_mult=2),
lambda: CosineAnnealingWarmRestarts(self.opt, T_0=100))
def test_swa_lr_state_dict(self):
self._check_scheduler_state_dict(
lambda: SWALR(self.opt, anneal_epochs=3, swa_lr=0.5),
lambda: SWALR(self.opt, anneal_epochs=10, anneal_strategy="linear", swa_lr=5.))
def _check_scheduler_state_dict(self, constr, constr2, epochs=10):
scheduler = constr()
for _ in range(epochs):
scheduler.step()
scheduler_copy = constr2()
scheduler_copy.load_state_dict(scheduler.state_dict())
for key in scheduler.__dict__.keys():
if key != 'optimizer':
self.assertEqual(scheduler.__dict__[key], scheduler_copy.__dict__[key])
self.assertEqual(scheduler.get_last_lr(), scheduler_copy.get_last_lr())
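    # The round-trips above are what make scheduler checkpointing work; a
    # minimal sketch (the file name is illustrative):
    #
    #     torch.save({'scheduler': scheduler.state_dict()}, 'ckpt.pt')
    #     restored.load_state_dict(torch.load('ckpt.pt')['scheduler'])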
def _test_get_last_lr(self, schedulers, targets, epochs=10):
if isinstance(schedulers, _LRScheduler):
schedulers = [schedulers]
for epoch in range(epochs):
result = [scheduler.get_last_lr() for scheduler in schedulers]
[scheduler.step() for scheduler in schedulers]
target = [[t[epoch] for t in targets]] * len(schedulers)
for t, r in zip(target, result):
self.assertEqual(target, result,
msg='LR is wrong in epoch {}: expected {}, got {}'.format(
epoch, t, r), atol=1e-5, rtol=0)
def _test_with_epoch(self, schedulers, targets, epochs=10):
if isinstance(schedulers, _LRScheduler):
schedulers = [schedulers]
for epoch in range(epochs):
[scheduler.step(epoch) for scheduler in schedulers] # step before assert: skip initial lr
for param_group, target in zip(self.opt.param_groups, targets):
self.assertEqual(target[epoch], param_group['lr'],
msg='LR is wrong in epoch {}: expected {}, got {}'.format(
epoch, target[epoch], param_group['lr']), atol=1e-5, rtol=0)
def _test(self, schedulers, targets, epochs=10):
if isinstance(schedulers, _LRScheduler):
schedulers = [schedulers]
for epoch in range(epochs):
for param_group, target in zip(self.opt.param_groups, targets):
self.assertEqual(target[epoch], param_group['lr'],
msg='LR is wrong in epoch {}: expected {}, got {}'.format(
epoch, target[epoch], param_group['lr']), atol=1e-5, rtol=0)
[scheduler.step() for scheduler in schedulers]
def _test_CosineAnnealingWarmRestarts(self, scheduler, targets, epochs=10):
for index, epoch in enumerate(torch.arange(0, epochs, 0.1)):
epoch = round(epoch.item(), 1)
scheduler.step(epoch)
for param_group, target in zip(self.opt.param_groups, targets):
self.assertEqual(target[index], param_group['lr'],
msg='LR is wrong in epoch {}: expected {}, got {}'.format(
epoch, target[index], param_group['lr']), atol=1e-5, rtol=0)
def _test_interleaved_CosineAnnealingWarmRestarts(self, scheduler, targets, epochs):
for index, epoch in enumerate(epochs):
scheduler.step(epoch)
for param_group, target in zip(self.opt.param_groups, targets):
self.assertEqual(target[index], param_group['lr'],
msg='LR is wrong in epoch {}: expected {}, got {}'.format(
epoch, target[index], param_group['lr']), atol=1e-5, rtol=0)
def _test_against_closed_form(self, scheduler, closed_form_scheduler, epochs=10):
self.setUp()
targets = []
for epoch in range(epochs):
closed_form_scheduler.step(epoch)
targets.append([group['lr'] for group in self.opt.param_groups])
self.setUp()
for epoch in range(epochs):
scheduler.step()
for i, param_group in enumerate(self.opt.param_groups):
self.assertEqual(targets[epoch][i], param_group['lr'],
msg='LR is wrong in epoch {}: expected {}, got {}'.format(
epoch, targets[epoch][i], param_group['lr']), atol=1e-5, rtol=0)
def _test_reduce_lr_on_plateau(self, schedulers, targets, metrics, epochs=10, verbose=False):
if isinstance(schedulers, _LRScheduler) or isinstance(schedulers, ReduceLROnPlateau):
schedulers = [schedulers]
for epoch in range(epochs):
for scheduler in schedulers:
if isinstance(scheduler, ReduceLROnPlateau):
scheduler.step(metrics[epoch])
else:
scheduler.step()
if verbose:
print('epoch{}:\tlr={}'.format(epoch, self.opt.param_groups[0]['lr']))
for param_group, target in zip(self.opt.param_groups, targets):
self.assertEqual(target[epoch], param_group['lr'],
msg='LR is wrong in epoch {}: expected {}, got {}'.format(
epoch, target[epoch], param_group['lr']), atol=1e-5, rtol=0)
def _test_cycle_lr(self, scheduler, lr_targets, momentum_targets, batch_iterations, verbose=False, use_beta1=False):
for batch_num in range(batch_iterations):
if verbose:
if 'momentum' in self.opt.param_groups[0].keys():
print('batch{}:\tlr={},momentum={}'.format(batch_num, self.opt.param_groups[0]['lr'],
self.opt.param_groups[0]['momentum']))
elif use_beta1 and 'betas' in self.opt.param_groups[0].keys():
print('batch{}:\tlr={},beta1={}'.format(batch_num, self.opt.param_groups[0]['lr'],
self.opt.param_groups[0]['betas'][0]))
else:
print('batch{}:\tlr={}'.format(batch_num, self.opt.param_groups[0]['lr']))
for param_group, lr_target, momentum_target in zip(self.opt.param_groups, lr_targets, momentum_targets):
self.assertEqual(
lr_target[batch_num], param_group['lr'],
msg='LR is wrong in batch_num {}: expected {}, got {}'.format(
batch_num, lr_target[batch_num], param_group['lr']), atol=1e-5, rtol=0)
if use_beta1 and 'betas' in param_group.keys():
self.assertEqual(
momentum_target[batch_num], param_group['betas'][0],
msg='Beta1 is wrong in batch_num {}: expected {}, got {}'.format(
batch_num, momentum_target[batch_num], param_group['betas'][0]), atol=1e-5, rtol=0)
elif 'momentum' in param_group.keys():
self.assertEqual(
momentum_target[batch_num], param_group['momentum'],
msg='Momentum is wrong in batch_num {}: expected {}, got {}'.format(
batch_num, momentum_target[batch_num], param_group['momentum']), atol=1e-5, rtol=0)
scheduler.step()
def test_cosine_then_cyclic(self):
# https://github.com/pytorch/pytorch/issues/21965
max_lr = 0.3
base_lr = 0.1
optim_lr = 0.5
model = torch.nn.Linear(2, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=optim_lr)
lr_scheduler_1 = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=20, eta_min=0.1)
lr_scheduler_2 = torch.optim.lr_scheduler.CyclicLR(
optimizer, base_lr=base_lr, max_lr=max_lr, step_size_up=1, step_size_down=3
)
for i in range(40):
if i <= lr_scheduler_1.T_max:
lr_scheduler_1.step()
else:
lr_scheduler_2.step()
last_lr = optimizer.param_groups[0]["lr"]
self.assertLessEqual(last_lr, max_lr)
class SWATestDNN(torch.nn.Module):
def __init__(self, input_features):
super(SWATestDNN, self).__init__()
self.n_features = 100
self.fc1 = torch.nn.Linear(input_features, self.n_features)
self.bn = torch.nn.BatchNorm1d(self.n_features)
def compute_preactivation(self, x):
return self.fc1(x)
def forward(self, x):
x = self.fc1(x)
x = self.bn(x)
return x
class SWATestCNN(torch.nn.Module):
def __init__(self, input_channels):
super(SWATestCNN, self).__init__()
self.n_features = 10
self.conv1 = torch.nn.Conv2d(input_channels, self.n_features, kernel_size=3, padding=1)
self.bn = torch.nn.BatchNorm2d(self.n_features, momentum=0.3)
def compute_preactivation(self, x):
return self.conv1(x)
def forward(self, x):
x = self.conv1(x)
x = self.bn(x)
return x
class TestSWAUtils(TestCase):
def _test_averaged_model(self, net_device, swa_device):
dnn = torch.nn.Sequential(
torch.nn.Conv2d(1, 5, kernel_size=3),
torch.nn.ReLU(),
torch.nn.MaxPool2d(kernel_size=2),
torch.nn.BatchNorm2d(5, momentum=0.3),
torch.nn.Conv2d(5, 2, kernel_size=3),
torch.nn.ReLU(),
torch.nn.Linear(5, 5),
torch.nn.ReLU(),
torch.nn.Linear(5, 10)
).to(net_device)
averaged_dnn = AveragedModel(dnn, device=swa_device)
averaged_params = [torch.zeros_like(param) for param in dnn.parameters()]
n_updates = 10
for i in range(n_updates):
for p, p_avg in zip(dnn.parameters(), averaged_params):
p.detach().add_(torch.randn_like(p))
p_avg += p.detach() / n_updates
averaged_dnn.update_parameters(dnn)
for p_avg, p_swa in zip(averaged_params, averaged_dnn.parameters()):
self.assertEqual(p_avg, p_swa)
# Check that AveragedModel is on the correct device
self.assertTrue(p_swa.device == swa_device)
self.assertTrue(p.device == net_device)
self.assertTrue(averaged_dnn.n_averaged.device == swa_device)
def test_averaged_model_all_devices(self):
cpu = torch.device("cpu")
self._test_averaged_model(cpu, cpu)
if torch.cuda.is_available():
cuda = torch.device(0)
self._test_averaged_model(cuda, cpu)
self._test_averaged_model(cpu, cuda)
self._test_averaged_model(cuda, cuda)
def test_averaged_model_mixed_device(self):
if not torch.cuda.is_available():
return
dnn = torch.nn.Sequential(
torch.nn.Conv2d(1, 5, kernel_size=3),
torch.nn.Linear(5, 10)
)
dnn[0].cuda()
dnn[1].cpu()
averaged_dnn = AveragedModel(dnn)
averaged_params = [torch.zeros_like(param) for param in dnn.parameters()]
n_updates = 10
for i in range(n_updates):
for p, p_avg in zip(dnn.parameters(), averaged_params):
p.detach().add_(torch.randn_like(p))
p_avg += p.detach() / n_updates
averaged_dnn.update_parameters(dnn)
for p_avg, p_swa in zip(averaged_params, averaged_dnn.parameters()):
self.assertEqual(p_avg, p_swa)
# Check that AveragedModel is on the correct device
self.assertTrue(p_avg.device == p_swa.device)
def test_averaged_model_state_dict(self):
dnn = torch.nn.Sequential(
torch.nn.Conv2d(1, 5, kernel_size=3),
torch.nn.Linear(5, 10)
)
averaged_dnn = AveragedModel(dnn)
averaged_dnn2 = AveragedModel(dnn)
n_updates = 10
for i in range(n_updates):
for p in dnn.parameters():
p.detach().add_(torch.randn_like(p))
averaged_dnn.update_parameters(dnn)
averaged_dnn2.load_state_dict(averaged_dnn.state_dict())
for p_swa, p_swa2 in zip(averaged_dnn.parameters(), averaged_dnn2.parameters()):
self.assertEqual(p_swa, p_swa2)
self.assertTrue(averaged_dnn.n_averaged == averaged_dnn2.n_averaged)
def test_averaged_model_exponential(self):
# Test AveragedModel with EMA as avg_fn
dnn = torch.nn.Sequential(
torch.nn.Conv2d(1, 5, kernel_size=3),
torch.nn.Linear(5, 10)
)
alpha = 0.9
def avg_fn(p_avg, p, n_avg):
return alpha * p_avg + (1 - alpha) * p
averaged_dnn = AveragedModel(dnn, avg_fn=avg_fn)
averaged_params = [torch.zeros_like(param) for param in dnn.parameters()]
n_updates = 10
for i in range(n_updates):
updated_averaged_params = []
for p, p_avg in zip(dnn.parameters(), averaged_params):
p.detach().add_(torch.randn_like(p))
if i == 0:
updated_averaged_params.append(p.clone())
else:
updated_averaged_params.append((p_avg * alpha +
p * (1 - alpha)).clone())
averaged_dnn.update_parameters(dnn)
averaged_params = updated_averaged_params
for p_avg, p_swa in zip(averaged_params, averaged_dnn.parameters()):
self.assertEqual(p_avg, p_swa)
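    # The avg_fn hook above is the standard way to get an exponential moving
    # average of weights; a minimal usage sketch (helper names illustrative):
    #
    #     ema_model = AveragedModel(model, avg_fn=lambda avg, p, n: 0.9 * avg + 0.1 * p)
    #     for batch in loader:
    #         train_step(model, batch)
    #         ema_model.update_parameters(model)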
def _test_update_bn(self, dnn, dl_x, dl_xy, cuda):
preactivation_sum = torch.zeros(dnn.n_features)
preactivation_squared_sum = torch.zeros(dnn.n_features)
if cuda:
preactivation_sum = preactivation_sum.cuda()
preactivation_squared_sum = preactivation_squared_sum.cuda()
total_num = 0
for x in dl_x:
x = x[0]
if cuda:
x = x.cuda()
dnn.forward(x)
preactivations = dnn.compute_preactivation(x)
if len(preactivations.shape) == 4:
preactivations = preactivations.transpose(1, 3)
preactivations = preactivations.contiguous().view(-1, dnn.n_features)
total_num += preactivations.shape[0]
preactivation_sum += torch.sum(preactivations, dim=0)
preactivation_squared_sum += torch.sum(preactivations**2, dim=0)
preactivation_mean = preactivation_sum / total_num
preactivation_var = preactivation_squared_sum / total_num
preactivation_var = preactivation_var - preactivation_mean**2
update_bn(dl_xy, dnn, device=x.device)
self.assertEqual(preactivation_mean, dnn.bn.running_mean)
self.assertEqual(preactivation_var, dnn.bn.running_var, atol=1e-1, rtol=0)
def _reset_bn(module):
if issubclass(module.__class__,
torch.nn.modules.batchnorm._BatchNorm):
module.running_mean = torch.zeros_like(module.running_mean)
module.running_var = torch.ones_like(module.running_var)
# reset batch norm and run update_bn again
dnn.apply(_reset_bn)
update_bn(dl_xy, dnn, device=x.device)
self.assertEqual(preactivation_mean, dnn.bn.running_mean)
self.assertEqual(preactivation_var, dnn.bn.running_var, atol=1e-1, rtol=0)
# using the dl_x loader instead of dl_xy
dnn.apply(_reset_bn)
update_bn(dl_x, dnn, device=x.device)
self.assertEqual(preactivation_mean, dnn.bn.running_mean)
self.assertEqual(preactivation_var, dnn.bn.running_var, atol=1e-1, rtol=0)
def test_update_bn_dnn(self):
# Test update_bn for a fully-connected network with BatchNorm1d
objects, input_features = 100, 5
x = torch.rand(objects, input_features)
y = torch.rand(objects)
ds_x = torch.utils.data.TensorDataset(x)
ds_xy = torch.utils.data.TensorDataset(x, y)
dl_x = torch.utils.data.DataLoader(ds_x, batch_size=5, shuffle=True)
dl_xy = torch.utils.data.DataLoader(ds_xy, batch_size=5, shuffle=True)
dnn = SWATestDNN(input_features=input_features)
dnn.train()
self._test_update_bn(dnn, dl_x, dl_xy, False)
if torch.cuda.is_available():
dnn = SWATestDNN(input_features=input_features)
dnn.train()
self._test_update_bn(dnn.cuda(), dl_x, dl_xy, True)
self.assertTrue(dnn.training)
def test_update_bn_cnn(self):
# Test update_bn for convolutional network and BatchNorm2d
objects = 100
input_channels = 3
height, width = 5, 5
x = torch.rand(objects, input_channels, height, width)
y = torch.rand(objects)
ds_x = torch.utils.data.TensorDataset(x)
ds_xy = torch.utils.data.TensorDataset(x, y)
dl_x = torch.utils.data.DataLoader(ds_x, batch_size=5, shuffle=True)
dl_xy = torch.utils.data.DataLoader(ds_xy, batch_size=5, shuffle=True)
dnn = SWATestCNN(input_channels=input_channels)
dnn.train()
self._test_update_bn(dnn, dl_x, dl_xy, False)
if torch.cuda.is_available():
dnn = SWATestCNN(input_channels=input_channels)
dnn.train()
self._test_update_bn(dnn.cuda(), dl_x, dl_xy, True)
self.assertTrue(dnn.training)
def test_bn_update_eval_momentum(self):
# check that update_bn preserves eval mode
objects = 100
input_channels = 3
height, width = 5, 5
x = torch.rand(objects, input_channels, height, width)
ds_x = torch.utils.data.TensorDataset(x)
dl_x = torch.utils.data.DataLoader(ds_x, batch_size=5, shuffle=True)
dnn = SWATestCNN(input_channels=input_channels)
dnn.eval()
update_bn(dl_x, dnn)
self.assertFalse(dnn.training)
# check that momentum is preserved
self.assertEqual(dnn.bn.momentum, 0.3)
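    # Taken together, these utilities form the usual SWA recipe; a hedged
    # end-to-end sketch (the training helpers are illustrative):
    #
    #     swa_model = AveragedModel(model)
    #     swa_scheduler = SWALR(optimizer, swa_lr=0.05)
    #     for epoch in range(epochs):
    #         train_one_epoch(model, loader, optimizer)
    #         if epoch >= swa_start:
    #             swa_model.update_parameters(model)
    #             swa_scheduler.step()
    #     update_bn(loader, swa_model)  # recompute BatchNorm running stats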
if __name__ == '__main__':
run_tests()
| [
"[email protected]"
] | |
eee85f7fa13c8c9a2568ff0b2c328fcd74c447dd | a3eb732ead7e1d10a85a88e42dc639eb16a40265 | /instagram_api/request/base.py | e326afae24f36536b07fc4d0d96d658ed435c783 | [
"MIT"
] | permissive | carsam2021/instagram_api | 7654c0f485c22935cf478016e46e65acbeda9344 | b53f72db36c505a2eb24ebac1ba8267a0cc295bb | refs/heads/master | 2023-03-16T14:06:27.515432 | 2020-10-17T04:39:19 | 2020-10-17T04:39:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,849 | py | from typing import Dict, List
import json
from instagram_api.interfaces.api_request import ApiRequestInterface
from instagram_api.signatures import Signatures
__all__ = ['CollectionBase']
class CollectionBase:
def __init__(self, ig):
from instagram_api.instagram import Instagram
self._ig: Instagram = ig
@staticmethod
def _paginate_with_exclusion(request: ApiRequestInterface,
exclude_list: List[int],
rank_token: str,
limit: int = 30) -> ApiRequestInterface:
assert Signatures.is_valid_uuid(rank_token), f'`{rank_token}` is not a valid rank token.'
        # Something is off in the logic here (translated from the original
        # Russian note). Fixed: without an early return, the empty-exclusion
        # branch fell through and added the count param a second time below.
        if not exclude_list:
            return request.add_params(
                count=str(limit),
            )
return request.add_params(
count=str(limit),
exclude_list=json.dumps(exclude_list, separators=(',', ':')),
rank_token=rank_token,
)
@staticmethod
def _paginate_with_multi_exclusion(request: ApiRequestInterface,
exclude_groups: Dict[str, List[int]],
rank_token: str,
limit: int = 30) -> ApiRequestInterface:
assert Signatures.is_valid_uuid(rank_token), f'`{rank_token}` is not a valid rank token.'
        if not exclude_groups:
            # Early return mirrors _paginate_with_exclusion and avoids adding
            # the params twice when there is nothing to exclude.
            return request.add_params(
                count=str(limit),
            )
total_count = 0
for ids in exclude_groups.values():
total_count += len(ids)
return request.add_params(
count=str(limit),
exclude_list=json.dumps(exclude_groups, separators=(',', ':')),
rank_token=rank_token,
)
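    # A hedged usage sketch (the endpoint and request-builder calls here are
    # assumed names, shown only to illustrate the helpers above):
    #
    #     request = self._ig.request('feed/timeline/')
    #     request = CollectionBase._paginate_with_exclusion(
    #         request, exclude_list=[101, 102], rank_token=some_uuid, limit=30)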
| [
"[email protected]"
] | |
c378c2866324df13f145039f1ca8f38f447aeb85 | be3920640bbbdb055876f2c1f49c6cc2e81cbab2 | /pyexample/tests/test_viz.py | a4cf5e98a83b2a45624536877e68475621765059 | [
"BSD-3-Clause"
] | permissive | phobson/pyexample | 4993fe8f9b70e5224d09e878f137e94c8c1b4f60 | 133598954b514a80dc0f65c02c8740b626e569d2 | refs/heads/new-branch | 2021-12-02T12:29:32.156032 | 2021-11-24T19:33:21 | 2021-11-24T19:33:21 | 68,402,565 | 0 | 0 | BSD-3-Clause | 2018-01-19T19:44:27 | 2016-09-16T17:55:31 | null | UTF-8 | Python | false | false | 905 | py | import numpy
from matplotlib import pyplot
import pytest
from pyexample import viz
from . import helpers
BASELINE_DIR = 'baseline_images/test_viz'
TOLERANCE = 15
@pytest.fixture
def plot_data():
data = numpy.array([
3.113, 3.606, 4.046, 4.046, 4.710, 6.140, 6.978,
2.000, 4.200, 4.620, 5.570, 5.660, 5.860, 6.650,
6.780, 6.790, 7.500, 7.500, 7.500, 8.630, 8.710,
8.990, 9.850, 10.820, 11.250, 11.250, 12.200, 14.920,
16.770, 17.810, 19.160, 19.190, 19.640, 20.180, 22.970,
])
return data
@pytest.mark.mpl_image_compare(baseline_dir=BASELINE_DIR, tolerance=TOLERANCE)
@helpers.seed
def test_demo_plotting_function(plot_data):
x = numpy.random.uniform(size=len(plot_data))
fig = viz.demo_plotting_function(x, plot_data, ax=None)
assert isinstance(fig, pyplot.Figure)
return fig
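# Note: the image comparison above is driven by the pytest-mpl plugin;
# assuming it is installed, the usual invocations are:
#     pytest --mpl                                        # compare to baselines
#     pytest --mpl-generate-path=baseline_images/test_viz # (re)build baselines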
| [
"[email protected]"
] | |
2fb5eec5f1b153b97c548b1814a2487ab20fc821 | e2b9f2354c36bd1edfa141d29f60c13ea176c0fe | /2018/blaze/sl0thcoin/smarter/solve.py | a2ba36e35e7368629a46ee89ae5a80409bf445d9 | [] | no_license | Jinmo/ctfs | 236d2c9c5a49d500e80ece4631a22c7fb32c3c3f | d225baef7942250a5ff15a3f2a9b7ad8501c7566 | refs/heads/master | 2021-07-24T15:17:05.489163 | 2021-07-05T16:05:15 | 2021-07-05T16:05:15 | 68,125,231 | 162 | 26 | null | null | null | null | UTF-8 | Python | false | false | 699 | py | #!/usr/bin/python3
from hashlib import sha256
from itertools import permutations
import string
# Just stared at the EVM assembly for many hours to solve this one.
if False:  # brute force kept for reference; note 'hash' is only defined further down
keys = string.ascii_lowercase
    for combs in permutations(keys, 4):
        # join+encode instead of bytearray(combs): hashing a tuple of str
        # characters would raise a TypeError on Python 3.
        z = sha256(''.join(combs).encode()).hexdigest()
# x = x[::-1]
if z == hash or z == hash[::-1] or z == bytes.fromhex(hash)[::-1].hex() or z[::-1] == bytes.fromhex(hash)[::-1].hex():
print(combs)
exit()
keys = bytearray(bytes.fromhex("4419194e"))  # the XOR with 42 below decodes this to b"n33d"
for i in range(len(keys)):
keys[i] ^= 42
hash = 'a8c8af687609bf404c202ac1378e10cd19421e72c0a161edc56b53752326592a'
prefix = b"flag{mayb3_w3_"
suffix = b"_bett3r_t00ls}"
x = bytearray(prefix + keys + suffix)
print(x)
| [
"[email protected]"
] | |
139a12db6cc98827a46acc6950671f20e7643c71 | 480e33f95eec2e471c563d4c0661784c92396368 | /CondTools/Hcal/test/dbwriteCastorElectronicsMap_cfg.py | b60f9e71df8fabf598fe481066fc52793c1f219b | [
"Apache-2.0"
] | permissive | cms-nanoAOD/cmssw | 4d836e5b76ae5075c232de5e062d286e2026e8bd | 4eccb8a758b605875003124dd55ea58552b86af1 | refs/heads/master-cmsswmaster | 2021-01-23T21:19:52.295420 | 2020-08-27T08:01:20 | 2020-08-27T08:01:20 | 102,867,729 | 7 | 14 | Apache-2.0 | 2022-05-23T07:58:09 | 2017-09-08T14:03:57 | C++ | UTF-8 | Python | false | false | 2,177 | py | import FWCore.ParameterSet.Config as cms
process = cms.Process("TEST")
process.MessageLogger=cms.Service("MessageLogger",
destinations=cms.untracked.vstring("cout"),
cout=cms.untracked.PSet(
threshold=cms.untracked.string("INFO")
)
)
process.load("CondCore.DBCommon.CondDBCommon_cfi")
#process.CondDBCommon.connect = cms.string('sqlite_file:CastorEmap.db')
process.CondDBCommon.DBParameters.authenticationPath = cms.untracked.string('/afs/cern.ch/cms/DB/conddb')
#process.CondDBCommon.connect = cms.string('oracle://cms_orcoff_prep/CMS_COND_31X_HCAL')
process.CondDBCommon.connect = cms.string('oracle://cms_orcon_prod/CMS_COND_31X_HCAL')
process.CondDBCommon.DBParameters.authenticationPath = cms.untracked.string('/nfshome0/popcondev/conddb')
process.source = cms.Source("EmptyIOVSource",
timetype = cms.string('runnumber'),
firstValue = cms.uint64(1),
lastValue = cms.uint64(1),
interval = cms.uint64(1)
)
process.es_ascii = cms.ESSource("CastorTextCalibrations",
input = cms.VPSet(cms.PSet(
object = cms.string('ElectronicsMap'),
file = cms.FileInPath('CondFormats/CastorObjects/data/emap_dcc_nominal_Run121872.txt')
))
)
process.PoolDBOutputService = cms.Service("PoolDBOutputService",
process.CondDBCommon,
timetype = cms.untracked.string('runnumber'),
# logconnect= cms.untracked.string('sqlite_file:log.db'),
#logconnect= cms.untracked.string('oracle://cms_orcoff_prep/CMS_COND_31X_POPCONLOG'),
logconnect= cms.untracked.string('oracle://cms_orcon_prod/CMS_COND_31X_POPCONLOG'),
toPut = cms.VPSet(cms.PSet(
record = cms.string('CastorElectronicsMapRcd'),
tag = cms.string('CastorElectronicsMap_v2.01_mc')
))
)
process.mytest = cms.EDAnalyzer("CastorElectronicsMapPopConAnalyzer",
record = cms.string('CastorElectronicsMapRcd'),
loggingOn= cms.untracked.bool(True),
SinceAppendMode=cms.bool(True),
Source=cms.PSet(
# firstSince=cms.untracked.double(300)
IOVRun=cms.untracked.uint32(1)
)
)
process.p = cms.Path(process.mytest)
| [
"[email protected]"
] | |
f6cbef75142bef6fca11beffb23c99d4b87e2dcb | 6a95b330e1beec08b917ff45eccfd6be3fd4629f | /kubernetes/client/models/extensions_v1beta1_scale_status.py | 2abdb17853310a267b8ff6d08d3f314a58e3068e | [
"Apache-2.0"
] | permissive | TokkoLabs/client-python | f4a83d6540e64861b59e322c951380a670578d7f | f1ad9c6889105d8510472606c98f8d3807f82020 | refs/heads/master | 2023-07-14T01:36:46.152341 | 2017-12-21T21:32:11 | 2017-12-21T21:32:11 | 115,042,671 | 0 | 0 | Apache-2.0 | 2021-08-06T03:29:17 | 2017-12-21T20:05:15 | Python | UTF-8 | Python | false | false | 6,162 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class ExtensionsV1beta1ScaleStatus(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'replicas': 'int',
'selector': 'dict(str, str)',
'target_selector': 'str'
}
attribute_map = {
'replicas': 'replicas',
'selector': 'selector',
'target_selector': 'targetSelector'
}
def __init__(self, replicas=None, selector=None, target_selector=None):
"""
ExtensionsV1beta1ScaleStatus - a model defined in Swagger
"""
self._replicas = None
self._selector = None
self._target_selector = None
self.discriminator = None
self.replicas = replicas
if selector is not None:
self.selector = selector
if target_selector is not None:
self.target_selector = target_selector
@property
def replicas(self):
"""
Gets the replicas of this ExtensionsV1beta1ScaleStatus.
actual number of observed instances of the scaled object.
:return: The replicas of this ExtensionsV1beta1ScaleStatus.
:rtype: int
"""
return self._replicas
@replicas.setter
def replicas(self, replicas):
"""
Sets the replicas of this ExtensionsV1beta1ScaleStatus.
actual number of observed instances of the scaled object.
:param replicas: The replicas of this ExtensionsV1beta1ScaleStatus.
:type: int
"""
if replicas is None:
raise ValueError("Invalid value for `replicas`, must not be `None`")
self._replicas = replicas
@property
def selector(self):
"""
Gets the selector of this ExtensionsV1beta1ScaleStatus.
label query over pods that should match the replicas count. More info: http://kubernetes.io/docs/user-guide/labels#label-selectors
:return: The selector of this ExtensionsV1beta1ScaleStatus.
:rtype: dict(str, str)
"""
return self._selector
@selector.setter
def selector(self, selector):
"""
Sets the selector of this ExtensionsV1beta1ScaleStatus.
label query over pods that should match the replicas count. More info: http://kubernetes.io/docs/user-guide/labels#label-selectors
:param selector: The selector of this ExtensionsV1beta1ScaleStatus.
:type: dict(str, str)
"""
self._selector = selector
@property
def target_selector(self):
"""
Gets the target_selector of this ExtensionsV1beta1ScaleStatus.
        label selector for pods that should match the replicas count. This is a serialized version of both map-based and more expressive set-based selectors. This is done to avoid introspection in the clients. The string will be in the same format as the query-param syntax. If the target type only supports map-based selectors, both this field and map-based selector field are populated. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#label-selectors
:return: The target_selector of this ExtensionsV1beta1ScaleStatus.
:rtype: str
"""
return self._target_selector
@target_selector.setter
def target_selector(self, target_selector):
"""
Sets the target_selector of this ExtensionsV1beta1ScaleStatus.
        label selector for pods that should match the replicas count. This is a serialized version of both map-based and more expressive set-based selectors. This is done to avoid introspection in the clients. The string will be in the same format as the query-param syntax. If the target type only supports map-based selectors, both this field and map-based selector field are populated. More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#label-selectors
:param target_selector: The target_selector of this ExtensionsV1beta1ScaleStatus.
:type: str
"""
self._target_selector = target_selector
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, ExtensionsV1beta1ScaleStatus):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| [
"[email protected]"
] | |
25d837ad1c315cf0467adfe2ff6b3178b0ee48a4 | c50e7eb190802d7849c0d0cea02fb4d2f0021777 | /src/k8s-extension/azext_k8s_extension/tests/latest/MockClasses.py | 8a6313c9ce63728ba80823afc1570193e5138272 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | Azure/azure-cli-extensions | c1615b19930bba7166c282918f166cd40ff6609c | b8c2cf97e991adf0c0a207d810316b8f4686dc29 | refs/heads/main | 2023-08-24T12:40:15.528432 | 2023-08-24T09:17:25 | 2023-08-24T09:17:25 | 106,580,024 | 336 | 1,226 | MIT | 2023-09-14T10:48:57 | 2017-10-11T16:27:31 | Python | UTF-8 | Python | false | false | 711 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core import AzCommandsLoader
class MockCommand:
def __init__(self):
self.cli_ctx = MockCLIContext()
class MockCLIContext:
def __init__(self):
self.cloud = MockCloud()
class MockCloud:
def __init__(self):
self.endpoints = Endpoints()
class Endpoints:
def __init__(self):
self.resource_manager = "" | [
"[email protected]"
] | |
d4904ca21f745e69b4cc262d2ecc00fba7d06012 | 3d19e1a316de4d6d96471c64332fff7acfaf1308 | /Users/A/anoukmpg/basic_twitter_scraperchavez_1.py | d364c9eb557ece039635cdb7b4d9fdd75b2b9d7f | [] | no_license | BerilBBJ/scraperwiki-scraper-vault | 4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc | 65ea6a943cc348a9caf3782b900b36446f7e137d | refs/heads/master | 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,420 | py | ###################################################################################
# Twitter scraper - designed to be forked and used for more interesting things
###################################################################################
import scraperwiki
import simplejson
import urllib2
# Change QUERY to your search term of choice.
# Examples: 'newsnight', 'from:bbcnewsnight', 'to:bbcnewsnight'
QUERY = 'project x haren'
RESULTS_PER_PAGE = '100'
NUM_PAGES = 1000000
for page in range(1, NUM_PAGES+1):
base_url = 'http://search.twitter.com/search.json?q=%s&rpp=%s&page=%s' \
% (urllib2.quote(QUERY), RESULTS_PER_PAGE, page)
try:
results_json = simplejson.loads(scraperwiki.scrape(base_url))
for result in results_json['results']:
#print result
data = {}
data['id'] = result['id']
data['text'] = result['text']
data['from_user'] = result['from_user']
data['created_at'] = result['created_at']
print data['from_user'], data['text']
scraperwiki.sqlite.save(["id"], data)
except:
print 'Oh dear, failed to scrape %s' % base_url
break
| [
"[email protected]"
] | |
901f3c3dd2303e61d8c714f457a19c57173a0f9b | 0931696940fc79c4562c63db72c6cabfcb20884d | /Functions/Loading_Bar.py | 6924207e641d4053b76d424b3c26fb97beb5c9b1 | [] | no_license | ivklisurova/SoftUni_Fundamentals_module | f847b9de9955c8c5bcc057bb38d57162addd6ad8 | 69242f94977c72005f04da78243a5113e79d6c33 | refs/heads/master | 2021-12-01T01:56:22.067928 | 2021-11-08T17:07:31 | 2021-11-08T17:07:31 | 253,281,893 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 523 | py | number = int(input())
def loading(num):
loading_bar = []
x = num // 10
y = ((100 - num) // 10)
z = '[' + x * '%' + y * '.' + ']'
loading_bar.append(z)
    if (num // 10) == 10:  # bug fix: use the 'num' parameter, not the global 'number'
loading_bar.insert(0, '100% Complete!')
print(loading_bar[0])
print(loading_bar[1])
else:
loading_bar.insert(0, f'{num}%')
loading_bar.append('Still loading...')
        print(' '.join(loading_bar[0:2]))
print(loading_bar[2])
return loading_bar
loading(number)
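# Illustrative runs (stdin -> stdout), assuming inputs are multiples of 10:
#   30  -> "30% [%%%.......]" then "Still loading..."
#   100 -> "100% Complete!"  then "[%%%%%%%%%%]"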
| [
"[email protected]"
] | |
3555ae78bbf5408ef0a69abe52dedbabad3a0cdf | 57ae5bfbb24ba5fec90c9b3ecf1e15f06f1546ee | /7-5-1.py | c11af7c98b341df6cf8ce912c9b84b4464523aac | [] | no_license | cjjhust/python_datastucture | f9104f1d5cae2df6436f56d4d3eaa212e6045943 | a222bbe4bfc145ee11f73676c8f033f451d45f78 | refs/heads/master | 2022-07-03T19:53:07.066387 | 2020-05-15T08:06:21 | 2020-05-15T08:06:21 | 260,272,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 588 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Nov 10 17:36:36 2018
@author: CJJ
"""
import random
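# getRandomM below is a partial Fisher-Yates shuffle: after the loop, a[0:m]
# holds a uniformly random m-element sample of the original n elements.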
def getRandomM(a,n,m):
    if a is None or n<=0 or n<m:
        print "Invalid arguments"
        return
i=0
while i<m:
        j=random.randint(i,n-1) # pick a random index in [i, n-1]
        # swap the randomly chosen element to the front of the array
tmp=a[i]
a[i]=a[j]
a[j]=tmp
i +=1
if __name__=="__main__":
a= [1, 2, 3, 4, 5, 6, 7, 8, 9,10 ]
n = 10
m = 6
getRandomM(a, n, m)
i=0
while i<m:
print a[i],
i +=1
| [
"[email protected]"
] | |
d8c1bee039b5b2874724ca9a9ee2b2dd01e43952 | 7da433fc52a167a1e9b593eda1d1ee9eee03ccf1 | /2019-prog-labooefeningen-forrestjan/week8/week08_test_bestanden_klassen/test_spelers.py | d18fe5544310a4f2dedbacc858045acbeab0d460 | [] | no_license | forrestjan/Labos-MCT-19-20 | 69cb26d2f1584a54db32750037dcb900a65a0ae6 | c9392cf0cbd9ad6e5974140060b9d0beaf0a202f | refs/heads/main | 2022-12-30T14:49:35.461246 | 2020-10-14T11:59:16 | 2020-10-14T11:59:16 | 304,000,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,207 | py | from model.Speler import Speler
from model.Geboortedatum import Geboortedatum
def test_oef1():
    # accessing a public static variable (class attribute)
Speler.naam_ploeg = "Rode duivels"
sp1 = Speler("Thibault", "Cortous", "keeper", 8, 0)
sp2 = Speler("Vincent", "Kompany", "aanvaller", 8, 3)
    # params 4 and 5 are not supplied; see the default params in the class __init__()
sp3 = Speler("Axel", "Witsel", "aanvaller")
print(sp1)
print(sp2)
print(sp3)
    print("\nVincent scores!")
sp2.maak_doelpunt()
print(sp2)
    print("\nAxel scores!")
sp3.maak_doelpunt()
print(sp3)
    print(
        f"The goal tally of {Speler.naam_ploeg} is {Speler.get_doelpunten_saldo_ploeg()}")
# test_oef1()
def test_spelers_oef3():
sp1 = Speler("Thibault", "Cortous", "keeper",
8, 0, Geboortedatum(11, 5, 1992))
sp2 = Speler("Vincent", "Kompany", "aanvaller",
8, 3, Geboortedatum(10, 4, 1986))
sp3 = Speler("Axel", "Witsel", "aanvaller")
    print("\nThe players' birth dates are:")
for speler in [sp1, sp2, sp3]:
        print(f"{speler} -> birth date: {speler.geboortedatum}")
test_spelers_oef3()
| [
"[email protected]"
] | |
69ef8c6d7beb126718ee82bda6f72258a06cd3ad | 32226e72c8cbaa734b2bdee081c2a2d4d0322702 | /railrl/data_management/env_replay_buffer.py | 781568e5f90fa8db7cff99c76ab6480aa1591e56 | [
"MIT"
] | permissive | Asap7772/rail-rl-franka-eval | 2b1cbad7adae958b3b53930a837df8a31ab885dc | 4bf99072376828193d05b53cf83c7e8f4efbd3ba | refs/heads/master | 2022-11-15T07:08:33.416025 | 2020-07-12T22:05:32 | 2020-07-12T22:05:32 | 279,155,722 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,705 | py | from gym.spaces import Discrete
from railrl.data_management.simple_replay_buffer import SimpleReplayBuffer
from railrl.envs.env_utils import get_dim
import numpy as np
class EnvReplayBuffer(SimpleReplayBuffer):
def __init__(
self,
max_replay_buffer_size,
env,
env_info_sizes=None
):
"""
:param max_replay_buffer_size:
:param env:
"""
self.env = env
self._ob_space = env.observation_space
self._action_space = env.action_space
if env_info_sizes is None:
if hasattr(env, 'info_sizes'):
env_info_sizes = env.info_sizes
else:
env_info_sizes = dict()
super().__init__(
max_replay_buffer_size=max_replay_buffer_size,
observation_dim=get_dim(self._ob_space),
action_dim=get_dim(self._action_space),
env_info_sizes=env_info_sizes
)
def add_sample(self, observation, action, reward, terminal,
next_observation, **kwargs):
if isinstance(self._action_space, Discrete):
new_action = np.zeros(self._action_dim)
new_action[action] = 1
else:
new_action = action
return super().add_sample(
observation=observation,
action=new_action,
reward=reward,
next_observation=next_observation,
terminal=terminal,
**kwargs
)
class VPGEnvReplayBuffer(EnvReplayBuffer):
def __init__(
self,
max_replay_buffer_size,
env,
discount_factor,
):
super().__init__(max_replay_buffer_size, env)
self._returns = np.zeros((max_replay_buffer_size, 1))
self.current_trajectory_rewards = np.zeros((max_replay_buffer_size, 1))
self._max_replay_buffer_size = max_replay_buffer_size
self.discount_factor = discount_factor
self._bottom = 0
def terminate_episode(self):
returns = []
return_so_far = 0
        # Walk the current trajectory backwards over absolute buffer indices;
        # the original slice-length loop indexed the rewards from 0 and broke
        # once self._bottom had advanced past the start of the buffer.
        for t in range(self._top - 1, self._bottom - 1, -1):
            return_so_far = self._rewards[t][0] + self.discount_factor * return_so_far
returns.append(return_so_far)
returns = returns[::-1]
        returns = np.reshape(np.array(returns), (len(returns), 1))
self._returns[self._bottom:self._top] = returns
self._bottom = self._top
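        # Worked example of the backward recursion above: rewards [1, 1, 1]
        # with discount_factor 0.9 yield returns [2.71, 1.9, 1.0].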
def add_sample(self, observation, action, reward, terminal,
next_observation, **kwargs):
if self._top == self._max_replay_buffer_size:
raise EnvironmentError('Replay Buffer Overflow, please reduce the number of samples added!')
super().add_sample(observation, action, reward, terminal, next_observation, **kwargs)
def get_training_data(self):
batch= dict(
observations=self._observations[0:self._top],
actions=self._actions[0:self._top],
rewards=self._rewards[0:self._top],
terminals=self._terminals[0:self._top],
next_observations=self._next_obs[0:self._top],
returns = self._returns[0:self._top],
)
return batch
def empty_buffer(self):
self._observations = np.zeros(self._observations.shape)
self._next_obs = np.zeros(self._next_obs.shape)
self._actions = np.zeros(self._actions.shape)
self._rewards = np.zeros(self._rewards.shape)
self._terminals = np.zeros(self._terminals.shape, dtype='uint8')
self._returns = np.zeros(self._returns.shape)
self._size = 0
self._top = 0
self._bottom = 0
| [
"[email protected]"
] | |
ee47403e642fefd6b1e6f6c62477a24a6e0ce22c | dfdb672bbe3b45175806928d7688a5924fc45fee | /Learn Python the Hard Way Exercises/ex41.py | 3abb331053e8c7304b85e45517c62a437d126264 | [] | no_license | mathans1695/Python-Practice | bd567b5210a4d9bcd830607627293d64b4baa909 | 3a8fabf14bc65b8fe973488503f12fac224a44ed | refs/heads/master | 2023-01-01T13:49:05.789809 | 2020-10-26T02:37:05 | 2020-10-26T02:37:05 | 306,300,672 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,972 | py | import random
from urllib import urlopen
import sys
WORD_URL = "http://learncodethehardway.org/words.txt"
WORDS = []
PHRASES = {
"class %%%(%%%):":
"Make a class named %%% that is-a %%%.",
"class %%%(object):\n\tdef __init__(self, ***)":
"class %%% has-a __init__ that takes self and *** parameters.",
"class %%%(object):\n\tdef ***(self, @@@)":
"class %%% has-a function named *** that takes self and @@@ parameters.",
"*** = %%%()":
"Set *** to an instance of class %%%.",
"***.***(@@@)":
"From *** get the *** function, and call it with parameters self, @@@.",
"***.*** = '***'":
"From *** get the *** attribute and set it to '***'."
}
# do they want to drill phrases first
PHRASE_FIRST = False
if len(sys.argv) == 2 and sys.argv[1] == "english":
PHRASE_FIRST = True
# load up the words from the website
for word in urlopen(WORD_URL).readlines():
WORDS.append(word.strip())
def convert(snippet, phrase):
    class_names = [w.capitalize() for w in random.sample(WORDS, snippet.count("%%%"))]
    other_names = random.sample(WORDS, snippet.count("***"))  # restored: without this line convert() hits a NameError below
results = []
param_names = []
for i in range(0, snippet.count("@@@")):
param_count = random.randint(1,3)
param_names.append(', '.join(random.sample(WORDS, param_count)))
for sentence in snippet, phrase:
result = sentence[:]
# fake class names
for word in class_names:
result = result.replace("%%%", word, 1)
# fake other names
for word in other_names:
result = result.replace("***", word, 1)
# fake parameter lists
for word in param_names:
result = result.replace("@@@", word, 1)
results.append(result)
return results
# keep going until they hit CTRL- D
try:
while True:
snippets = PHRASES.keys()
random.shuffle(snippets)
for snippet in snippets:
phrase = PHRASES[snippet]
question, answer = convert(snippet, phrase)
if PHRASE_FIRST:
question, answer = answer, question
print question
raw_input("> ")
print "ANSWER: %s\n\n" % answer
except EOFError:
print "\nBye" | [
"[email protected]"
] | |
a9af131a9d4b68580fd5afc8d61703d0b57ce1d8 | e42a61b7be7ec3412e5cea0ffe9f6e9f34d4bf8d | /a10sdk/core/A10_import/import_periodic_ssl_cert.py | 1a4f81061a75db8ac18bba2be452cb33eb364dc8 | [
"Apache-2.0"
] | permissive | amwelch/a10sdk-python | 4179565afdc76cdec3601c2715a79479b3225aef | 3e6d88c65bd1a2bf63917d14be58d782e06814e6 | refs/heads/master | 2021-01-20T23:17:07.270210 | 2015-08-13T17:53:23 | 2015-08-13T17:53:23 | 40,673,499 | 0 | 0 | null | 2015-08-13T17:51:35 | 2015-08-13T17:51:34 | null | UTF-8 | Python | false | false | 2,409 | py | from a10sdk.common.A10BaseClass import A10BaseClass
class SslCert(A10BaseClass):
"""Class Description::
SSL Cert File(enter bulk when import an archive file).
Class ssl-cert supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param pfx_password: {"description": "The password for certificate file (pfx type only)", "format": "string", "minLength": 1, "optional": true, "maxLength": 128, "type": "string"}
:param csr_generate: {"default": 0, "optional": true, "type": "number", "description": "Generate CSR file", "format": "flag"}
:param remote_file: {"optional": true, "type": "string", "description": "profile name for remote url", "format": "url"}
:param use_mgmt_port: {"default": 0, "optional": true, "type": "number", "description": "Use management port as source port", "format": "flag"}
:param period: {"description": "Specify the period in second", "format": "number", "type": "number", "maximum": 31536000, "minimum": 60, "optional": true}
:param certificate_type: {"optional": true, "enum": ["pem", "der", "pfx", "p7b"], "type": "string", "description": "'pem': pem; 'der': der; 'pfx': pfx; 'p7b': p7b; ", "format": "enum"}
:param ssl_cert: {"description": "SSL Cert File(enter bulk when import an archive file)", "format": "string", "minLength": 1, "optional": false, "maxLength": 255, "type": "string"}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/import-periodic/ssl-cert/{ssl_cert}`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required = [ "ssl_cert"]
self.b_key = "ssl-cert"
self.a10_url="/axapi/v3/import-periodic/ssl-cert/{ssl_cert}"
self.DeviceProxy = ""
self.pfx_password = ""
self.csr_generate = ""
self.remote_file = ""
self.use_mgmt_port = ""
self.period = ""
self.certificate_type = ""
self.ssl_cert = ""
self.uuid = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
| [
"[email protected]"
] | |
3505d9a921cb2f8e0d01d19c363eb3e875fa5f8c | 1a83ce28cf596558bd0d8280086e27bc48d0a500 | /src/command_modules/azure-cli-iotcentral/setup.py | c771f17dc15992b5221cf74250e4dc4c4ef6371c | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | willerLiu/azure-cli | 70d8c5fa4e00c0dd3fb3bc90434d613346a03bd2 | 628ba933b954d41ad42f5c938b0f2cac55f94be2 | refs/heads/master | 2020-04-10T15:08:55.939172 | 2018-11-29T21:32:10 | 2018-12-04T22:09:38 | 161,098,793 | 0 | 0 | NOASSERTION | 2018-12-10T01:18:57 | 2018-12-10T01:18:56 | null | UTF-8 | Python | false | false | 1,988 | py | #!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from codecs import open
from setuptools import setup
try:
from azure_bdist_wheel import cmdclass
except ImportError:
from distutils import log as logger
logger.warn("Wheel is not available, disabling bdist_wheel hook")
cmdclass = {}
VERSION = "0.1.4"
# The full list of classifiers is available at
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
]
DEPENDENCIES = [
'azure-cli-core',
'azure-mgmt-iotcentral==1.0.0'
]
with open('README.rst', 'r', encoding='utf-8') as f:
README = f.read()
with open('HISTORY.rst', 'r', encoding='utf-8') as f:
HISTORY = f.read()
setup(
name='azure-cli-iotcentral',
version=VERSION,
description='Microsoft Azure Command-Line Tools IoT Central Command Module',
long_description=README + '\n\n' + HISTORY,
license='MIT',
author='Microsoft Corporation',
author_email='[email protected]',
url='https://github.com/Azure/azure-cli',
classifiers=CLASSIFIERS,
packages=[
'azure',
'azure.cli',
'azure.cli.command_modules',
'azure.cli.command_modules.iotcentral'
],
install_requires=DEPENDENCIES,
cmdclass=cmdclass
)
| [
"[email protected]"
] | |
30baca899c2b35d673ab60b7bd8c885b3d9cb6e7 | 94dde46196ec93704367d4b3dae3a8ec700e2fd7 | /examples/button.py | c475fae2f499f877cdb74c51fe1181576328600e | [
"LicenseRef-scancode-other-permissive",
"BSD-3-Clause"
] | permissive | saghul/python-asiri | 5200697ddb99471ff7daba415351c23430de9791 | 1060fa6805fe52f348fe1b33ebcfa5814b0fec26 | refs/heads/master | 2021-01-01T18:47:44.947924 | 2013-10-28T22:39:55 | 2013-10-28T22:39:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py |
from __future__ import print_function
from asiri import GPIO
from time import sleep
BTN = 9
gpio = GPIO(num_gpios=16)
gpio.setup(BTN, GPIO.IN)
try:
prev_input = 0
while True:
input = gpio.input(BTN)
if not prev_input and input:
print("Button pressed!")
prev_input = input
sleep(0.05)
except KeyboardInterrupt:
pass
finally:
gpio.cleanup()
| [
"[email protected]"
] | |
9d43360e38253f6483d86bd55776635339f42e08 | 8707f9244fcb0f34901a9ff79683c4f0fe883d20 | /neo/test/iotest/test_spikeglxio.py | b77ec7accc6cfe3393c66907495c3963ad470306 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | teogale/python-neo | cd9b93d1fdbf99848684fbdedd46ced0dabd5feb | cd4226ddcfbace080c4734f562f706423979f2dc | refs/heads/master | 2021-06-21T20:38:06.781570 | 2021-05-21T16:28:46 | 2021-05-21T16:28:46 | 195,394,638 | 0 | 0 | BSD-3-Clause | 2020-07-20T11:45:16 | 2019-07-05T10:59:36 | Python | UTF-8 | Python | false | false | 388 | py | """
Tests of neo.io.spikeglxio
"""
import unittest
from neo.io import SpikeGLXIO
from neo.test.iotest.common_io_test import BaseTestIO
class TestSpikeGLXIO(BaseTestIO, unittest.TestCase):
ioclass = SpikeGLXIO
entities_to_download = [
'spikeglx'
]
entities_to_test = [
'spikeglx/Noise4Sam_g0'
]
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
155f07aadc216c4e1e5c876307b9a82734fe9ed3 | 0add7953d3e3ce2df9e8265102be39b758579753 | /built-in/TensorFlow/Research/reinforcement-learning/DQN_for_TensorFlow/rl/xt/model/dqn/rainbow_network_cnn.py | 9406c8f1e66f4627c29bc2808b78f94f19b43391 | [
"Apache-2.0"
] | permissive | Huawei-Ascend/modelzoo | ae161c0b4e581f8b62c77251e9204d958c4cf6c4 | df51ed9c1d6dbde1deef63f2a037a369f8554406 | refs/heads/master | 2023-04-08T08:17:40.058206 | 2020-12-07T08:04:57 | 2020-12-07T08:04:57 | 319,219,518 | 1 | 1 | Apache-2.0 | 2023-03-24T22:22:00 | 2020-12-07T06:01:32 | Python | UTF-8 | Python | false | false | 3,107 | py | """
@Author: Jack Qian
@license : Copyright(C), Huawei
"""
from __future__ import absolute_import, division, print_function
import tensorflow as tf
from xt.model.tf_compat import Dense, Input, Conv2D, \
Model, Adam, Lambda, Flatten, K
from xt.model.dqn.default_config import LR
from xt.model import XTModel
from xt.util.common import import_config
from xt.framework.register import Registers
@Registers.model.register
class RainbowNetworkCnn(XTModel):
"""docstring for ."""
def __init__(self, model_info):
model_config = model_info.get('model_config', None)
import_config(globals(), model_config)
self.state_dim = model_info['state_dim']
self.action_dim = model_info['action_dim']
self.learning_rate = LR
self.atoms = 51
super(RainbowNetworkCnn, self).__init__(model_info)
def create_model(self, model_info):
"""create keras model"""
state = Input(shape=self.state_dim, name='state_input')
action = Input(shape=(2, ), name='action', dtype='int32')
target_p = Input(shape=(self.atoms, ), name="target_p")
convlayer = Conv2D(32, (8, 8), strides=(4, 4), activation='relu', padding='same')(state)
convlayer = Conv2D(64, (4, 4), strides=(2, 2), activation='relu', padding='same')(convlayer)
convlayer = Conv2D(64, (3, 3), strides=(1, 1), activation='relu', padding='same')(convlayer)
flattenlayer = Flatten()(convlayer)
denselayer = Dense(512, activation='relu')(flattenlayer)
value = Dense(1, activation=None)(denselayer)
denselayer = Dense(512, activation='relu')(flattenlayer)
atom = Dense(self.action_dim * self.atoms, activation=None)(denselayer)
mean = Lambda(lambda x: tf.subtract(
tf.reshape(x, [-1, self.action_dim, self.atoms]),
tf.reduce_mean(tf.reshape(x, [-1, self.action_dim, self.atoms]), axis=1, keep_dims=True)))(atom)
value = Lambda(lambda x: tf.add(tf.expand_dims(x[0], 1), x[1]))([value, mean])
        # prob = Lambda(lambda x: tf.nn.softmax(x), name="output")(value)
        # pylint flagged the Lambda above as unnecessary, so softmax is applied directly:
prob = tf.nn.softmax(value, name="output")
model = Model(inputs=[state, action, target_p], outputs=prob)
adam = Adam(lr=self.learning_rate, clipnorm=10.)
model.compile(loss=[dist_dqn_loss(action=action, target_p=target_p)], optimizer=adam)
return model
def train(self, state, label):
with self.graph.as_default():
K.set_session(self.sess)
# print(type(state[2][0][0]))
loss = self.model.fit(x={
'state_input': state[0],
'action': state[1],
'target_p': state[2]
},
y={"output": label},
verbose=0)
return loss
def dist_dqn_loss(action, target_p):
"""loss for rainbow"""
def loss(y_true, y_pred):
y_pred = tf.gather_nd(y_pred, action)
return -K.mean(target_p * K.log((y_pred + 1e-10)))
return loss
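# The loss above is the cross-entropy between the (projected) target atom
# distribution and the predicted one: loss = -sum_i target_p[i] * log(pred_p[i]),
# as in the C51 / Rainbow distributional-RL formulation.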
| [
"[email protected]"
] | |
7f1bf9f84ba0a732878a0df6d12969923da3f860 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03417/s820557335.py | 781e7a91ed400cdd9fa97625e372b4993251ae0e | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 288 | py | N, M = map(int, input().split())
N, M = min(N,M), max(N,M)
ans = [[1], [2, 0], [3, 0, 9], [2, 0, 6, 4]]
if M <= 4:
print(ans[M-1][N-1])
exit()
if N == 1:
print(M-2)
exit()
if N == 2:
print(0)
exit()
if N == 3:
print(N*M - 6)
exit()
print(4 + (N-4 + M-4)*2 + (N-4)*(M-4)) | [
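# Sanity check on the general case (an algebraic identity, easy to verify):
# 4 + ((N-4) + (M-4))*2 + (N-4)*(M-4) == (N-2)*(M-2)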
"[email protected]"
] | |
d9c2578c85a6972b534c9c7bb4f968d53c437283 | c3c31ce9a8822ac0352475934f5b3fbdacac62a1 | /ssseg/cfgs/setr/cfgs_ade20k_vitlargepup.py | f48eba513dac55967b0f6bcdfe89c5ae16221369 | [
"MIT"
] | permissive | guofenggitlearning/sssegmentation | 1e51b5b14bff3b5ad0d469ac98d711adb79cef11 | 7a405b1a4949606deae067223ebd68cceec6b225 | refs/heads/main | 2023-08-28T03:26:39.204259 | 2021-11-03T00:51:30 | 2021-11-03T00:51:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,896 | py | '''define the config file for ade20k and ViT-Large'''
import os
from .base_cfg import *
# modify dataset config
DATASET_CFG = DATASET_CFG.copy()
DATASET_CFG.update({
'type': 'ade20k',
'rootdir': os.path.join(os.getcwd(), 'ADE20k'),
})
# modify dataloader config
DATALOADER_CFG = DATALOADER_CFG.copy()
# modify optimizer config
OPTIMIZER_CFG = OPTIMIZER_CFG.copy()
OPTIMIZER_CFG.update(
{
'max_epochs': 130
}
)
# modify losses config
LOSSES_CFG = LOSSES_CFG.copy()
# modify model config
MODEL_CFG = MODEL_CFG.copy()
MODEL_CFG.update(
{
'num_classes': 150,
'backbone': {
'type': 'jx_vit_large_p16_384',
'series': 'vit',
'img_size': (512, 512),
'drop_rate': 0.,
'out_indices': (9, 14, 19, 23),
'norm_cfg': {'type': 'layernorm', 'opts': {'eps': 1e-6}},
'pretrained': True,
'selected_indices': (0, 1, 2, 3),
},
'auxiliary': [
{'in_channels': 1024, 'out_channels': 256, 'dropout': 0, 'num_convs': 2, 'scale_factor': 4, 'kernel_size': 3},
{'in_channels': 1024, 'out_channels': 256, 'dropout': 0, 'num_convs': 2, 'scale_factor': 4, 'kernel_size': 3},
{'in_channels': 1024, 'out_channels': 256, 'dropout': 0, 'num_convs': 2, 'scale_factor': 4, 'kernel_size': 3},
],
}
)
# modify inference config
INFERENCE_CFG = INFERENCE_CFG.copy()
# modify common config
COMMON_CFG = COMMON_CFG.copy()
COMMON_CFG['train'].update(
{
'backupdir': 'setrpup_vitlarge_ade20k_train',
'logfilepath': 'setrpup_vitlarge_ade20k_train/train.log',
}
)
COMMON_CFG['test'].update(
{
'backupdir': 'setrpup_vitlarge_ade20k_test',
'logfilepath': 'setrpup_vitlarge_ade20k_test/test.log',
'resultsavepath': 'setrpup_vitlarge_ade20k_test/setrpup_vitlarge_ade20k_results.pkl'
}
) | [
"[email protected]"
] | |
2bfbc58965d9d248a69f51634025236919cf1d9e | debffca14a39dbeaf6af2f1b73ea530913e2cdad | /astromodels/sources/point_source.py | 49a84aa715727218eee22eced99200b679a5a506 | [
"BSD-3-Clause"
] | permissive | BjoernBiltzinger/astromodels | 6986695abfc4510a62254854fd0977b1e96e192f | d94a3d3bc607def2b5e3cd145c3922e0a00a7b15 | refs/heads/master | 2022-11-03T19:28:16.949036 | 2019-03-12T17:05:59 | 2019-03-12T17:05:59 | 175,420,543 | 0 | 0 | BSD-3-Clause | 2019-03-13T12:53:03 | 2019-03-13T12:53:03 | null | UTF-8 | Python | false | false | 8,896 | py | import collections
import astropy.units as u
import numpy
import scipy.integrate
from astromodels.core.sky_direction import SkyDirection
from astromodels.core.spectral_component import SpectralComponent
from astromodels.core.tree import Node
from astromodels.core.units import get_units
from astromodels.sources.source import Source, POINT_SOURCE
from astromodels.utils.pretty_list import dict_to_list
from astromodels.core.memoization import use_astromodels_memoization
__author__ = 'giacomov'
__all__ = ["PointSource"]
class PointSource(Source, Node):
"""
    A point source. You can instantiate this class in several ways.
- with Equatorial position and a function as spectrum (the component will be automatically called 'main')::
>>> from astromodels import *
>>> point_source = PointSource('my_source', 125.6, -75.3, Powerlaw())
- with Galactic position and a function as spectrum (the component will be automatically called 'main')::
>>> point_source = PointSource('my_source', l=15.67, b=80.75, spectral_shape=Powerlaw())
- with Equatorial position or Galactic position and a list of spectral components::
>>> c1 = SpectralComponent("component1", Powerlaw())
>>> c2 = SpectralComponent("component2", Powerlaw())
>>> point_source = PointSource("test_source",125.6, -75.3,components=[c1,c2])
Or with Galactic position:
>>> point_source = PointSource("test_source",l=15.67, b=80.75,components=[c1,c2])
NOTE: by default the position of the source is fixed (i.e., its positional parameters are fixed)
:param source_name: name for the source
:param ra: Equatorial J2000 Right Ascension (ICRS)
:param dec: Equatorial J2000 Declination (ICRS)
:param spectral_shape: a 1d function representing the spectral shape of the source
    :param l: Galactic longitude
    :param b: Galactic latitude
:param components: list of spectral components (instances of SpectralComponent)
:param sky_position: an instance of SkyDirection
:return:
"""
def __init__(self, source_name, ra=None, dec=None, spectral_shape=None,
l=None, b=None, components=None, sky_position=None):
# Check that we have all the required information
# (the '^' operator acts as XOR on booleans)
# Check that we have one and only one specification of the position
assert ((ra is not None and dec is not None) ^
(l is not None and b is not None) ^
(sky_position is not None)), "You have to provide one and only one specification for the position"
# Gather the position
if not isinstance(sky_position, SkyDirection):
if (ra is not None) and (dec is not None):
# Check that ra and dec are actually numbers
try:
ra = float(ra)
dec = float(dec)
except (TypeError, ValueError):
raise AssertionError("RA and Dec must be numbers. If you are confused by this message, you "
"are likely using the constructor in the wrong way. Check the documentation.")
sky_position = SkyDirection(ra=ra, dec=dec)
else:
sky_position = SkyDirection(l=l, b=b)
self._sky_position = sky_position
# Now gather the component(s)
# We need either a single component, or a list of components, but not both
# (that's the ^ symbol)
assert (spectral_shape is not None) ^ (components is not None), "You have to provide either a single " \
"component, or a list of components " \
"(but not both)."
# If the user specified only one component, make a list of one element with a default name ("main")
if spectral_shape is not None:
components = [SpectralComponent("main", spectral_shape)]
Source.__init__(self, components, POINT_SOURCE)
# A source is also a Node in the tree
Node.__init__(self, source_name)
# Add the position as a child node, with an explicit name
self._add_child(self._sky_position)
# Add a node called 'spectrum'
spectrum_node = Node('spectrum')
spectrum_node._add_children(self._components.values())
self._add_child(spectrum_node)
# Now set the units
# Now sets the units of the parameters for the energy domain
current_units = get_units()
# Components in this case have energy as x and differential flux as y
x_unit = current_units.energy
y_unit = (current_units.energy * current_units.area * current_units.time) ** (-1)
# Now set the units of the components
for component in self._components.values():
component.shape.set_units(x_unit, y_unit)
def __call__(self, x, tag=None):
if tag is None:
# No integration nor time-varying or whatever-varying
if isinstance(x, u.Quantity):
# Slow version with units
results = [component.shape(x) for component in self.components.values()]
# We need to sum like this (slower) because using np.sum will not preserve the units
# (thanks astropy.units)
return sum(results)
else:
# Fast version without units, where x is supposed to be in the same units as currently defined in
# units.get_units()
results = [component.shape(x) for component in self.components.values()]
return numpy.sum(results, 0)
else:
# Time-varying or energy-varying or whatever-varying
integration_variable, a, b = tag
if b is None:
# Evaluate in a, do not integrate
# Suspend memoization because the memoization gets confused when integrating
with use_astromodels_memoization(False):
integration_variable.value = a
res = self.__call__(x, tag=None)
return res
else:
# Integrate between a and b
integrals = numpy.zeros(len(x))
# TODO: implement an integration scheme avoiding the for loop
# Suspend memoization because the memoization gets confused when integrating
with use_astromodels_memoization(False):
reentrant_call = self.__call__
for i, e in enumerate(x):
def integral(y):
integration_variable.value = y
return reentrant_call(e, tag=None)
# Now integrate
integrals[i] = scipy.integrate.quad(integral, a, b, epsrel=1e-5)[0]
return integrals / (b - a)
def has_free_parameters(self):
"""
        Returns True if there is any free parameter in this source, False otherwise
:return:
"""
for component in self._components.values():
for par in component.shape.parameters.values():
if par.free:
return True
for par in self.position.parameters.values():
if par.free:
return True
return False
@property
def free_parameters(self):
"""
Returns a dictionary of free parameters for this source
:return:
"""
free_parameters = collections.OrderedDict()
for component in self._components.values():
for par in component.shape.parameters.values():
if par.free:
free_parameters[par.name] = par
for par in self.position.parameters.values():
if par.free:
free_parameters[par.name] = par
return free_parameters
def _repr__base(self, rich_output=False):
"""
Representation of the object
:param rich_output: if True, generates HTML, otherwise text
:return: the representation
"""
# Make a dictionary which will then be transformed in a list
repr_dict = collections.OrderedDict()
key = '%s (point source)' % self.name
repr_dict[key] = collections.OrderedDict()
repr_dict[key]['position'] = self._sky_position.to_dict(minimal=True)
repr_dict[key]['spectrum'] = collections.OrderedDict()
        for component_name, component in self.components.items():
repr_dict[key]['spectrum'][component_name] = component.to_dict(minimal=True)
return dict_to_list(repr_dict, rich_output)
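
# Illustrative usage sketch (not part of the library; assumes the Powerlaw
# spectral shape from the docstring examples above is importable):
#
#   import numpy as np
#   src = PointSource('crab', ra=83.63, dec=22.01, spectral_shape=Powerlaw())
#   fluxes = src(np.logspace(-1, 3, 100))  # differential fluxes on an energy grid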
| [
"[email protected]"
] | |
5e8f48e09af192100501d1ea79b2d6ebdfeb1891 | 8c39fa8241e1ecefab6c693862bee127fd3e1461 | /proyectoferreteria/apps/gestionadmin/admin.py | cc0cf6bf0161c0beb7007b523f67678f91a2881b | [] | no_license | ujcv4273/Ferreteriav-0.0.5 | b5433e727b68e318204386f84416208f99470446 | 9dd16363ce9f4a012a177aa3d5414051b79cd3a2 | refs/heads/master | 2022-11-29T16:50:19.066725 | 2020-08-01T18:16:35 | 2020-08-01T18:16:35 | 284,314,116 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,642 | py | from django.contrib import admin
from django.db import models
from django.core.paginator import Paginator
from proyectoferreteria.apps.gestionadmin.models import FormaPago, MetodoPago, Garantia, Marca, Categoria, Proveedor, Cliente, Planilla, Empleado, Producto, Factura, TurnoEmpleado, ComprasDet, ComprasEnc
# Register your models here.
##@admin.register(Factura)
import csv
import io
import os
import time
from io import BytesIO
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.staticfiles import finders
from django.http import FileResponse, HttpResponse, HttpResponseRedirect
from django.template.loader import get_template
from django.utils import timezone
from django.views.generic import View
from django_xhtml2pdf.utils import generate_pdf
from import_export import resources
from import_export.admin import ImportExportModelAdmin
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import inch
from reportlab.pdfgen import canvas
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table
from xhtml2pdf import pisa
def link_callback(uri, rel):
"""
Convert HTML URIs to absolute system paths so xhtml2pdf can access those
resources
"""
# use short variable names
sUrl = settings.STATIC_URL # Typically /static/
sRoot = settings.STATIC_ROOT # Typically /home/userX/project_static/
mUrl = settings.MEDIA_URL # Typically /static/media/
mRoot = settings.MEDIA_ROOT # Typically /home/userX/project_static/media/
# convert URIs to absolute system paths
if uri.startswith(mUrl):
path = os.path.join(mRoot, uri.replace(mUrl, ""))
elif uri.startswith(sUrl):
path = os.path.join(sRoot, uri.replace(sUrl, ""))
else:
return uri # handle absolute uri (ie: http://some.tld/foo.png)
# make sure that file exists
if not os.path.isfile(path):
raise Exception(
'media URI must start with %s or %s' % (sUrl, mUrl)
)
return path
class MarcaResource (resources.ModelResource):
class Meta:
model= Marca
class MarcaAdmin(admin.ModelAdmin):
list_per_page = 10
list_display = ('idMarca','nombreMarca')
resource_class = MarcaResource
actions = (
'export_as_csv','imprimir_pdf',
)
    def export_as_csv(self, request, queryset):
        """ Export CSV action """
        # meta holds the model metadata; its name is used for the file name
        meta = self.model._meta
        # Keep a list with the names of the model fields
        field_names = [field.name for field in meta.fields]
        # Create an HttpResponse to send the CSV file
        response = HttpResponse(content_type='text/csv')
        # Set the file name (taken from meta)
        response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
        # Create a csv writer that writes into our HttpResponse
        writer = csv.writer(response)
        # writerow writes the elements of the list it receives, one per csv
        # column, and then moves to the next line
        writer.writerow(field_names)
        # queryset holds the selected objects; iterate over it to write
        # their field values into the csv
        for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
        # Return the HttpResponse object
        return response
    # Give the action a friendly display name
    export_as_csv.short_description = 'Exportar a Excel'
def imprimir_pdf(self, request, queryset):
template_path = 'gestionadmin/pdfmarca.html'
today = timezone.now()
marcas = Marca.objects.all()
context = {
'obj': marcas,
'today': today,
'request': request
}
# Create a Django response object, and specify content_type as pdf
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'inline; filename="reporte de marcas.pdf"'
# find the template and render it.
template = get_template(template_path)
html = template.render(context)
# create a pdf
pisaStatus = pisa.CreatePDF(
html, dest=response, link_callback=link_callback)
        # if an error occurred, show a simple error page
if pisaStatus.err:
return HttpResponse('We had some errors <pre>' + html + '</pre>')
return response
imprimir_pdf.short_description = 'Exportar a pdf'
admin.site.register(Marca, MarcaAdmin)
class ClienteAdmin(admin.ModelAdmin):
list_per_page = 10
list_display = ('Id_Cliente','Identidad','Nombre_Cliente','Correo_Cliente','Direccion_Cliente','Telefono_Cliente')
##list_filter = ('Nombre_Cliente','Id_Cliente')
##list_display_links = ('Id_Cliente', 'Correo_Cliente')
search_fields = ('Nombre_Cliente','Id_Cliente')
actions = (
'export_as_csv','imprimir_pdf',
)
    def export_as_csv(self, request, queryset):
        """ Export CSV action """
        # meta holds the model metadata; its name is used for the file name
        meta = self.model._meta
        # Keep a list with the names of the model fields
        field_names = [field.name for field in meta.fields]
        # Create an HttpResponse to send the CSV file
        response = HttpResponse(content_type='text/csv')
        # Set the file name (taken from meta)
        response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
        # Create a csv writer that writes into our HttpResponse
        writer = csv.writer(response)
        # writerow writes the elements of the list it receives, one per csv
        # column, and then moves to the next line
        writer.writerow(field_names)
        # queryset holds the selected objects; iterate over it to write
        # their field values into the csv
        for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
        # Return the HttpResponse object
        return response
    # Give the action a friendly display name
    export_as_csv.short_description = 'Exportar a Excel'
def imprimir_pdf(self, request, queryset):
template_path = 'gestionadmin/pdfcliente.html'
today = timezone.now()
clientes = Cliente.objects.all()
context = {
'obj': clientes,
'today': today,
'request': request
}
# Create a Django response object, and specify content_type as pdf
response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = 'inline; filename="reporte de clientes.pdf"'
# find the template and render it.
template = get_template(template_path)
html = template.render(context)
# create a pdf
pisaStatus = pisa.CreatePDF(
html, dest=response, link_callback=link_callback)
        # if an error occurred, show a simple error page
if pisaStatus.err:
return HttpResponse('We had some errors <pre>' + html + '</pre>')
return response
imprimir_pdf.short_description = 'Exportar a pdf'
admin.site.register(Cliente, ClienteAdmin)
class CategoriaAdmin(admin.ModelAdmin):
list_per_page = 10
list_display =('Id_Categoria','Descripcion_Categoria')
search_fields =('Id_Categoria','Descripcion_Categoria')
actions = (
'export_as_csv','imprimir_pdf',
)
    def export_as_csv(self, request, queryset):
        """ Export CSV action """
        # meta holds the model metadata; its name is used for the file name
        meta = self.model._meta
        # Keep a list with the names of the model fields
        field_names = [field.name for field in meta.fields]
        # Create an HttpResponse to send the CSV file
        response = HttpResponse(content_type='text/csv')
        # Set the file name (taken from meta)
        response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
        # Create a csv writer that writes into our HttpResponse
        writer = csv.writer(response)
        # writerow writes the elements of the list it receives, one per csv
        # column, and then moves to the next line
        writer.writerow(field_names)
        # queryset holds the selected objects; iterate over it to write
        # their field values into the csv
        for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
        # Return the HttpResponse object
        return response
    # Give the action a friendly display name
    export_as_csv.short_description = 'Exportar a Excel'
def imprimir_pdf(self, request, queryset):
template_path = 'gestionadmin/pdfcategoria.html'
today = timezone.now()
categorias = Categoria.objects.all()
context = {
'obj': categorias,
'today': today,
'request': request
}
# Create a Django response object, and specify content_type as pdf
response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = 'inline; filename="reporte de categorias.pdf"'
# find the template and render it.
template = get_template(template_path)
html = template.render(context)
# create a pdf
pisaStatus = pisa.CreatePDF(
html, dest=response, link_callback=link_callback)
        # if an error occurred, show a simple error page
if pisaStatus.err:
return HttpResponse('We had some errors <pre>' + html + '</pre>')
return response
imprimir_pdf.short_description = 'Exportar a pdf'
admin.site.register(Categoria,CategoriaAdmin)
class ProveedorAdmin(admin.ModelAdmin):
list_per_page = 10
list_display = ('Id_Proveedor','Nombre_Proveedor','Correo_Proveedor','Direccion_Proveedor','Telefono_Proveedor')
search_fields = ('Id_Proveedor','Nombre_Proveedor','Correo_Proveedor','Direccion_Proveedor','Telefono_Proveedor')
actions = (
'export_as_csv','imprimir_pdf',
)
    def export_as_csv(self, request, queryset):
        """ Export CSV action """
        # meta holds the model metadata; its name is used for the file name
        meta = self.model._meta
        # Keep a list with the names of the model fields
        field_names = [field.name for field in meta.fields]
        # Create an HttpResponse to send the CSV file
        response = HttpResponse(content_type='text/csv')
        # Set the file name (taken from meta)
        response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
        # Create a csv writer that writes into our HttpResponse
        writer = csv.writer(response)
        # writerow writes the elements of the list it receives, one per csv
        # column, and then moves to the next line
        writer.writerow(field_names)
        # queryset holds the selected objects; iterate over it to write
        # their field values into the csv
        for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
        # Return the HttpResponse object
        return response
    # Give the action a friendly display name
    export_as_csv.short_description = 'Exportar a Excel'
def imprimir_pdf(self, request, queryset):
template_path = 'gestionadmin/pdfproveedor.html'
today = timezone.now()
proveedores = Proveedor.objects.all()
context = {
'obj': proveedores,
'today': today,
'request': request
}
# Create a Django response object, and specify content_type as pdf
response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = 'inline; filename="reporte de proveedores.pdf"'
# find the template and render it.
template = get_template(template_path)
html = template.render(context)
# create a pdf
pisaStatus = pisa.CreatePDF(
html, dest=response, link_callback=link_callback)
        # if an error occurred, show a simple error page
if pisaStatus.err:
return HttpResponse('We had some errors <pre>' + html + '</pre>')
return response
imprimir_pdf.short_description = 'Exportar a pdf'
admin.site.register(Proveedor,ProveedorAdmin)
class GarantiaAdmin(admin.ModelAdmin):
list_per_page = 10
list_display = ('Id_Garantia','Descripcion_Garantia','Tiempo_Garantia_Mes')
search_fields = ('Id_Garantia','Descripcion_Garantia','Tiempo_Garantia_Mes')
actions = (
'export_as_csv','imprimir_pdf',
)
    def export_as_csv(self, request, queryset):
        """ Export CSV action """
        # meta holds the model metadata; its name is used for the file name
        meta = self.model._meta
        # Keep a list with the names of the model fields
        field_names = [field.name for field in meta.fields]
        # Create an HttpResponse to send the CSV file
        response = HttpResponse(content_type='text/csv')
        # Set the file name (taken from meta)
        response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
        # Create a csv writer that writes into our HttpResponse
        writer = csv.writer(response)
        # writerow writes the elements of the list it receives, one per csv
        # column, and then moves to the next line
        writer.writerow(field_names)
        # queryset holds the selected objects; iterate over it to write
        # their field values into the csv
        for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
        # Return the HttpResponse object
        return response
    # Give the action a friendly display name
    export_as_csv.short_description = 'Exportar a Excel'
def imprimir_pdf(self, request, queryset):
template_path = 'gestionadmin/pdfgarantia.html'
today = timezone.now()
garantias = Garantia.objects.all()
context = {
'obj': garantias,
'today': today,
'request': request
}
# Create a Django response object, and specify content_type as pdf
response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = 'inline; filename="reporte de garantias.pdf"'
# find the template and render it.
template = get_template(template_path)
html = template.render(context)
# create a pdf
pisaStatus = pisa.CreatePDF(
html, dest=response, link_callback=link_callback)
        # if an error occurred, show a simple error page
if pisaStatus.err:
return HttpResponse('We had some errors <pre>' + html + '</pre>')
return response
imprimir_pdf.short_description = 'Exportar a pdf'
admin.site.register(Garantia,GarantiaAdmin)
class FormaPagoAdmin(admin.ModelAdmin):
list_per_page = 10
list_display = ('Id_Forma_Pago','Descripcion_Forma_Pago')
search_fields= ('Id_Forma_Pago','Descripcion_Forma_Pago')
actions = (
'export_as_csv','imprimir_pdf',
)
    def export_as_csv(self, request, queryset):
        """ Export CSV action """
        # meta holds the model metadata; its name is used for the file name
        meta = self.model._meta
        # Keep a list with the names of the model fields
        field_names = [field.name for field in meta.fields]
        # Create an HttpResponse to send the CSV file
        response = HttpResponse(content_type='text/csv')
        # Set the file name (taken from meta)
        response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
        # Create a csv writer that writes into our HttpResponse
        writer = csv.writer(response)
        # writerow writes the elements of the list it receives, one per csv
        # column, and then moves to the next line
        writer.writerow(field_names)
        # queryset holds the selected objects; iterate over it to write
        # their field values into the csv
        for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
        # Return the HttpResponse object
        return response
    # Give the action a friendly display name
    export_as_csv.short_description = 'Exportar a Excel'
def imprimir_pdf(self, request, queryset):
template_path = 'gestionadmin/pdfformapago.html'
today = timezone.now()
formapagos = FormaPago.objects.all()
context = {
'obj': formapagos,
'today': today,
'request': request
}
# Create a Django response object, and specify content_type as pdf
response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = 'inline; filename="reporte de formas de pago.pdf"'
# find the template and render it.
template = get_template(template_path)
html = template.render(context)
# create a pdf
pisaStatus = pisa.CreatePDF(
html, dest=response, link_callback=link_callback)
        # if an error occurred, show a simple error page
if pisaStatus.err:
return HttpResponse('We had some errors <pre>' + html + '</pre>')
return response
imprimir_pdf.short_description = 'Exportar a pdf'
admin.site.register(FormaPago,FormaPagoAdmin)
class MetodoPagoAdmin(admin.ModelAdmin):
list_per_page = 10
list_display = ('idMetodoPago','descripcionMetodoPago')
search_fields= ('idMetodoPago','descripcionMetodoPago')
actions = (
'export_as_csv','imprimir_pdf',
)
    def export_as_csv(self, request, queryset):
        """ Export CSV action """
        # meta holds the model metadata; its name is used for the file name
        meta = self.model._meta
        # Keep a list with the names of the model fields
        field_names = [field.name for field in meta.fields]
        # Create an HttpResponse to send the CSV file
        response = HttpResponse(content_type='text/csv')
        # Set the file name (taken from meta)
        response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
        # Create a csv writer that writes into our HttpResponse
        writer = csv.writer(response)
        # writerow writes the elements of the list it receives, one per csv
        # column, and then moves to the next line
        writer.writerow(field_names)
        # queryset holds the selected objects; iterate over it to write
        # their field values into the csv
        for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
        # Return the HttpResponse object
        return response
    # Give the action a friendly display name
    export_as_csv.short_description = 'Exportar a Excel'
def imprimir_pdf(self, request, queryset):
template_path = 'gestionadmin/pdfmetodopago.html'
today = timezone.now()
metodopagos = MetodoPago.objects.all()
context = {
'obj': metodopagos,
'today': today,
'request': request
}
# Create a Django response object, and specify content_type as pdf
response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = 'inline; filename="reporte de metodos de pago.pdf"'
# find the template and render it.
template = get_template(template_path)
html = template.render(context)
# create a pdf
pisaStatus = pisa.CreatePDF(
html, dest=response, link_callback=link_callback)
        # if an error occurred, show a simple error page
if pisaStatus.err:
return HttpResponse('We had some errors <pre>' + html + '</pre>')
return response
imprimir_pdf.short_description = 'Exportar a pdf'
admin.site.register(MetodoPago, MetodoPagoAdmin)
class PlanillaAdmin(admin.ModelAdmin):
list_per_page = 10
list_display = ('Id_Planilla','Sueldo_Base','IHSS','RAP')
search_fields = ('Id_Planilla','Sueldo_Base','IHSS','RAP')
actions = (
'export_as_csv','imprimir_pdf',
)
    def export_as_csv(self, request, queryset):
        """ Export CSV action """
        # meta holds the model metadata; its name is used for the file name
        meta = self.model._meta
        # Keep a list with the names of the model fields
        field_names = [field.name for field in meta.fields]
        # Create an HttpResponse to send the CSV file
        response = HttpResponse(content_type='text/csv')
        # Set the file name (taken from meta)
        response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
        # Create a csv writer that writes into our HttpResponse
        writer = csv.writer(response)
        # writerow writes the elements of the list it receives, one per csv
        # column, and then moves to the next line
        writer.writerow(field_names)
        # queryset holds the selected objects; iterate over it to write
        # their field values into the csv
        for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
        # Return the HttpResponse object
        return response
    # Give the action a friendly display name
    export_as_csv.short_description = 'Exportar a Excel'
def imprimir_pdf(self, request, queryset):
template_path = 'gestionadmin/pdfplanilla.html'
today = timezone.now()
planillas = Planilla.objects.all()
context = {
'obj': planillas,
'today': today,
'request': request
}
# Create a Django response object, and specify content_type as pdf
response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = 'inline; filename="reporte de planillas.pdf"'
# find the template and render it.
template = get_template(template_path)
html = template.render(context)
# create a pdf
pisaStatus = pisa.CreatePDF(
html, dest=response, link_callback=link_callback)
        # if an error occurred, show a simple error page
if pisaStatus.err:
return HttpResponse('We had some errors <pre>' + html + '</pre>')
return response
imprimir_pdf.short_description = 'Exportar a pdf'
admin.site.register(Planilla,PlanillaAdmin)
class EmpleadoAdmin(admin.ModelAdmin):
list_per_page = 10
list_display = ('Id_Empleado','Nombre_Empleado','Id_Turno','Id_Planilla','Direccion_Empleado','Telefono_Empleado')
search_fields = ('Id_Empleado','Id_Turno','Id_Planilla','Nombre_Empleado','Direccion_Empleado','Telefono_Empleado')
actions = (
'export_as_csv','imprimir_pdf',
)
    def export_as_csv(self, request, queryset):
        """ Export CSV action """
        # meta holds the model metadata; its name is used for the file name
        meta = self.model._meta
        # Keep a list with the names of the model fields
        field_names = [field.name for field in meta.fields]
        # Create an HttpResponse to send the CSV file
        response = HttpResponse(content_type='text/csv')
        # Set the file name (taken from meta)
        response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
        # Create a csv writer that writes into our HttpResponse
        writer = csv.writer(response)
        # writerow writes the elements of the list it receives, one per csv
        # column, and then moves to the next line
        writer.writerow(field_names)
        # queryset holds the selected objects; iterate over it to write
        # their field values into the csv
        for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
        # Return the HttpResponse object
        return response
    # Give the action a friendly display name
    export_as_csv.short_description = 'Exportar a Excel'
def imprimir_pdf(self, request, queryset):
template_path = 'gestionadmin/pdfempleado.html'
today = timezone.now()
empleados = Empleado.objects.all()
context = {
'obj': empleados,
'today': today,
'request': request
}
# Create a Django response object, and specify content_type as pdf
response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = 'inline; filename="reporte de empleados.pdf"'
# find the template and render it.
template = get_template(template_path)
html = template.render(context)
# create a pdf
pisaStatus = pisa.CreatePDF(
html, dest=response, link_callback=link_callback)
        # if an error occurred, show a simple error page
if pisaStatus.err:
return HttpResponse('We had some errors <pre>' + html + '</pre>')
return response
imprimir_pdf.short_description = 'Exportar a pdf'
admin.site.register(Empleado,EmpleadoAdmin)
class ProductoAdmin(admin.ModelAdmin):
list_per_page = 10
list_display = ('Id_Producto','Nombre_Producto','Precio_Venta','Precio_Compra','Id_Marca','Id_Categoria','Id_Garantia','Existencia','Existencia_Minima')
search_fields = ('Id_Producto','Nombre_Producto')
actions = (
'export_as_csv','imprimir_pdf',
)
    def export_as_csv(self, request, queryset):
        """ Export CSV action """
        # meta holds the model metadata; its name is used for the file name
        meta = self.model._meta
        # Keep a list with the names of the model fields
        field_names = [field.name for field in meta.fields]
        # Create an HttpResponse to send the CSV file
        response = HttpResponse(content_type='text/csv')
        # Set the file name (taken from meta)
        response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
        # Create a csv writer that writes into our HttpResponse
        writer = csv.writer(response)
        # writerow writes the elements of the list it receives, one per csv
        # column, and then moves to the next line
        writer.writerow(field_names)
        # queryset holds the selected objects; iterate over it to write
        # their field values into the csv
        for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
        # Return the HttpResponse object
        return response
    # Give the action a friendly display name
    export_as_csv.short_description = 'Exportar a Excel'
def imprimir_pdf(self, request, queryset):
template_path = 'gestionadmin/pdfproducto.html'
today = timezone.now()
productos = Producto.objects.all()
context = {
'obj': productos,
'today': today,
'request': request
}
# Create a Django response object, and specify content_type as pdf
response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = 'inline; filename="reporte de productos.pdf"'
# find the template and render it.
template = get_template(template_path)
html = template.render(context)
# create a pdf
pisaStatus = pisa.CreatePDF(
html, dest=response, link_callback=link_callback)
        # if an error occurred, show a simple error page
if pisaStatus.err:
return HttpResponse('We had some errors <pre>' + html + '</pre>')
return response
imprimir_pdf.short_description = 'Exportar a pdf'
admin.site.register(Producto,ProductoAdmin)
class TurnoEmpleadoAdmin(admin.ModelAdmin):
list_per_page = 10
list_display = ('Id_Turno','Turno','Hora_de_Entrada','Hora_de_Salida')
search_fields = ('Id_Turno','Turno','Hora_de_Entrada','Hora_de_Salida')
actions = (
'export_as_csv','imprimir_pdf',
)
    def export_as_csv(self, request, queryset):
        """ Export CSV action """
        # meta holds the model metadata; its name is used for the file name
        meta = self.model._meta
        # Keep a list with the names of the model fields
        field_names = [field.name for field in meta.fields]
        # Create an HttpResponse to send the CSV file
        response = HttpResponse(content_type='text/csv')
        # Set the file name (taken from meta)
        response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
        # Create a csv writer that writes into our HttpResponse
        writer = csv.writer(response)
        # writerow writes the elements of the list it receives, one per csv
        # column, and then moves to the next line
        writer.writerow(field_names)
        # queryset holds the selected objects; iterate over it to write
        # their field values into the csv
        for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
        # Return the HttpResponse object
        return response
    # Give the action a friendly display name
    export_as_csv.short_description = 'Exportar a Excel'
def imprimir_pdf(self, request, queryset):
template_path = 'gestionadmin/pdfturnoempleado.html'
today = timezone.now()
turnoempleados = TurnoEmpleado.objects.all()
context = {
'obj': turnoempleados,
'today': today,
'request': request
}
# Create a Django response object, and specify content_type as pdf
response = HttpResponse(content_type='application/pdf')
        response['Content-Disposition'] = 'inline; filename="reporte de turnos de empleados.pdf"'
# find the template and render it.
template = get_template(template_path)
html = template.render(context)
# create a pdf
pisaStatus = pisa.CreatePDF(
html, dest=response, link_callback=link_callback)
        # if an error occurred, show a simple error page
if pisaStatus.err:
return HttpResponse('We had some errors <pre>' + html + '</pre>')
return response
imprimir_pdf.short_description = 'Exportar a pdf'
admin.site.register(TurnoEmpleado,TurnoEmpleadoAdmin)
#class FacturaAdmin(admin.ModelAdmin):
# list_per_page = 10
# list_display = ('Id_Factura','Id_Empleado','Id_Cliente','Id_MetodoPago','Id_FormaPago','Numero_Sar','Codigo_CAI','ISV18','ISV15','Total_Factura')
# search_fields = ('Id_Factura','Total_Factura')
#admin.site.register(Factura, FacturaAdmin)
| [
"[email protected]"
] | |
2591de4f548ec359e4c3fca9f0f9399fabf88dfb | 2e157761ea124b5cdbadbad61daded246deaafb2 | /wagtail/contrib/wagtailsearchpromotions/migrations/0001_initial.py | 62809da8def86144e08439d9c142bfec5ecd05d5 | [
"BSD-3-Clause"
] | permissive | wgiddens/wagtail | 043e6b110229cd29d64a22860085355d38f66e03 | 4371368854a99ef754c3332ab10675ba62e614a6 | refs/heads/master | 2021-05-23T03:20:13.455601 | 2015-08-31T19:45:06 | 2015-08-31T19:45:06 | 41,701,494 | 0 | 0 | NOASSERTION | 2020-11-16T13:48:19 | 2015-08-31T21:27:06 | Python | UTF-8 | Python | false | false | 1,750 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0015_add_more_verbose_names'),
('wagtailsearch', '0003_remove_editors_pick'),
]
operations = [
migrations.SeparateDatabaseAndState(
state_operations=[
migrations.CreateModel(
name='EditorsPick',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID', auto_created=True)),
('sort_order', models.IntegerField(editable=False, null=True, blank=True)),
('description', models.TextField(verbose_name='Description', blank=True)),
('page', models.ForeignKey(verbose_name='Page', to='wagtailcore.Page')),
('query', models.ForeignKey(to='wagtailsearch.Query', related_name='editors_picks')),
],
options={
'db_table': 'wagtailsearch_editorspick',
'verbose_name': "Editor's Pick",
'ordering': ('sort_order',),
},
),
],
database_operations=[]
),
migrations.AlterModelTable(
name='editorspick',
table=None,
),
migrations.RenameModel(
old_name='EditorsPick',
new_name='SearchPromotion'
),
migrations.AlterModelOptions(
name='searchpromotion',
options={'ordering': ('sort_order',), 'verbose_name': 'Search promotion'},
),
]
| [
"[email protected]"
] | |
a6e7ad120c45a540a1287acfb626fbd4a3de82fb | ab9ab9e30b8c50273d2e01e3a497b8fd1a8e8841 | /Democode/evolution-strategies-starter-master/es_distributed/main.py | 483d22a5b846be797fd5c18a22e9c0406fed474f | [
"MIT"
] | permissive | Asurada2015/Multi-objective-evolution-strategy | 32a88abe584beae24cc96a020ff79659176f1916 | 62f85e9fd23c9f6a3344855614a74e988bf3edd3 | refs/heads/master | 2023-02-03T04:19:28.394586 | 2020-04-14T12:25:06 | 2020-04-14T12:25:06 | 154,918,864 | 3 | 1 | MIT | 2023-02-02T03:23:23 | 2018-10-27T03:03:39 | Jupyter Notebook | UTF-8 | Python | false | false | 2,169 | py | import errno
import json
import logging
import os
import sys
import click
from .dist import RelayClient
from .es import run_master, run_worker, SharedNoiseTable
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
@click.group()
def cli():
logging.basicConfig(
format='[%(asctime)s pid=%(process)d] %(message)s',
level=logging.INFO,
stream=sys.stderr)
@cli.command()
@click.option('--exp_str')
@click.option('--exp_file')
@click.option('--master_socket_path', required=True)
@click.option('--log_dir')
def master(exp_str, exp_file, master_socket_path, log_dir):
# Start the master
assert (exp_str is None) != (exp_file is None), 'Must provide exp_str xor exp_file to the master'
if exp_str:
exp = json.loads(exp_str)
elif exp_file:
with open(exp_file, 'r') as f:
exp = json.loads(f.read())
else:
assert False
log_dir = os.path.expanduser(log_dir) if log_dir else '/tmp/es_master_{}'.format(os.getpid())
mkdir_p(log_dir)
run_master({'unix_socket_path': master_socket_path}, log_dir, exp)
@cli.command()
@click.option('--master_host', required=True)
@click.option('--master_port', default=6379, type=int)
@click.option('--relay_socket_path', required=True)
@click.option('--num_workers', type=int, default=0)
def workers(master_host, master_port, relay_socket_path, num_workers):
# Start the relay
master_redis_cfg = {'host': master_host, 'port': master_port}
relay_redis_cfg = {'unix_socket_path': relay_socket_path}
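    # os.fork() returns 0 in the child: the child runs the relay client,
    # while the parent falls through to spawn the worker processes below.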
if os.fork() == 0:
RelayClient(master_redis_cfg, relay_redis_cfg).run()
return
# Start the workers
noise = SharedNoiseTable() # Workers share the same noise
num_workers = num_workers if num_workers else os.cpu_count()
logging.info('Spawning {} workers'.format(num_workers))
for _ in range(num_workers):
if os.fork() == 0:
run_worker(relay_redis_cfg, noise=noise)
return
os.wait()
if __name__ == '__main__':
cli()
| [
"[email protected]"
] | |
ee19405107fff01363a5004a6f245590587e3446 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/226/users/4424/codes/1674_1101.py | ca6bd59c9146923fe8a25836f1b91360fddcbc3d | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 939 | py | # Teste seu código aos poucos.
# Don't leave all the testing for the end, since that makes errors harder to pinpoint.
# Use the error messages to fix your code.
c = float(input("consumo em kWh: "))
t = input("residencias(r)/industrias(i)/comercios(c): ")
print("Entradas:", c, "kWh e tipo", t)
y = "Dados invalidos"
if ((t.lower()!= "r") and (t.lower()!= "i") and (t.lower()!= "c")) or (c<0):
print(y)
elif (c <= 500) and (t.lower()=="r"):
a = c*0.44
print('Valor total: R$ ', round(a, 2))
elif (c > 500) and (t.lower()== "r"):
a = c*0.65
print('Valor total: R$ ', round(a, 2))
elif (c<=1000) and (t.lower()== "c"):
a = c*0.55
print('Valor total: R$ ', round(a, 2))
elif (c>1000) and (t.lower()== "c"):
a = c*0.60
print('Valor total: R$ ', round(a, 2))
elif (c<=5000) and (t.lower()== "i"):
a = c*0.55
print('Valor total: R$ ', round(a, 2))
elif (c>5000) and (t.lower()== "i"):
a = c*0.60
print('Valor total: R$ ', round(a, 2)) | [
"[email protected]"
] | |
10fbdc05794edb4e95044f0031de8333a7f73a81 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_peeped.py | a186557aa21742047c7e1738b7c7afc26028a481 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 216 | py |
# class header
class _PEEPED():
	def __init__(self):
		self.name = "PEEPED"
		# the original assigned the bare name `peep` (a NameError); store the base form as a string
		self.definitions = 'peep'
		self.parents = []
		self.children = []
		self.properties = []
		self.jsondata = {}
		self.basic = ['peep']
| [
"[email protected]"
] | |
a68501fe0fffd3341fe23f8561c763687c4ae485 | efc9b70544c0bc108aaec0ed6a2aefdf208fd266 | /393. UTF-8 Validation.py | cbd33de9cd3519f3032cff678b73861abd68bab3 | [] | no_license | fxy1018/Leetcode | 75fad14701703d6a6a36dd52c338ca56c5fa9eff | 604efd2c53c369fb262f42f7f7f31997ea4d029b | refs/heads/master | 2022-12-22T23:42:17.412776 | 2022-12-15T21:27:37 | 2022-12-15T21:27:37 | 78,082,899 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 671 | py | '''
LeetCode 393. UTF-8 Validation

Given an array of integers, where each integer represents one byte of data
(only the 8 least significant bits are used), return whether the array is a
valid UTF-8 encoding (characters may be 1 to 4 bytes long).
'''
class Solution(object):
def validUtf8(self, data):
"""
:type data: List[int]
:rtype: bool
"""
count = 0
for d in data:
if count == 0: #first byte
if bin(d>>5) == "0b110":
count = 1
elif bin(d>>4) == "0b1110":
count = 2
elif bin(d>>3) == "0b11110":
count = 3
elif bin(d>>7) != "0b0":
return(False)
else:
if bin(d>>6) != "0b10":
return(False)
count -=1
return(count == 0)
| [
"[email protected]"
] | |
e95b9c3d1cd819f037b70f66076236dc02ccf425 | 73346545e69194dc1cfd887314afe600076ff263 | /polling_stations/apps/data_collection/management/commands/import_greenwich.py | 385a8fd726114f0e6f8492cc5e4ce64a3ce77aec | [] | permissive | chris48s/UK-Polling-Stations | c7a91f80c1ea423156ac75d88dfca31ca57473ff | 4742b527dae94f0276d35c80460837be743b7d17 | refs/heads/master | 2021-08-27T18:26:07.155592 | 2017-11-29T15:57:23 | 2017-11-29T15:57:23 | 50,743,117 | 1 | 0 | BSD-3-Clause | 2017-11-29T16:03:45 | 2016-01-30T20:20:50 | Python | UTF-8 | Python | false | false | 406 | py | from data_collection.management.commands import BaseHalaroseCsvImporter
class Command(BaseHalaroseCsvImporter):
council_id = 'E09000011'
addresses_name = 'parl.2017-06-08/Version 1/polling_station_export-2017-05-09 2.csv'
stations_name = 'parl.2017-06-08/Version 1/polling_station_export-2017-05-09 2.csv'
elections = ['parl.2017-06-08']
csv_encoding = 'windows-1252'
| [
"[email protected]"
] | |
8afa870d5cbd78906a2398e74afc2fcfdc6b3ccb | df264c442075e04bb09d82f9be1d915c070d7e09 | /SAMSUNG/SWEA/PROBLEM/2105_디저트카페_20210419.py | 04f8efd218eb8d69a5537a62f880f1ea920a3d47 | [] | no_license | Koozzi/Algorithms | ff7a73726f18e87cab9406e7b71cd5d1856df183 | 38048ac0774fcab3537bdd64f48cae7d9eb71e6f | refs/heads/master | 2021-07-06T04:59:38.564772 | 2021-05-03T08:49:56 | 2021-05-03T08:49:56 | 231,848,463 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,293 | py | def change_direction(X, Y, x, y, d1, d2):
if (X == x + d1 and Y == y - d1) \
or (X == x + d1 + d2 and Y == y - d1 + d2) \
or (X == x + d2 and Y == y + d2):
return True
return False
def solve(x, y, d1, d2):
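    # Walk the border of the rhombus whose top corner is (x, y) and whose
    # upper-left/upper-right sides have lengths d1 and d2, collecting dessert
    # kinds; return -2 (ignored by the caller's max) as soon as a kind repeats.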
desert = set([board[x][y]])
move = [[1, -1], [1, 1], [-1, 1], [-1, -1]]
X, Y, D = x, y, 0
while True:
if change_direction(X, Y, x, y, d1, d2):
D += 1
X += move[D][0]
Y += move[D][1]
if X == x and Y == y:
break
if board[X][Y] not in desert:
desert.add(board[X][Y])
elif board[X][Y] in desert:
return -2
return len(desert)
T = int(input())
for t in range(1, T + 1):
N = int(input())
board = [list(map(int, input().split())) for _ in range(N)]
answer = -1
for x in range(N - 2):
for y in range(N - 1):
for d1 in range(1, N):
for d2 in range(1, N):
if 0 <= y - d1 and x + d1 + d2 <= N - 1 and y + d2 <= N - 1:
answer = max(answer, solve(x, y, d1, d2))
print("#{} {}".format(t, answer))
"""
2
4
9 8 9 8
4 6 9 4
8 7 7 8
4 5 3 5
5
8 2 9 6 6
1 9 3 3 4
8 2 3 3 6
4 3 4 4 9
7 4 6 3 5
"""
| [
"[email protected]"
] | |
58eae06b2d666c5bc0e9960cc336e03c78e232e9 | cad91ae76d2746a6c28ddda0f33a58f9d461378f | /TensorFlow2/Recommendation/DLRM_and_DCNv2/nn/evaluator.py | eeb9354ef38f8574935923ebfc03a08286e22b2e | [
"Apache-2.0"
] | permissive | NVIDIA/DeepLearningExamples | fe677521e7e2a16e3cb0b77e358f9aab72f8c11a | a5388a45f71a949639b35cc5b990bd130d2d8164 | refs/heads/master | 2023-08-31T20:57:08.798455 | 2023-08-23T10:09:12 | 2023-08-23T10:09:12 | 131,881,622 | 11,838 | 3,124 | null | 2023-08-28T16:57:33 | 2018-05-02T17:04:05 | Jupyter Notebook | UTF-8 | Python | false | false | 3,299 | py | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# author: Tomasz Grel ([email protected])
import tensorflow as tf
import time
from .nn_utils import create_inputs_dict
class Evaluator:
def __init__(self, model, timer, auc_thresholds, max_steps=None, cast_dtype=None, distributed=False):
self.model = model
self.timer = timer
self.max_steps = max_steps
self.cast_dtype = cast_dtype
self.distributed = distributed
if self.distributed:
import horovod.tensorflow as hvd
self.hvd = hvd
else:
self.hvd = None
self.auc_metric = tf.keras.metrics.AUC(num_thresholds=auc_thresholds, curve='ROC',
summation_method='interpolation', from_logits=True)
self.bce_op = tf.keras.losses.BinaryCrossentropy(reduction=tf.keras.losses.Reduction.NONE, from_logits=True)
def _reset(self):
self.latencies, self.all_test_losses = [], []
self.auc_metric.reset_state()
@tf.function
def update_auc_metric(self, labels, y_pred):
self.auc_metric.update_state(labels, y_pred)
@tf.function
def compute_bce_loss(self, labels, y_pred):
return self.bce_op(labels, y_pred)
def _step(self, pipe):
begin = time.time()
batch = pipe.get_next()
(numerical_features, categorical_features), labels = batch
if self.cast_dtype is not None:
numerical_features = tf.cast(numerical_features, self.cast_dtype)
inputs = create_inputs_dict(numerical_features, categorical_features)
y_pred = self.model(inputs, sigmoid=False, training=False)
end = time.time()
self.latencies.append(end - begin)
if self.distributed:
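            # gather predictions and labels from all Horovod workers so that
            # rank 0 can compute AUC and loss over the full evaluation set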
y_pred = self.hvd.allgather(y_pred)
labels = self.hvd.allgather(labels)
self.timer.step_test()
if not self.distributed or self.hvd.rank() == 0:
self.update_auc_metric(labels, y_pred)
test_loss = self.compute_bce_loss(labels, y_pred)
self.all_test_losses.append(test_loss)
def __call__(self, validation_pipeline):
self._reset()
auc, test_loss = 0, 0
pipe = iter(validation_pipeline.op())
num_steps = len(validation_pipeline)
if self.max_steps is not None and self.max_steps >= 0:
num_steps = min(num_steps, self.max_steps)
for _ in range(num_steps):
self._step(pipe)
if not self.distributed or self.hvd.rank() == 0:
auc = self.auc_metric.result().numpy().item()
test_loss = tf.reduce_mean(self.all_test_losses).numpy().item()
return auc, test_loss, self.latencies
| [
"[email protected]"
] | |
082ae1287bba556369350aaf57ecbedc6fbd0a87 | f7c72e8adde14499f119708642e7ca1e3e7424f3 | /network_health_service/tests/create_query_tests.py | 523fa50216d13ffb1ad98df0c5b0b9098a793f0e | [
"MIT"
] | permissive | pmaisel/tgnms | 0b5d7ad90821a4e52d11490199ab1253bb31ac59 | 8ab714d29f2f817f08cad928cf54b7bf1faf0aa7 | refs/heads/main | 2023-08-01T10:14:28.527892 | 2021-09-24T21:28:16 | 2021-09-24T21:56:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,106 | py | #!/usr/bin/env python3
# Copyright 2004-present Facebook. All Rights Reserved.
import json
import unittest
from network_health_service.stats.fetch_stats import get_link_queries, get_node_queries
from network_health_service.stats.metrics import Metrics
class CreateQueryTests(unittest.TestCase):
def setUp(self) -> None:
self.maxDiff = None
with open("tests/metrics.json") as f:
metrics = json.load(f)
Metrics.update_metrics(
metrics, prometheus_hold_time=30, use_real_throughput=True
)
def test_get_link_queries(self) -> None:
expected_link_queries = {
"analytics_alignment_status": (
"sum_over_time(min by (linkName) "
'(analytics_alignment_status{network="network_A"} == bool 1) '
"[3599s:30s])"
),
"topology_link_is_online": (
"sum_over_time(min by (linkName) "
'(topology_link_is_online{network="network_A"}) [3599s:30s])'
),
"tx_byte": (
"quantile_over_time(0.75, sum by (linkName) "
'(tx_byte{network="network_A"}) [3599s:30s])'
),
"analytics_foliage_factor": (
"quantile_over_time(0.75, "
'abs(analytics_foliage_factor{network="network_A"}) [3599s:30s])'
),
"drs_cn_egress_routes_count": (
"quantile_over_time(0.75, max by (linkName) "
'(drs_cn_egress_routes_count{network="network_A"}) [3599s:30s])'
),
"tx_ok": (
"quantile_over_time(0.75, sum by (linkName) "
'(tx_ok{network="network_A",intervalSec="1"}) [3599s:1s])'
),
"link_avail": (
"max by (linkName) "
'(resets(link_avail{network="network_A",intervalSec="1"} [3600s]))'
),
"mcs": (
"quantile_over_time(0.25, min by (linkName) "
'(mcs{network="network_A",intervalSec="1"}) [3599s:1s])'
),
"mcs_diff": (
"quantile_over_time(0.75, "
'abs(mcs{network="network_A",intervalSec="1",linkDirection="A"} '
"- on (linkName) "
'mcs{network="network_A",intervalSec="1",linkDirection="Z"}) '
"[3599s:1s])"
),
"tx_power_diff": (
"quantile_over_time(0.75, "
'abs(tx_power{network="network_A",intervalSec="1",linkDirection="A"} '
"- on (linkName) "
'tx_power{network="network_A",intervalSec="1",linkDirection="Z"}) '
"[3599s:1s])"
),
}
link_queries = get_link_queries("network_A", 3600)
self.assertDictEqual(link_queries, expected_link_queries)
expected_node_queries = {
"analytics_cn_power_status": (
"sum_over_time("
'(analytics_cn_power_status{network="network_A"} == bool 3) '
"[3599s:30s])"
),
"topology_node_is_online": (
'sum_over_time(topology_node_is_online{network="network_A"} [3600s])'
),
"drs_default_routes_changed": (
"sum_over_time(drs_default_routes_changed"
'{network="network_A"} [3600s])'
),
"udp_pinger_loss_ratio": (
"sum_over_time("
'(udp_pinger_loss_ratio{network="network_A",intervalSec="30"} '
"< bool 0.9) [3599s:30s])"
),
"udp_pinger_rtt_avg": (
"quantile_over_time(0.75, "
'udp_pinger_rtt_avg{network="network_A",intervalSec="30"} [3600s])'
),
"min_route_mcs": (
"quantile_over_time(0.25, "
'drs_min_route_mcs{network="network_A"} [3599s:60s])'
),
}
node_queries = get_node_queries("network_A", 3600)
self.assertDictEqual(node_queries, expected_node_queries)
| [
"[email protected]"
] | |
ab7269496f8882045c1c28e24afc8d9c5912824f | 75bee875a2d26ed71513f46a2acbb564dd9a1c44 | /app/modules/users/schemas.py | c58686ed7b25645868c0382549201f648b8ca6d9 | [
"MIT"
] | permissive | frol/flask-restplus-server-example | d096aa1f4e3b6024ecb16af3d0769ccc20e7cff8 | 53a3a156cc9df414537860ed677bd0cc98dd2271 | refs/heads/master | 2023-08-28T14:27:34.047855 | 2023-06-21T14:30:54 | 2023-06-21T14:30:54 | 46,421,329 | 1,487 | 412 | MIT | 2023-06-21T14:30:55 | 2015-11-18T13:43:34 | Python | UTF-8 | Python | false | false | 1,179 | py | # encoding: utf-8
# pylint: disable=too-few-public-methods
"""
User schemas
------------
"""
from flask_marshmallow import base_fields
from flask_restplus_patched import Schema, ModelSchema
from .models import User
class BaseUserSchema(ModelSchema):
"""
Base user schema exposes only the most general fields.
"""
class Meta:
# pylint: disable=missing-docstring
model = User
fields = (
User.id.key,
User.username.key,
User.first_name.key,
User.middle_name.key,
User.last_name.key,
)
dump_only = (
User.id.key,
)
class DetailedUserSchema(BaseUserSchema):
"""
Detailed user schema exposes all useful fields.
"""
class Meta(BaseUserSchema.Meta):
fields = BaseUserSchema.Meta.fields + (
User.email.key,
User.created.key,
User.updated.key,
User.is_active.fget.__name__,
User.is_regular_user.fget.__name__,
User.is_admin.fget.__name__,
)
class UserSignupFormSchema(Schema):
recaptcha_server_key = base_fields.String(required=True)
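
# Illustrative only (not part of the module): serializing a model instance,
# assuming `user` is a User object:
#
#   schema = DetailedUserSchema()
#   result = schema.dump(user)  # on marshmallow 2.x this is a MarshalResult; the dict is in result.data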
| [
"[email protected]"
] | |
163380fce342e9d5ef8fe2951cc513046cff926b | 146db0a1ba53d15ab1a5c3dce5349907a49217c3 | /omega_miya/plugins/nbnhhsh/utils.py | 651e9331bf5ab0fa191f726697c602576fcbbf5c | [
"Python-2.0",
"MIT"
] | permissive | hailong-z/nonebot2_miya | 84d233122b2d785bfc230c4bfb29326844700deb | 7d52ef52a0a13c5ac6519199e9146a6e3c80bdce | refs/heads/main | 2023-03-26T14:59:31.107103 | 2021-03-09T17:01:08 | 2021-03-09T17:01:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,018 | py | import aiohttp
from omega_miya.utils.Omega_Base import Result
API_URL = 'https://lab.magiconch.com/api/nbnhhsh/guess/'
async def get_guess(guess: str) -> Result:
    timeout_count = 0
    error_info = ''
    data = {'text': guess}  # built once, so the error messages below can always reference it
    while timeout_count < 3:
        try:
            timeout = aiohttp.ClientTimeout(total=10)
            async with aiohttp.ClientSession(timeout=timeout) as session:
                async with session.post(url=API_URL, data=data, timeout=timeout) as resp:
                    _json = await resp.json()
                    result = Result(error=False, info='Success', result=_json)
                    return result
        except Exception as e:
            error_info += f'{repr(e)} occurred in get_guess attempt {timeout_count + 1} using params: {data}\n'
        finally:
            timeout_count += 1
    else:
        error_info += f'Failed too many times in get_guess using params: {data}'
result = Result(error=True, info=error_info, result=[])
return result
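
# Illustrative usage sketch (not part of the plugin; assumes it is awaited
# inside an async handler, as nonebot event handlers are):
#
#   result = await get_guess('yyds')
#   if not result.error:
#       print(result.result)  # guess entries returned by the nbnhhsh API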
| [
"[email protected]"
] |