blob_id: string(40) | directory_id: string(40) | path: string(3..616) | content_id: string(40) | detected_licenses: sequence(0..112) | license_type: string(2 classes) | repo_name: string(5..115) | snapshot_id: string(40) | revision_id: string(40) | branch_name: string(777 classes) | visit_date: timestamp[us] (2015-08-06 10:31:46 .. 2023-09-06 10:44:38) | revision_date: timestamp[us] (1970-01-01 02:38:32 .. 2037-05-03 13:00:00) | committer_date: timestamp[us] (1970-01-01 02:38:32 .. 2023-09-06 01:08:06) | github_id: int64 (4.92k .. 681M, nullable) | star_events_count: int64 (0 .. 209k) | fork_events_count: int64 (0 .. 110k) | gha_license_id: string(22 classes) | gha_event_created_at: timestamp[us] (2012-06-04 01:52:49 .. 2023-09-14 21:59:50, nullable) | gha_created_at: timestamp[us] (2008-05-22 07:58:19 .. 2023-08-21 12:35:19, nullable) | gha_language: string(149 classes) | src_encoding: string(26 classes) | language: string(1 class) | is_vendor: bool | is_generated: bool | length_bytes: int64 (3 .. 10.2M) | extension: string(188 classes) | content: string(3 .. 10.2M) | authors: sequence(1) | author_id: string(1..132)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b3de952ebfad9a2dcddea113de5570690cb125ff | 28ed6f9c587b8efd27182116ee39984e3856ea6a | /pythonBasic/2列表和元组/2.2通用序列操作/2.2.4乘法/乘法.py | fccc547755e3ae088e1ec4cbed7bd726f549b691 | [] | no_license | Fangziqiang/PythonInterfaceTest | 758c62a0599a9d98179b6e3b402016e0972f2415 | def37ed36258dfa9790032b0165e35c6278057f0 | refs/heads/master | 2020-04-30T07:55:32.725421 | 2019-09-19T08:33:42 | 2019-09-19T08:33:42 | 176,699,735 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 353 | py | #coding=utf-8
# Multiplying a sequence by a number x creates a new sequence in which the original sequence is repeated x times
print "python"*5
# Output: pythonpythonpythonpythonpython
print [42]*5
# Output: [42, 42, 42, 42, 42]
# To initialise a list of length 10, you can do it as in the following example:
sequence=[None]*10
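# A related caveat, shown as a hypothetical sketch (not part of the original
# tutorial): multiplication copies references, so repeating a mutable element
# makes every slot alias the same object:
#   grid = [[0]] * 3
#   grid[0].append(1)   # grid is now [[0, 1], [0, 1], [0, 1]]
# [None]*10 is safe here because None is immutable.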
print sequence | [
"[email protected]"
] | |
85c1c80a414995f2b6d9694723f564d5910b28a6 | 4cacf8188446170e0b4a14b05021bbd595c4db53 | /pyrolite/comp/codata.py | 7946db4208ecec6801584c7458f2f5294d3c9166 | [
"BSD-3-Clause",
"MIT"
] | permissive | JustinGOSSES/pyrolite | 2d145583344f79e8f935ed19fa00037d42969664 | 21eb5b28d9295625241b73b820fc8892b00fc6b0 | refs/heads/master | 2020-12-23T11:26:55.078871 | 2020-01-10T09:03:22 | 2020-01-10T09:03:22 | 237,136,389 | 1 | 0 | NOASSERTION | 2020-01-30T04:08:52 | 2020-01-30T04:08:51 | null | UTF-8 | Python | false | false | 9,246 | py | import numpy as np
import pandas as pd
import scipy.stats as scpstats
import scipy.special as scpspec
# from .renorm import renormalise, close
from ..util.math import orthogonal_basis_default, orthogonal_basis_from_array
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
logger = logging.getLogger(__name__)
def close(X: np.ndarray, sumf=np.sum):
"""
Closure operator for compositional data.
Parameters
-----------
X : :class:`numpy.ndarray`
Array to close.
sumf : :class:`callable`, :func:`numpy.sum`
Sum function to use for closure.
Returns
--------
:class:`numpy.ndarray`
Closed array.
Notes
------
* Does not check for non-positive entries.
"""
if X.ndim == 2:
return np.divide(X, sumf(X, axis=1)[:, np.newaxis])
else:
return np.divide(X, sumf(X, axis=0))
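# Illustrative usage (hypothetical values, not from the library's tests):
#   >>> close(np.array([[1.0, 3.0], [2.0, 2.0]]))
#   array([[0.25, 0.75],
#          [0.5 , 0.5 ]])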
def renormalise(df: pd.DataFrame, components: list = [], scale=100.0):
"""
Renormalises compositional data to ensure closure.
Parameters
------------
df : :class:`pandas.DataFrame`
        Dataframe to renormalise.
    components : :class:`list`
        Optional subcomposition to renormalise to 100. Useful for the use case
        where compositional data and non-compositional data are stored in the
        same dataframe.
    scale : :class:`float`, :code:`100.`
        Closure parameter. Typically either 100 or 1.
    Returns
    --------
    :class:`pandas.DataFrame`
        Renormalised dataframe.
"""
dfc = df.copy(deep=True)
if components:
cmpnts = [c for c in components if c in dfc.columns]
dfc.loc[:, cmpnts] = scale * dfc.loc[:, cmpnts].divide(
dfc.loc[:, cmpnts].sum(axis=1).replace(0, np.nan), axis=0
)
return dfc
else:
dfc = dfc.divide(dfc.sum(axis=1).replace(0, 100.0), axis=0) * scale
return dfc
def alr(X: np.ndarray, ind: int = -1, null_col=False):
"""
Additive Log Ratio transformation.
Parameters
---------------
X: :class:`numpy.ndarray`
Array on which to perform the transformation, of shape :code:`(N, D)`.
ind: :class:`int`
Index of column used as denominator.
null_col : :class:`bool`
Whether to keep the redundant column.
Returns
---------
:class:`numpy.ndarray`
ALR-transformed array, of shape :code:`(N, D-1)`.
"""
Y = X.copy()
assert Y.ndim in [1, 2]
dimensions = Y.shape[Y.ndim - 1]
if ind < 0:
ind += dimensions
if Y.ndim == 2:
Y = np.divide(Y, Y[:, ind][:, np.newaxis])
if not null_col:
Y = Y[:, [i for i in range(dimensions) if not i == ind]]
else:
Y = np.divide(X, X[ind])
if not null_col:
Y = Y[[i for i in range(dimensions) if not i == ind]]
return np.log(Y)
def inverse_alr(Y: np.ndarray, ind=-1, null_col=False):
"""
    Inverse Additive Log Ratio transformation.
Parameters
---------------
Y : :class:`numpy.ndarray`
Array on which to perform the inverse transformation, of shape :code:`(N, D-1)`.
ind : :class:`int`
Index of column used as denominator.
null_col : :class:`bool`, :code:`False`
Whether the array contains an extra redundant column
(i.e. shape is :code:`(N, D)`).
Returns
--------
:class:`numpy.ndarray`
Inverse-ALR transformed array, of shape :code:`(N, D)`.
"""
assert Y.ndim in [1, 2]
X = Y.copy()
dimensions = X.shape[X.ndim - 1]
if not null_col:
idx = np.arange(0, dimensions + 1)
if ind != -1:
idx = np.array(list(idx[idx < ind]) + [-1] + list(idx[idx >= ind + 1] - 1))
# Add a zero-column and reorder columns
if Y.ndim == 2:
X = np.concatenate((X, np.zeros((X.shape[0], 1))), axis=1)
X = X[:, idx]
else:
X = np.append(X, np.array([0]))
X = X[idx]
# Inverse log and closure operations
X = np.exp(X)
X = close(X)
return X
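# Round-trip sketch (hypothetical composition): for
#   X = close(np.array([[1.0, 2.0, 4.0]]))
# alr(X) gives log-ratios against the last component, and
# inverse_alr(alr(X)) recovers X up to floating-point error.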
def clr(X: np.ndarray):
"""
Centred Log Ratio transformation.
Parameters
---------------
X : :class:`numpy.ndarray`
Array on which to perform the transformation, of shape :code:`(N, D)`.
Returns
---------
:class:`numpy.ndarray`
CLR-transformed array, of shape :code:`(N, D)`.
"""
X = np.divide(X, np.sum(X, axis=1)[:, np.newaxis]) # Closure operation
Y = np.log(X) # Log operation
Y -= 1 / X.shape[1] * np.nansum(Y, axis=1)[:, np.newaxis]
return Y
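# Worked example (hypothetical composition): for the row x = [1/7, 2/7, 4/7],
# the geometric mean is 2/7, so
#   clr(x) = ln(x) - ln(2/7) = [-ln 2, 0, ln 2] ~= [-0.693, 0, 0.693]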
def inverse_clr(Y: np.ndarray):
"""
Inverse Centred Log Ratio transformation.
Parameters
---------------
Y : :class:`numpy.ndarray`
Array on which to perform the inverse transformation, of shape :code:`(N, D)`.
Returns
---------
:class:`numpy.ndarray`
Inverse-CLR transformed array, of shape :code:`(N, D)`.
"""
# Inverse of log operation
X = np.exp(Y)
# Closure operation
X = np.divide(X, np.nansum(X, axis=1)[:, np.newaxis])
return X
def ilr(X: np.ndarray):
"""
Isometric Log Ratio transformation.
Parameters
---------------
X : :class:`numpy.ndarray`
Array on which to perform the transformation, of shape :code:`(N, D)`.
Returns
--------
:class:`numpy.ndarray`
ILR-transformed array, of shape :code:`(N, D-1)`.
"""
d = X.shape[1]
Y = clr(X)
psi = orthogonal_basis_from_array(X) # Get a basis
assert np.allclose(psi @ psi.T, np.eye(d - 1))
return Y @ psi.T
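# Dimensionality sketch: ilr maps (N, D) -> (N, D-1) via an orthonormal basis
# of the clr-plane; since clr rows sum to zero, inverse_ilr(ilr(X), X) returns
# close(X) (the same basis must be used in both directions).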
def inverse_ilr(Y: np.ndarray, X: np.ndarray = None):
"""
Inverse Isometric Log Ratio transformation.
Parameters
---------------
Y : :class:`numpy.ndarray`
Array on which to perform the inverse transformation, of shape :code:`(N, D-1)`.
X : :class:`numpy.ndarray`, :code:`None`
Optional specification for an array from which to derive the orthonormal basis,
with shape :code:`(N, D)`.
Returns
--------
:class:`numpy.ndarray`
Inverse-ILR transformed array, of shape :code:`(N, D)`.
"""
if X is None:
psi = orthogonal_basis_default(D=Y.shape[1] + 1)
else:
psi = orthogonal_basis_from_array(X)
C = Y @ psi
X = inverse_clr(C) # Inverse log operation
return X
def boxcox(
X: np.ndarray,
lmbda=None,
lmbda_search_space=(-1, 5),
search_steps=100,
return_lmbda=False,
):
"""
Box-Cox transformation.
Parameters
---------------
X : :class:`numpy.ndarray`
Array on which to perform the transformation.
lmbda : :class:`numpy.number`, :code:`None`
        Lambda value used to forward-transform values. If None, it will be
        estimated via a grid search over `lmbda_search_space`, maximising
        the mean Box-Cox log-likelihood.
lmbda_search_space : :class:`tuple`
Range tuple (min, max).
search_steps : :class:`int`
Steps for lambda search range.
return_lmbda : :class:`bool`
Whether to also return the lambda value.
Returns
-------
:class:`numpy.ndarray` | :class:`numpy.ndarray`(:class:`float`)
Box-Cox transformed array. If `return_lmbda` is true, tuple contains data and
lambda value.
"""
if isinstance(X, pd.DataFrame) or isinstance(X, pd.Series):
_X = X.values
else:
_X = X.copy()
if lmbda is None:
l_search = np.linspace(*lmbda_search_space, search_steps)
llf = np.apply_along_axis(scpstats.boxcox_llf, 0, np.array([l_search]), _X.T)
if llf.shape[0] == 1:
mean_llf = llf[0]
else:
mean_llf = np.nansum(llf, axis=0)
lmbda = l_search[mean_llf == np.nanmax(mean_llf)]
if _X.ndim < 2:
out = scpstats.boxcox(_X, lmbda)
elif _X.shape[0] == 1:
out = scpstats.boxcox(np.squeeze(_X), lmbda)
else:
out = np.apply_along_axis(scpstats.boxcox, 0, _X, lmbda)
    if isinstance(X, pd.DataFrame):  # check X, not _X: _X is always an array here
        _out = X.copy()
        _out.loc[:, :] = out
        out = _out
    elif isinstance(X, pd.Series):
        _out = X.copy()
        _out.loc[:] = out  # Series needs 1-D indexing
        out = _out
if return_lmbda:
return out, lmbda
else:
return out
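# Sketch: with lmbda=0 the Box-Cox transform reduces to the natural log,
# e.g. boxcox(np.array([1.0, np.e]), lmbda=0) ~= array([0., 1.])
# (input values must be positive).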
def inverse_boxcox(Y: np.ndarray, lmbda):
"""
Inverse Box-Cox transformation.
Parameters
---------------
Y : :class:`numpy.ndarray`
Array on which to perform the transformation.
lmbda : :class:`float`
Lambda value used to forward-transform values.
Returns
-------
:class:`numpy.ndarray`
Inverse Box-Cox transformed array.
"""
return scpspec.inv_boxcox(Y, lmbda)
def logratiomean(df, transform=clr, inverse_transform=inverse_clr):
"""
Take a mean of log-ratios along the index of a dataframe.
Parameters
-----------
df : :class:`pandas.DataFrame`
Dataframe from which to compute a mean along the index.
transform : :class:`callable`
Log transform to use.
inverse_transform : :class:`callable`
Inverse of log transform.
Returns
---------
:class:`pandas.Series`
Mean values as a pandas series.
"""
return pd.Series(
inverse_transform(np.mean(transform(df.values), axis=0)[np.newaxis, :])[0],
index=df.columns,
)
| [
"[email protected]"
] | |
e64b66ecb488c6aef2c7591335ccc40c08f08013 | 3b239e588f2ca6e49a28a63d906dd8dd26173f88 | /code/run_gtp.py | 22a8710cdc6285439eca3628c9aee2ea342dd378 | [] | no_license | Angi16/deep_learning_and_the_game_of_go | 3bbf4f075f41359b87cb06fe01b4c7af85837c18 | ba63d5e3f60ec42fa1088921ecf93bdec641fd04 | refs/heads/master | 2020-03-23T16:02:47.431241 | 2018-07-21T02:57:16 | 2018-07-21T02:57:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 359 | py | #!/usr/local/bin/python2
from dlgo.gtp import GTPFrontend
from dlgo.agent.predict import load_prediction_agent
from dlgo.agent import termination
import h5py
model_file = h5py.File("agents/betago.hdf5", "r")
agent = load_prediction_agent(model_file)
termination = termination.get("opponent_passes")
frontend = GTPFrontend(agent, termination)
frontend.run()
| [
"[email protected]"
] | |
f99eb4d682177ed04151f93cefcb7d9041dccddd | 6a4bfed49f65ff74b5c076d19ce8b9a6209754e5 | /quicksort3.py | cb79716efee99c68337998c9753914e989a1cf13 | [] | no_license | Styfjion/-offer | ef4f2f44722ad221e39afa67dd70b3c1453c8b01 | 1f9c311b7775138d8096bf41adbef8a02a129397 | refs/heads/master | 2020-05-23T12:04:46.833239 | 2019-05-15T12:21:42 | 2019-05-15T12:21:42 | 186,750,197 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 647 | py | def quick_sort(alist,first,last):
    '''Quick sort'''
if first >= last:
return
mid_value = alist[first]
low = first
high = last
while low < high :
        # move the high pointer to the left
while low < high and alist[high]>= mid_value:
high-=1
alist[low] = alist[high]
        # move the low pointer to the right
        while high > low and alist[low]<mid_value:
low+=1
alist[high] = alist[low]
alist[low]=mid_value
quick_sort(alist,first,low-1)
quick_sort(alist,low+1,last)
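# Partition sketch for the demo list below: the pivot is alist[first] (54);
# values below it end up on its left, values >= 54 on its right, and each half
# is then sorted recursively. Average cost is O(n log n); the worst case is
# O(n^2) on already-sorted input, since the first element is used as pivot.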
if __name__=='__main__':
li = [54,26,93,17,77,31,44,55,20]
print(li)
quick_sort(li,0,len(li)-1)
print(li) | [
"[email protected]"
] | |
0c0d009cc5b976759c50e86d94edf401b1be0d39 | e1eeec4c9e84f52a14e0c29a1384dfbd60d9f2b4 | /apps/paintapp.py | a9c5c27f02c14ca25b47625410dd904964741044 | [] | no_license | Yobmod/dmlgames | f88583523b6631fa4b1f71e2a98555f1e655b97f | 952f6416e2f7e9cd268d6a62b3a771839c99f1b0 | refs/heads/master | 2022-07-16T23:56:32.449207 | 2020-05-21T14:45:29 | 2020-05-21T14:45:29 | 265,620,729 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,507 | py | from __future__ import annotations
# import kivy
from kivy.app import App
from kivy.graphics import Line, Color, Ellipse
from kivy.uix.gridlayout import GridLayout
from kivy.uix.label import Label
from kivy.uix.button import Button
from kivy.uix.textinput import TextInput
from kivy.uix.widget import Widget
import dotenv
import os
from random import random
from kivy.input import MotionEvent
env = dotenv.load_dotenv('../.env')
KIVY_DATA_DIR = os.environ.get('KIVY_DATA_DIR') or './data'
# KIVY_MODULES_DIR = os.environ['KIVY_DATA_DIR'] or './modules'
class LoginScreen(GridLayout):
""""""
def __init__(self, **kwargs: object) -> None:
super(LoginScreen, self).__init__(**kwargs)
self.cols = 2
self.add_widget(Label(text="Username: "))
self.username = TextInput(multiline=False)
self.add_widget(self.username)
self.add_widget(Label(text="Password: "))
self.password = TextInput(multiline=False, password=True)
self.add_widget(self.password)
self.add_widget(Label(text="Two Factor Auth: "))
self.tfa = TextInput(multiline=False)
self.add_widget(self.tfa)
class PaintWidget(Widget):
def on_touch_down(self, touch: MotionEvent) -> None:
# print(touch)
with self.canvas:
color = (random(), 1.0, 1.0)
Color(*color, mode='hsv') # default mode='rgb'
# color = (random(), random(), random())
# Color(*color, mode='rgb') # default mode='rgb'
d = 30.0
Ellipse(pos=(touch.x - d / 2, touch.y - d / 2), size=(d, d))
            # stash the Line in the touch's user-data dict so that
            # on_touch_move can extend it as the pointer drags
            touch.ud['line'] = Line(points=(touch.x, touch.y))
def on_touch_move(self, touch: MotionEvent) -> None:
# print(touch)
touch.ud["line"].points += (touch.x, touch.y)
def on_touch_up(self, touch: MotionEvent) -> None:
# print("Released", touch)
with self.canvas:
d = 30.0
Ellipse(pos=(touch.x - d / 2, touch.y - d / 2), size=(d, d))
class PaintApp(App):
""""""
def build(self) -> Widget:
# return LoginScreen()
parent: Widget = Widget()
self.painter = PaintWidget()
clearbtn = Button(text='Clear')
clearbtn.bind(on_release=self.clear_canvas)
parent.add_widget(self.painter)
parent.add_widget(clearbtn)
return parent
def clear_canvas(self, obj: Button) -> None:
self.painter.canvas.clear()
if __name__ == "__main__":
PaintApp().run()
| [
"[email protected]"
] | |
1c633147d0ead3420afc3562ee2df7db626b298a | 76cd7a11f6b421936442484b217ee95654cbe377 | /src/ploomber/spec/taskspec.py | fd3cb1c8281be7dc89d2d932efefbaffb4f57a92 | [
"Apache-2.0"
] | permissive | idomic/ploomber | 03c8f1844de8d9cc1331cf1bd323435dafa576c2 | 89b5e544b0540cf2cbb6bcd09946537198115d17 | refs/heads/master | 2023-06-02T21:18:48.655287 | 2021-06-22T05:18:02 | 2021-06-22T05:18:02 | 377,858,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,082 | py | """
Create Tasks from dictionaries
"""
from functools import partial
from copy import copy
from pathlib import Path
from collections.abc import MutableMapping, Mapping
from ploomber import tasks, products
from ploomber.util.util import _make_iterable
from ploomber.util import validate, dotted_path
from ploomber.tasks.taskgroup import TaskGroup
from ploomber import validators
from ploomber.exceptions import DAGSpecInitializationError
suffix2taskclass = {
'.py': tasks.NotebookRunner,
'.R': tasks.NotebookRunner,
'.Rmd': tasks.NotebookRunner,
'.r': tasks.NotebookRunner,
'.ipynb': tasks.NotebookRunner,
'.sql': tasks.SQLScript,
'.sh': tasks.ShellScript
}
def _safe_suffix(product):
try:
return Path(product).suffix
except Exception:
return None
def task_class_from_source_str(source_str, lazy_import, reload, product):
"""
The source field in a DAG spec is a string. The actual value needed to
instantiate the task depends on the task class, but to make task class
optional, we try to guess the appropriate task here. If the source_str
needs any pre-processing to pass it to the task constructor, it also
happens here. If product is not None, it's also used to determine if
a task is a SQLScript or SQLDump
"""
extension = Path(source_str).suffix
# if lazy load, just locate the module without importing it
fn_checker = dotted_path.locate_dotted_path if lazy_import else partial(
dotted_path.load_dotted_path, raise_=True, reload=reload)
if extension and extension in suffix2taskclass:
if extension == '.sql' and _safe_suffix(product) in {
'.csv', '.parquet'
}:
return tasks.SQLDump
return suffix2taskclass[extension]
else:
try:
imported = fn_checker(source_str)
error = None
except Exception as e:
imported = None
error = e
if imported is None:
raise ValueError(
'Could not determine task class for '
f'source {source_str!r}. This looks like a dotted path '
'but it failed to import. Original error '
f'message: {error!s}. You can also set the task class '
'using the "class" key')
else:
return tasks.PythonCallable
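# Illustrative mapping (hypothetical sources): 'clean.py' -> NotebookRunner,
# 'analysis.sql' -> SQLScript, 'analysis.sql' with a '.csv' or '.parquet'
# product -> SQLDump, and a dotted path with no extension such as
# 'tasks.raw.get' -> PythonCallable (provided it imports/locates cleanly).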
def task_class_from_spec(task_spec, lazy_import, reload):
"""
Returns the class for the TaskSpec, if the spec already has the class
name (str), it just returns the actual class object with such name,
otherwise it tries to guess based on the source string
"""
class_name = task_spec.get('class', None)
if class_name:
try:
class_ = validators.string.validate_task_class_name(class_name)
except Exception as e:
msg = f'Error validating Task spec (class field): {e.args[0]}'
e.args = (msg, )
raise
else:
class_ = task_class_from_source_str(
task_spec['source'],
lazy_import,
reload,
task_spec.get('product'),
)
return class_
def _init_source_for_task_class(source_str, task_class, project_root,
lazy_import, make_absolute):
"""
Initialize source. Loads dotted patht to callable if a PythonCallable
task, otherwise it returns a path
"""
if task_class is tasks.PythonCallable:
if lazy_import:
return source_str
else:
return dotted_path.load_dotted_path(source_str)
else:
path = Path(source_str)
# NOTE: there is some inconsistent behavior here. project_root
# will be none if DAGSpec was initialized with a dictionary, hence
# this won't resolve to absolute paths - this is a bit confusing.
# maybe always convert to absolute?
if project_root and not path.is_absolute() and make_absolute:
return Path(project_root, source_str)
else:
return path
class TaskSpec(MutableMapping):
"""
A TaskSpec converts dictionaries to Task objects. This class is not
intended to be used directly, but through DAGSpec
Parameters
----------
data : dict
The data that holds the spec information
meta : dict
The "meta" section information from the calling DAGSpec
project_root : str or pathlib.Path
        The project root folder. Relative paths in "product" are resolved
        against this folder
lazy_import : bool, default=False
If False, sources are loaded when initializing the spec (e.g.
a dotted path is imported, a source loaded using a SourceLoader
is converted to a Placeholder object)
reload : bool, default=False
Reloads modules before getting dotted paths. Has no effect if
lazy_import=True
"""
def __init__(self,
data,
meta,
project_root,
lazy_import=False,
reload=False):
# FIXME: make sure data and meta are immutable structures
self.data = data
self.meta = meta
self.project_root = project_root
self.lazy_import = lazy_import
self.validate()
source_loader = meta['source_loader']
# initialize required elements
self.data['class'] = task_class_from_spec(self.data, lazy_import,
reload)
# preprocess source obj, at this point it will either be a Path if the
# task requires a file or a callable if it's a PythonCallable task
self.data['source'] = _init_source_for_task_class(
self.data['source'],
self.data['class'],
self.project_root,
lazy_import,
# only make sources absolute paths when not using a source loader
# otherwise keep them relative
make_absolute=source_loader is None)
is_path = isinstance(self.data['source'], Path)
# check if we need to use the source_loader. we don't if the path is
# relative because that doesn't make sense with a source_loader, and
# this gives the user the ability to load some files that might
# not be part of the source loader
if source_loader and is_path and not self.data['source'].is_absolute():
if lazy_import:
self.data['source'] = source_loader.path_to(
self.data['source'])
else:
self.data['source'] = source_loader[self.data['source']]
def validate(self):
"""
Validates the data schema
"""
if 'upstream' not in self.data:
self.data['upstream'] = None
if self.meta['extract_product']:
required = {'source'}
else:
required = {'product', 'source'}
validate.keys(valid=None,
passed=self.data,
required=required,
name=repr(self))
if self.meta['extract_upstream'] and self.data.get('upstream'):
raise ValueError('Error validating task "{}", if '
'meta.extract_upstream is set to True, tasks '
'should not have an "upstream" key'.format(
self.data))
if self.meta['extract_product'] and self.data.get('product'):
raise ValueError('Error validating task "{}", if '
'meta.extract_product is set to True, tasks '
'should not have a "product" key'.format(
self.data))
def to_task(self, dag):
"""
Convert the spec to a Task or TaskGroup and add it to the dag.
Returns a (task, upstream) tuple with the Task instance and list of
upstream dependencies (as described in the 'upstream' key, if any,
empty if no 'upstream' key). If the spec has a 'grid' key, a TaskGroup
instance instead
Parameters
----------
dag
The DAG to add the task(s) to
"""
data = copy(self.data)
upstream = _make_iterable(data.pop('upstream'))
if 'grid' in data:
if 'name' not in data:
raise KeyError(f'Error initializing task with spec {data!r}: '
'tasks with \'grid\' must have a \'name\'')
task_class = data.pop('class')
product_class = _find_product_class(task_class, data, self.meta)
product = data.pop('product')
name = data.pop('name')
grid = data.pop('grid')
# TODO: support for hooks
return TaskGroup.from_grid(task_class=task_class,
product_class=product_class,
product_primitive=product,
task_kwargs=data,
dag=dag,
name=name,
grid=grid), upstream
else:
return _init_task(data=data,
meta=self.meta,
project_root=self.project_root,
lazy_import=self.lazy_import,
dag=dag), upstream
def __getitem__(self, key):
return self.data[key]
def __setitem__(self, key, value):
self.data[key] = value
def __delitem__(self, key):
del self.data[key]
def __iter__(self):
for e in self.data:
yield e
def __len__(self):
return len(self.data)
def __repr__(self):
return '{}({!r})'.format(type(self).__name__, self.data)
def _init_task(data, meta, project_root, lazy_import, dag):
"""Initialize a single task from a dictionary spec
"""
task_dict = copy(data)
class_ = task_dict.pop('class')
product = _init_product(task_dict, meta, class_, project_root)
_init_client(task_dict)
source = task_dict.pop('source')
name = task_dict.pop('name', None)
on_finish = task_dict.pop('on_finish', None)
on_render = task_dict.pop('on_render', None)
on_failure = task_dict.pop('on_failure', None)
if 'serializer' in task_dict:
task_dict['serializer'] = dotted_path.load_callable_dotted_path(
task_dict['serializer'])
if 'unserializer' in task_dict:
task_dict['unserializer'] = dotted_path.load_callable_dotted_path(
task_dict['unserializer'])
# edge case: if using lazy_import, we should not check if the kernel
# is installed. this is used when exporting to Argo/Airflow using
# soopervisor, since the exporting process should not require to have
# the ir kernel installed. The same applies when Airflow has to convert
# the DAG, the Airflow environment shouldn't require the ir kernel
if (class_ == tasks.NotebookRunner and lazy_import
and 'check_if_kernel_installed' not in task_dict):
task_dict['check_if_kernel_installed'] = False
try:
task = class_(source=source,
product=product,
name=name,
dag=dag,
**task_dict)
except Exception as e:
msg = f'Error initializing Task from {data!r}. Error: {e.args[0]}'
e.args = (msg, )
raise
if on_finish:
task.on_finish = dotted_path.load_callable_dotted_path(on_finish)
if on_render:
task.on_render = dotted_path.load_callable_dotted_path(on_render)
if on_failure:
task.on_failure = dotted_path.load_callable_dotted_path(on_failure)
return task
# FIXME: how do we make a default product client? use the task's client?
def _init_product(task_dict, meta, task_class, root_path):
"""
Initialize product.
Resolution logic order:
task.product_class
meta.{task_class}.product_default_class
Current limitation: When there is more than one product, they all must
be from the same class.
"""
product_raw = task_dict.pop('product')
# return if we already have a product
if isinstance(product_raw, products.product.Product):
return product_raw
CLASS = _find_product_class(task_class, task_dict, meta)
if 'product_client' in task_dict:
kwargs = {
'client': dotted_path.call_spec(task_dict.pop('product_client'))
}
else:
kwargs = {}
relative_to = (Path(task_dict['source']).parent
if meta['product_relative_to_source'] else root_path)
if isinstance(product_raw, Mapping):
return {
key: try_product_init(CLASS,
resolve_if_file(value, relative_to, CLASS),
kwargs)
for key, value in product_raw.items()
}
else:
source = resolve_if_file(product_raw, relative_to, CLASS)
return try_product_init(CLASS, source, kwargs)
def _find_product_class(task_class, task_dict, meta):
key = 'product_default_class.' + task_class.__name__
meta_product_default_class = get_value_at(meta, key)
if 'product_class' in task_dict:
return validate_product_class_name(task_dict.pop('product_class'))
elif meta_product_default_class:
return validate_product_class_name(meta_product_default_class)
else:
raise ValueError('Could not determine a product class for task: '
'"{}". Add an explicit value in the '
'"product_class" key or provide a default value in '
'meta.product_default_class by setting the '
'key to the applicable task class'.format(task_dict))
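# Illustrative sketch (hypothetical meta dict): with
#   meta = {'product_default_class': {'SQLScript': 'SQLRelation'}}
# a SQLScript task without an explicit 'product_class' resolves via
# get_value_at(meta, 'product_default_class.SQLScript') -> 'SQLRelation',
# which is then validated into a product class.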
def try_product_init(class_, source, kwargs):
"""
Try to initialize product, raises a chained exception if not possible.
This provides more contextual information
"""
try:
return class_(source, **kwargs)
except Exception as e:
kwargs_msg = f'and keyword arguments: {kwargs!r}' if kwargs else ''
raise DAGSpecInitializationError(
f'Error initializing {class_.__name__} with source: '
f'{source!r}' + kwargs_msg) from e
def validate_product_class_name(product_class_name):
try:
return validators.string.validate_product_class_name(
product_class_name)
except Exception as e:
msg = ('Error validating Task spec (product_class field): '
f'{e.args[0]}')
e.args = (msg, )
raise
def resolve_if_file(product_raw, relative_to, class_):
try:
return _resolve_if_file(product_raw, relative_to, class_)
except Exception as e:
e.args = ('Error initializing File with argument '
f'{product_raw!r} ({e})', )
raise
def _resolve_if_file(product_raw, relative_to, class_):
if class_ != products.File:
return product_raw
elif relative_to:
# To keep things consistent, product relative paths are so to the
# pipeline.yaml file (not to the current working directory). This is
# important because there is no guarantee that the process calling
# this will be at the pipeline.yaml location. One example is
# when using the integration with Jupyter notebooks, each notebook
# will set its working directory to the current parent.
return str(Path(relative_to, product_raw).resolve())
else:
return Path(product_raw).resolve()
def _init_client(task_dict):
if 'client' in task_dict:
task_dict['client'] = dotted_path.call_spec(task_dict.pop('client'))
def get_value_at(d, dotted_path):
current = d
for key in dotted_path.split('.'):
try:
current = current[key]
except KeyError:
return None
return current
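# Usage sketch (hypothetical dict):
#   get_value_at({'a': {'b': 1}}, 'a.b')  -> 1
#   get_value_at({'a': {'b': 1}}, 'a.c')  -> None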
| [
"[email protected]"
] | |
4dd4d5b00a402333928aa06495267ea665fb2135 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/3/hv7.py | f2d754c339f59f6972a73151c1e013f6820c85b3 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'hV7':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
1970df72902b0a82e5c6714a75f174d5dc12f140 | 300cb04e51274541611efec5cb8e06d9994f7f66 | /Bai4.py | 22c15634333a611bab3d437c56890a831f99ee97 | [] | no_license | thuongtran1210/Buoi1 | b77c2a40d0107506cbd4bff12336e006942e9ff6 | d1c6240e60c10606bc430cd86ec473e3f11027f4 | refs/heads/master | 2022-12-16T19:49:40.028110 | 2020-09-24T17:16:13 | 2020-09-24T17:16:13 | 295,476,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 203 | py | pi=3.14
print("Nhập chiều dài bán kính hình tròn: ",end='')
r=float(input())
cv=r*2*pi
dt=r*r*pi
print("Hình tròn có chu vi là: "+str(cv))
print("Hình tròn có diện tích là: "+str(dt)) | [
"[email protected]"
] | |
01fa7e142fd2ebb49b922ab2250b16e3ae2a44bd | dce0807fa8587e676d56bd36871cfcc98550619c | /python/docs/conf.py | 591636a9a861622b80bd49ae3b38df0d4fbbd06e | [
"BSD-3-Clause"
] | permissive | ZigmundRat/spidriver | ca45993342ac2f3b134dbb37b686550318383846 | 9f3a7e75d55eea425eae03529680e6f7302cc042 | refs/heads/master | 2023-03-02T17:17:56.127183 | 2023-02-07T22:59:32 | 2023-02-07T22:59:32 | 153,183,836 | 0 | 0 | BSD-3-Clause | 2021-02-12T21:48:19 | 2018-10-15T21:25:51 | Python | UTF-8 | Python | false | false | 2,023 | py | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import sys
import os
sys.path.insert(0, os.path.abspath('..'))
# -- Project information -----------------------------------------------------
project = 'spidriver'
copyright = '2021, Excamera Labs'
author = 'James Bowman'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx_rtd_theme",
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx"
]
intersphinx_mapping = {'https://docs.python.org/3/': None}
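# (Sketch, assuming Sphinx's legacy {uri: inventory} intersphinx format: with
# this mapping, cross-references such as :class:`list` can resolve against the
# objects.inv inventory fetched from https://docs.python.org/3/.)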
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
master_doc = 'index'
| [
"[email protected]"
] | |
19573ae17cac9b95b5c1c8e0d9dfc26b363993a8 | 0ceaad883368e250a8cc9d54dd3df6c82f550983 | /826/test.py | 98e8cfce3fb41c391831dd25293c29056c6d2719 | [] | no_license | tarp20/LeetCode | ae6b5a3838308156ea24a678c63992acb0528fa9 | 551a76714306d4ae9663718b5ce0769c7356e152 | refs/heads/main | 2023-03-06T03:48:54.924494 | 2021-02-16T14:38:00 | 2021-02-16T14:38:00 | 337,395,859 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 603 | py | import collections
difficulty = [3, 5, 9, 2, 3]
workers = [3, 9, 5]
profit = [6, 10, 18, 4, 6]
def maxProfitAssignment(difficulty, profit, worker):
    # best profit available at each exact difficulty level
    diffPro = collections.defaultdict(int)
    for diff, pro in zip(difficulty, profit):
        diffPro[diff] = max(diffPro[diff], pro)
    # sweep upwards so diffPro[x] becomes the best profit at difficulty <= x
    maxVal = 0
    for x in range(min(difficulty + worker), max(difficulty + worker) + 1):
        diffPro[x] = max(diffPro[x], maxVal)
        maxVal = max(diffPro[x], maxVal)
    # each worker takes the most profitable job they can handle
    return sum(diffPro[w] for w in worker)
print(maxProfitAssignment(difficulty, profit, workers))
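# For the sample data above this should print 34:
# workers 3, 9 and 5 take the jobs worth 6, 18 and 10 respectively.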
| [
"[email protected]"
] | |
224c6cc4afbfaa41d9c508c782c894db9575c373 | 8bbfb5b937772066ea965058eb29e9f6362847c2 | /runtime/tags/qk511b-opengl-update-merged/quarkpy/qdictionnary.py | bfd5a9cdc7150008f44e2711ec291095163454b8 | [] | no_license | QuakeEngines/QuArK_quake_editor-clone | e1aeeb38e7ec8287835d643c3a0bfe5612f0b7f3 | 412bf28a14d4e369479bf38408bd93e6a2612f87 | refs/heads/master | 2021-02-15T16:11:17.332239 | 2020-03-04T14:28:50 | 2020-03-04T14:28:50 | 244,911,440 | 2 | 0 | null | null | null | null | WINDOWS-1252 | Python | false | false | 40,906 | py | """ QuArK - Quake Army Knife
    Dictionary of all strings used within the program
"""
#
# Copyright (C) 1996-99 Armin Rigo
# THIS FILE IS PROTECTED BY THE GNU GENERAL PUBLIC LICENCE
# FOUND IN FILE "COPYING.TXT"
#
Strings = {
0: "Version 5.11",
1: " QuArK - Quake Army Knife %s by Armin Rigo - logo McKay & Brian",
2: "&New %s",
3: "&%d. %s",
4: " &? ",
5: "&Toolboxes",
6: "Console",
7: "note: this console may not display the program output before it's terminated",
44: "&Undo %s",
45: "&Redo %s",
# 87: "&Force to grid",
# 88: "&Force everything to grid",
98: "Paste face &into polyhedron",
99: "Paste &into group",
113: "nothing to undo",
129: " (no face selected) ",
130: "parent only",
131: "%d polyhedron(s)",
132: "Not a valid face",
133: "(not used)",
134: " Selected face ",
135: " %d faces selected ",
136: "Entities",
137: "Map structure",
138: "poly",
139: "face",
140: "%d faces",
141: "%d faces (+%d)",
142: " Selected polyhedron ",
143: " %d polyhedrons selected ",
144: "several groups",
145: " (no polyhedron selected) ",
146: "(specific)",
155: "-- Specifics common to entities in this group --",
156: "Creating view...",
# 157: "The map contains %d unused faces. Delete them now ?",
# 158: "The map contains an unused face. Delete it now ?",
157: "You left %d faces unused. Delete them now ?",
158: "You left a face unused. Delete it now ?",
159: "You made %d invalid polyhedrons. Delete them now ?",
160: "The resulting polyhedron is no longer valid -- %s\nDelete it now ?",
161: "You made %d invalid polyhedron(s) plus %d invalid face(s). Delete them now ?",
163: "-- Specifics common to selected items --",
172: "Searching for holes...",
175: "Extracting files...",
176: "// This map has been written by QuArK - Quake Army Knife, %s\n// It is a map for the game %s.\n\n// It is recommended that you compile this map using TXQBSP, a version of QBSP\n// that can process floating-point coordinates and enhanced texture positionning.\n// For more information see QuArK's Home Page : http://www.planetquake.com/quark\n\n",
179: "(%d textures)",
180: "noname",
181: " Map for the game %s",
182: " Unknown game",
183: "corner",
184: " Model for the game %s",
185: "Textures used in this map",
192: "You must enter %d value(s)//Text : %s",
193: "No matching item found.",
194: "One matching item found.",
195: "%d matching item(s) found.",
216: "Cannot move an item into one of its sub-items",
221: "No help available about '%s'",
222: "No selection.",
223: "Select the polyhedrons, entities, and groups to include in the test build, and try again.\n\nNote that the game will crash if there is no player start in the selection. If the normal player start is not in the selection, you should use a 'testplayerstart' entity (see the New Items window).",
226: "Thank you for having registered, %s !",
229: "intersection",
230: "Duplicators should not be put directly under ""worldspawn"", because they duplicate ""worldspawn"" itself; the resulting .map file is not valid.",
236: "Position of the selected vertex",
237: "Position of the duplicator image",
238: "Enter the new origin : (X Y Z)",
240: "less than 4 sides",
# 241: "two sides in a same plane",
242: "no interior",
243: "sides not closed",
248: "unexpected char",
249: "string without end quote",
250: "a single side with more than %d vertexes",
251: "invalid real number",
252: "Only one \042worldspawn\042 is allowed",
254: "Syntax error in map file, at line %d :\n%s",
255: "\042worldspawn\042 entity not found in this file",
256: "QuArK has found %d invalid polyhedron(s) in this file. Look for the \042broken polyhedron\042 icons in the list.",
257: "QuArK has found %d invalid polyhedron face(s) in this file. Look for the \042broken face\042 icons in the list.",
# 258: "Polyhedrons with the 'origin' content flag are not allowed in 'worldspawn'. Ignored.",
# Rowdy
260: "\042patchDef2\042 expected",
261: "bezier",
#262: "JPG image",
#263: "JPG image (*.jpg)|*.jpg",
264: "Map beziers",
# /Rowdy
288: "Help snippet",
289: "||Red line : these red lines delimit which portion of the map are to be considered visible on the other view. The objects that are not visible on both map views are considered invisible, and if you see them on one view, they will be grayed out and not selectable with the mouse.\n\nMove these red lines if you need, for example, a quick way to select objects in a room without selecting the ceiling first every time : in this case, scroll the XZ view and/or move its red line until it is below the ceiling, so that the ceiling doesn't come in the way any more.",
384: "Impossible to create the file :\n\n%s",
385: "QuArK failed to open the file :\n\n%s",
389: "Add a Specific/Arg",
390: "Delete Specific/Arg",
421: "This would display help about entity if an entity was selected",
501: "Checking map...",
502: "Saving compiled model...",
503: "Reworking model...",
504: "Conifying...",
505: "Interrupted !\nCancelling, please wait...",
507: "Making hollow...",
508: "Brush subtraction...",
509: "Map operation...",
# 511: "Interrupt",
512: "edit Specific/Arg",
513: "rotation",
514: "move",
515: "move polyhedron",
516: "resize polyhedron",
517: "polyhedron distortion",
518: "set side orientation",
519: "glue side to plane",
520: "move entity",
521: "move selection",
522: "move group",
524: "move Duplicator image",
525: "move vertex",
526: "change entity angle",
527: "linear distortion/shear",
528: "linear rotation/zoom",
539: "move here",
540: "insert into group",
541: "duplicate",
542: "cut",
543: "paste",
544: "new item",
546: "apply texture",
547: "apply texture (%d faces)",
548: "enlarge / shrink",
549: "inflate / deflate",
550: "rotation",
551: "symmetry",
552: "move (from tool palette)",
553: "delete Specific",
556: "create new group",
558: "move with arrow key",
559: "force angle to grid",
560: "force to grid",
563: "cut out corner",
564: "intersect polyhedrons",
565: "make hollow",
566: "rename object",
568: "drag new item",
579: "delete selection",
582: "delete item '%s'",
586: "lines through hole",
590: "set group view flags",
592: "duplicate item",
593: "save in Explorer",
594: "drop file(s)",
595: "change palette",
596: "set texture flags",
597: "move face or texture",
598: "move texture on face",
599: "(cannot undo)",
600: "paste into group",
601: "paste and cut in two",
602: "delete unused faces and polys",
603: "click on form button",
604: "make file links",
605: "import files",
606: "toolbox main folder",
607: "toolbox folder",
608: "edit description",
609: "edit object Spec/Arg",
610: "delete Specific",
611: "negative polyhedron",
612: "external editor",
613: "set texture link",
614: "update BSP",
615: "set file name",
616: "drop new item",
617: "texture distortion",
618: "texture rotation",
619: "adjust tex. to face",
620: "reset tex. scale",
621: "negative poly",
622: "set group color",
623: "new texture links",
624: "auto. delete unused faces and polys",
625: "resize texture",
626: "image resize or conversion",
627: "bezier reshape",
720: "Press a key or select one in the list :",
721: "Key mapping for special actions",
722: "Key",
723: "Associated action",
725: "&Key...",
768: "Known types (%s)|%s",
769: "Structured text for hand-editing (*.%0:s)|*.%0:s",
770: "Save file as... (enter name and select type)",
771: "Choose the file(s) to open",
772: "QuArK Explorer file (*.qrk)|*.qrk",
773: "QuakeC code (*.qc)|*.qc",
774: "All files (*.*)|*.*",
775: "QuArK Map (*.qkm)|*.qkm",
776: "Texture Wads (*.wad)|*.wad",
777: "Quake 2 textures (*.wal)|*.wal",
778: "Pak files (*.pak)|*.pak",
779: "Compiled BSP (*.bsp)|*.bsp",
780: "HexenC code (*.hc)|*.hc",
781: "PCX image (*.pcx)|*.pcx",
782: "Bitmap image (*.bmp)|*.bmp",
783: "Old QuArK format (*.qme)|*.qme",
784: "Quake .map file (*.map)|*.map",
785: "QuArK Model (*.qkl)|*.qkl",
786: "Classic Quake .mdl file (*.mdl)|*.mdl",
787: "Quake 2 .md2 file (*.md2)|*.md2",
788: ".wav Sound (*.wav)|*.wav",
789: "Quake 2 Video (*.cin)|*.cin",
790: "Text file (*.txt)|*.txt",
791: "Config. file (*.cfg)|*.cfg",
792: "Heretic II textures (*.m8)|*.m8",
793: "Sin textures (*.swl)|*.swl",
794: "Sin Pak files (*.sin)|*.sin",
795: "Heretic II models (*.fm)|*.fm",
796: "TGA image (*.tga)|*.tga",
# zip archive support
797: "ZIP archives (*.zip)|*.zip",
# Q3 Pak Support
798: "Quake 3 Pak Files (*.pk3)|*.pk3",
799: "Quake 1 / Half-Life Sprite Files (*.spr)|*.spr",
800: "Quake 2 Sprite Files (*.sp2)|*.sp2",
801: "JPEG Image (*.jpg)|*.jpg",
802: "C Files (*.c)|*.c",
803: "C Header Files (*.h)|*.h",
809: "Invalid version number in Quake's original Progs.dat",
2368: "Skins",
2369: "Frames",
2370: "Main component",
2371: "Model",
2372: "skin%d",
2373: "Frame group",
2374: "Skin group",
2432: "This model contains no data to save",
2433: "Internal error : Invalid packed model structure//Please report : %s",
2434: "The current structure of this model is invalid. It cannot be saved in %s format.",
2435: "The model contains no skin and the skin size is unspecified",
3001: "Esc",
3002: "1",
3003: "2",
3004: "3",
3005: "4",
3006: "5",
3007: "6",
3008: "7",
3009: "8",
3010: "9",
3011: "0",
3012: "-",
3013: "=",
3014: "BackSpace",
3015: "Tab",
3016: "q",
3017: "w",
3018: "e",
3019: "r",
3020: "t",
3021: "y",
3022: "u",
3023: "i",
3024: "o",
3025: "p",
3026: "[",
3027: "]",
3028: "Enter",
3029: "Ctrl",
3030: "a",
3031: "s",
3032: "d",
3033: "f",
3034: "g",
3035: "h",
3036: "j",
3037: "k",
3038: "l",
3039: ";",
3040: "'",
3041: "~",
3042: "Shift",
3043: "\\",
3044: "z",
3045: "x",
3046: "c",
3047: "v",
3048: "b",
3049: "n",
3050: "m",
3051: ",",
3052: ".",
3053: "/",
3054: "Shift",
3055: "*",
3056: "Alt",
3057: "Space",
3058: "CapsLock",
3059: "F1",
3060: "F2",
3061: "F3",
3062: "F4",
3063: "F5",
3064: "F6",
3065: "F7",
3066: "F8",
3067: "F9",
3068: "F10",
3069: "NumLock",
3070: "ScrollLock",
3071: "Home",
3072: "UpArrow",
3073: "PgUp",
3074: "-",
3075: "LeftArrow",
3076: "5",
3077: "RightArrow",
3078: "+",
3079: "End",
3080: "DownArrow",
3081: "PgDn",
3082: "Ins",
3083: "Del",
3086: "\\",
3087: "F11",
3088: "F12",
3256: "Tab",
3257: "Pause",
3258: "F10",
3259: "Alt",
3260: "Mouse1",
3261: "Mouse2",
3262: "Mouse3",
3263: "Joy1",
3264: "Joy2",
3265: "Joy3",
3266: "Joy4",
4096: "Real number expected",
4097: "Unexpected symbol. Expected : %s Found : %s",
4098: "\253 ' \273 expected",
4099: "Unexpected end of line",
4100: "Unexpected char",
4101: "Wrong operand types (if it's a function that returns a vector, you must directly assign it to a vector variable)",
4102: "Error in expression",
4103: "\253 . \273 must follow an expression of type Entity",
4104: "Cannot assign a value to an expression",
4105: "Identifier already defined",
4107: "Original function had not the same number of arguments",
4108: "Original function had other arguments",
4109: "Cannot initialize these type of variable",
4110: "Declaration expected",
4111: "\042void()\042 function expected",
4112: "Too many code ! Overflow is due to the structure of Progs.dat",
4113: "frame name expected",
4114: "Unknown identifier ('%s')",
4115: "Wrong argument count in function call",
4116: "Wrong argument type in function call",
4117: "Too many arguments (maximum is 8)",
4159: "Compile error : %s in entry '%s', at line %d//The error was found in this part of the code, within the last two lines :\n%s",
4160: "unknown symbol",
4161: "data type",
4162: "identifier",
4163: "variable",
4164: "object variable",
4165: "':'",
4166: "';'",
4167: "'.'",
4168: "'='",
4169: "','",
4170: "'+'",
4171: "'-'",
4172: "'*'",
4173: "'/'",
4174: "'&'",
4175: "'|'",
4176: "'!'",
4177: "'||'",
4178: "'&&'",
4179: "'=='",
4180: "'!='",
4181: "'<'",
4182: "'>'",
4183: "'<='",
4184: "'>='",
4185: "'{'",
4186: "'}'",
4187: "'('",
4188: "')'",
4189: "'LOCAL'",
4190: "'BIND'",
4191: "'AUTOEXEC'",
4192: "'IF'",
4193: "'ELSE'",
4194: "'WHILE'",
4195: "'DO'",
4196: "'RETURN'",
4197: "real number",
4198: "string constant",
4199: "vector",
4200: "end of file",
4201: "'$'",
4202: "'['",
4203: "']'",
4416: "Cannot set this attribute to screen panel objects",
4417: "QuArK file object expected",
4418: "Cannot load bitmap file '%s'",
4419: "Invalid dock position",
4420: "Subitem index out of bounds",
4421: "QuArK object expected",
4422: "Object cannot have multiple parents",
4423: "Insertitem index out of bounds",
4424: "Removeitem index out of bounds",
4425: "No such item to remove",
4426: "Unexpected end of data",
4427: "Cannot write into this file",
4428: "Write error (disk may be full)",
4429: "Unknown attribute",
4430: "Internal icon index out of bounds",
4431: "Image1 object expected",
4432: "QuArK File object attribute not found",
4433: "Internal error",
4434: "Callable object expected",
4435: "Loading error",
4436: "Saving error",
4437: "Invalid operation on the main panel",
4438: "expected a QuArK object of type '%s'",
4439: "expected a tuple (object to display, object with default values)",
4440: "expected a tuple of floats",
4441: "expected a 3D vector",
4442: "The module 'quarkx.action' is closed",
4443: "Invalid vector operation",
4444: "Invalid matrix operation",
4445: "ImageList index out of range",
4446: "Not a face of the given polyhedron",
4447: "Points must all be projected",
4448: "Operation not allowed on 3D perspective views",
4449: "Duplicator macro did not create a list of TreeMap objects",
4450: "Expected a list of Internal objects of class '%s'",
4451: "ProgressBar already closed",
4452: "Operation aborted",
4453: "Cannot execute this program",
4454: "Expected a file object or equivalent",
4455: "No such file or file still opened",
4456: "Copy text to clipboard ?",
4457: "Are you sure you want to delete this item(s) ?",
4458: "Invalid arguments to 'extendcoplanar'",
4459: "positive integer expected",
4614: "&More >>",
4616: " *** EXCEPTION REPORT ***\n\n%s Address in the program : %p (%p)\n",
4617: "\n\nIf you think this is a bug, please copy the above information in your bug report.",
4618: "//Description of the invalid polygon :",
4620: "Impossible to create the surface for a polygon//Three aligned points don't define a plane.",
4864: "",
4865: "Could not find the 3D drivers (%s). You need either a 3DFX card with installed drivers or the software 3D library, depending on the choice you made in the Configuration dialog box//Error code %d",
4866: "Error with the 3DFX Glide drivers//%s failed",
4867: "No 3D driver configured. Please select a 3D driver in the Configuration dialog box",
4868: "Could not load the OpenGL drivers//Error code %d",
4869: "Error in OpenGL initialization//'%s' failed",
4870: "Error in OpenGL commands [Error code(s) %s step %d]",
5119: "(new)",
5120: "Explorer Group",
5121: "Imported File",
5122: "(clipboard)",
5123: "QuArK %s editor",
5124: "QuakeC Code",
5125: "file",
5126: "Map",
5127: "ToolBox",
5128: "New %s",
5129: "Texture Wad",
5130: "Texture Link",
5131: "Quake 1 texture",
5132: "Quake 2 texture",
5133: "Pak file",
5134: "BSP",
5135: "Texture List",
5136: "Pak folder",
5137: "PCX image",
5138: "Bitmap image",
5139: "HexenC code",
5140: "ToolBox folder",
5141: "Qme (old QuArK format)",
5142: ".map file",
5143: "Model",
5144: ".mdl file",
5145: ".md2 file",
5155: "Quake Context",
5156: "Tool bar",
5157: ".wav Sound",
5158: "Video",
5159: "QuArK Macro",
5160: "Text file",
5161: "Config file",
5162: "Files for the game",
5163: "M8 texture (Heretic II)",
5164: "Half-Life texture",
5165: "Sin texture",
5166: "Sin Pak file",
5167: ".fm file (Heretic II)",
5168: "TGA image",
# Zip File Support
5169: "Zip Archive",
5170: "Quake 3 Pak",
5171: "Sprite file",
5172: "JPEG Image",
5173: "C File",
5174: "C Header File",
5183: "File corruption in '%s'. You can try to continue to load it, but Warning ! This will likely cause serious troubles like mess in the object trees !\n\nThis error message may be displayed several times per file.\n\nReally continue ?",
5184: "'%s' is not a QuArK-5 file",
5185: "'%s' was created with a more recent version of QuArK",
5186: "'%s' is invalid (the end of the file seems to be missing)",
5187: "The file extension of '%s' does not match the file contents, which seems to be of type '%s'",
5188: "The file '%s' does not exist",
5189: "Cannot write into file '%s', because it is currently opened in another window. Save your work in another file",
# 5190: "Internal error L - this program is buggy !//QList",
# 5191: "Internal error R - this program is buggy !//Cannot read file objects of type '%s' with format %d",
# 5192: "Internal error W - this program is buggy !//Cannot write file objects of type '%s' with format %d",
5193: "Syntax error in source file, line %d : %s",
5194: "'{' expected",
5195: "unexpected data after the final '}' has been ignored.",
5196: "invalid property definition",
5197: "'=' expected",
5198: "unexpected end of line, unbalenced quotes",
5199: "hexadecimal code expected",
5200: "// This file has been written by QuArK %s\n// It's the text version of file: %s\n",
5201: "Could not open '%s' : Unknown file extension",
5202: "'%s' cannot be opened in a new window",
5203: "File not found : %s//Current directory is %s",
5204: "Cannot load the configuration file (Defaults.qrk). Be sure QuArK is correctly installed. You may need to reinstall it.\n\n%s",
5205: "Invalid configuration file (Defaults.qrk). Reinstall QuArK. (Missing SetupSet '%s')",
5206: "Wrong version of configuration file (Defaults.qrk). Reinstall QuArK. (QuArK is '%s' and Defaults.qrk is '%s')",
# 5207: "Error loading file '%s'.//Object initialization failed",
5207: "Polyhedron has no width.",
5208: "Only %d valid faces are left.",
5209: "integer or floating-point value expected - '%s' is not a floating-point value",
5210: "You are changing the name of the file. This will make a new file; the file with the old name will not be deleted.",
5211: "Save changes to this file ?",
5212: "Save changes to '%s' ?",
5213: "\n\n(Memory undo buffer too small, reduced to %d)",
5214: "You will no longer be able to undo any of the previous operations if you go on.",
5215: "You will no longer be able to undo all of the previous operations if you go on.",
5216: "QuArK has left %d temporary file(s) in path %s, probably because it crashed or because of a bug. Do you want to delete this(these) file(s) now ?",
5217: "'%s' : Cannot (un)do this operation any more because the file has been closed",
# 5218: "Warning: the tree view you see is incomplete, because the Explorer views of QuArK cannot display the same objects more than once.",
# 5219: "Internal error X - this program is buggy !//No Explorer from root",
5220: "'%s' : Invalid file size. The file is %d bytes length instead of %d",
5221: "Cannot display '%s' in the tree view, because it is already visible in another Explorer views in QuArK.",
5222: "This file contains a link to the file '%s' which cannot be found. The link has been ignored (and deleted).",
5223: "Internal error (%s) - this program is buggy !//Please report: %0:s",
# 5224: "Cannot open file '%s' because it is already opened as file link",
5225: "This file contains a link to the file '%s' which is already opened elsewhere. The link has been ignored (and deleted).",
5248: "Cancel",
5249: "&Move here",
5250: "&Copy here",
5251: "&Move selected items here",
5252: "&Copy selected items here",
5253: "&Insert into this group",
5254: "C&opy into this group",
5255: "&Insert selected items into this group",
5256: "C&opy selected items into this group",
5257: "Name your new Add-on",
5258: "Toolbox Folders",
5259: "New Folder",
5230: "Custom Add-on",
5376: "Configuration",
# 5376: "The minimal value is %s",
# 5377: "The maximal value is %s",
5377: "Cancel",
5378: "Close",
5379: "View %0:s%2:s (%1:s %3:s)",
5380: "Angle Side view",
5381: "Specific",
5382: "Arg",
5383: "&Add a Specific\n&Delete Specific",
5384: "Palette",
5385: "%s palette",
# 5386: "You cannot change this palette directly here. Do you want to open the file that contains the %s ?",
5386: "You cannot change this palette directly here",
5387: "%s texture - %d × %d",
5388: "Texture Flags",
5389: "Groups",
5390: "Flags",
5391: "\267 differs \267",
5392: "%d KB",
5393: " Paste",
5394: "This stores your settings",
5395: "view of textures is disabled - click here to enable",
5396: "Paste special...",
5397: "(full-screen)",
5398: "Elapsed time: %d second",
5399: "Elapsed time: %d seconds",
5400: "Byte count: %d bytes (%d KB)",
5401: " Group: %s",
5402: "This group contains %d KB of data in %d hidden objects (impossible to edit them directly).",
5403: "Browse for directory",
5404: "Browse for texture",
5405: "Browse for file",
5406: "Hull *%d",
5407: "%d objects",
5408: "Press on shaded areas, or use keypad arrows",
5440: "Loading ToolBox...",
5441: "ToolBox",
5442: "Copying data...",
5443: "Writing as text...",
5444: "Loading ToolBoxes data...",
5445: "Loading configuration files...",
5446: "Loading tree view...",
5447: "Reading as text...",
5448: "Reading image...",
5449: "Writing image...",
5450: "Saving...",
5451: "Loading .map file...",
5452: "Opening...",
5453: "Preparing textures...",
5454: "Loading textures for 3D view...",
5455: "Fast forward...",
5456: "Building .pak file...",
5457: "Pasting...",
5504: "No texture image//Missing or invalid %s",
5505: "'%s' is not a WAD file//%d should be %d",
5506: "'%s' is not a PACK file//%d should be %d",
# 5507: "Internal error N - this program is buggy !//Acces without FNode",
5508: "Files names in PACK files are limited to %d characters, including the path. '%s' is too long",
5509: "Invalid data. The file is probably corrupted//Error %d",
# 5510: "Internal error S - this program is buggy !//Empty Specific with data '%s'",
5511: "The WAD file contains data that cannot be written to this type of file.//'%s' invalid",
# 5512: "Internal error A - this program is buggy !//Acces",
# 5513: "Internal error C - this program is buggy !//CloseReadOnly",
5514: "'%s' : structure invalid for a Quake %d-like texture",
5515: "FILE ERROR !!\n\nThe file has been correctly saved to :\n\n%s\n\nbut QuArK cannot reopen it. You can't continue editing the file.\n\nQuit QuArK now, find this temporary file and rename it '%s'",
5516: "Could not save the file '%s'. The file is maybe read-only or opened in another application.//The file has been correctly saved to :\n\n%s\n\nbut QuArK failed to move it to the correct location. You can look for this temporary file and rename it '%s' yourself",
5517: "Could not save the file. The disk is maybe full or read-only.//%s",
5518: "Invalid texture link - no link !",
5519: "No data - the file is empty",
5520: "'%s' is not a Quake 1, Hexen II nor Quake 2 BSP file//%d should be %d or %d",
5521: "Missing BSP data. You can't use or save an empty or incomplete .bsp file",
5522: "(Missing)",
5523: "(Empty data)",
5524: "Texture '%s' not found in %s",
# 5525: "Internal error R - this program is buggy !//DoRemoveReference",
5526: "Could not add this file to the QuArK Explorer",
5527: "This file is opened in several windows. You must close them before you can do this operation",
# 5528: "You cannot make changes here because QuArK could not find in which file this data is stored",
# 5529: "Do you want to save the changes you made ? They will be stored in the following file(s) :\n",
# 5530: "\n %s",
5528: "Are you sure you want to make changes here ? QuArK could not find in which file this data is stored.",
5529: "This data comes from the file '%s'.\n\nIt is possible to make changes there. QuArK will let you save or discard these changes later.",
5530: "Cannot make changes here, because QuArK could not find from which file this data comes",
5531: "Cannot save '%s' because it has been attached to something else. You should save changes you make before you do other complex operations on the objects",
5532: "'%s' is not a 256-colors PCX file//%d-%d should be %d-%d",
5533: "'%s' is a PCX file but contains no palette. QuArK cannot open such files",
5534: "No image data//Missing or invalid %s",
5535: "'%s' is not a Bitmap file//%d should be %d",
5536: "'%s' is not a 256-colors bitmap. Do you want to convert it to the %s palette ?",
# 5537: "Failed to load the bitmap image. This may come from incompatible video drivers//Error code %d\nInternals %d %d %d\nSize %dx%d %dx%d",
# 5537: "Not a supported bitmap format",
5538: "Failed to convert the object to the required file type",
5539: "The width and the height of textures must be multiples of 8. This image's size is %d by %d pixels. The image will be expanded up to %d by %d.",
#5540: "The image is too large to be converted. Its size is %d by %d. It will be reduced down to %d by %d pixels.",
5541: "The image's palette is different from the %s palette. The colors will have to be converted.",
5542: "QuArK tried to switch to a game that's not installed.//Mode %s",
5543: "QuArK must switch to %s game mode.",
5544: "You are converting this texture between different games. Because of the palette, the colors might slightly change. Do you want to map the colors to the palette of the new game ?",
5545: "You cannot drop files of this type here.",
5546: "Do you want to put a link to the file here ? If you answer No, the file will be copied. If you want to simply open this file instead, don't drag it to the Explorer panel.",
5547: "QuArK could not switch to '%s' game mode : this game seems unsupported in this version",
5548: "QuArK could not determine the game for which this file is made. The current game mode is '%s'.",
5549: "The information required to work with %s could not be loaded. Be sure that the supporting files for this game are installed. The missing file is '%s'; if you do not have it, you need to download it from the web site.",
5550: "The following file(s) have been indirectly modified. Save the changes ?\n",
5551: "\n %s",
5552: "(error: no filename found)",
5553: " However, it is recommended that you make a QuArK add-on instead of modifying this file.\n\nTo make a new Add-on, choose New Main Folder in the Folders menu.",
5554: "Sorry, this version of QuArK cannot save files in the old .qme format. Use 'Save object as file' or 'Copy object as'",
5555: "'%s' is not a QME file//%d should be %d",
5556: "'%s' contains unsupported version numbers. It might have been done with an old version of QuArK (2.x). This version of QuArK cannot read these files.//Version code : %d\nType code : %d",
5557: "QuArK did not find the registered add-on '%s'.",
5558: "This map is invalid and contains no data.",
5559: "QuArK needs the file '%1:s' from %0:s and could not find it on your disk. Please insert the CD-ROM now.",
5560: "QuArK needs the file '%1:s' from %0:s and could not find it. You must set up the path(s) to %0:s in the Configuration dialog box",
5561: "QuArK needs the file '%1:s' from %0:s (directory '%2:s') and could not find it. You must set up the path(s) to %0:s in the Configuration dialog box",
# 5562: "The clipboard contains %d objects. Do you want to open them all ?",
5563: "You must enter a Specific",
5564: "A Specific cannot begin with the symbol '%s'",
5565: "A Specific cannot contain the symbol '%s'",
5566: "This map contains unsupported objects. It might have been created with a more recent version of QuArK.\n\n%d object(s) of unknown type deleted",
5567: "Cannot undo this operation, sorry",
5568: "Could not build the tool bar '%s'.",
# 5569: "The data in this file ('%s') does not match the file name ('%s').",
5569: "",
5570: "(switch back to %s game mode)",
5571: "'%s' is not a Quake 2 MD2 file//%d-%d should be %d-%d",
5572: "'%s' is a Quake 2 BSP file but is unsupported version//%d should be %d",
5573: "QuArK does not know if '%s' is a Quake 1 or Hexen II file. Is it an Hexen II map ?",
5574: "Missing information : QuArK cannot determine the target game for this file//Specific 'Game' missing",
# 5575: "The skin path in this Model is ambiguous : several .pcx files with the same name exist in various paths. QuArK may have choosen a wrong one.",
5575: "Missing skin image file '%s' in model '%s'",
5576: "This Model is invalid and contains no data.",
5577: "Macro processing error in '%s' : %s",
5578: "unbalanced '['",
5579: "invalid character after '['",
5580: "unbalanced '<'",
5581: "'>' must be followed by ']'",
5582: "'%s' is not an add-on for the game %s but for %s. You must switch to the correct game mode before you can use it.",
5583: "Cannot open the palette to choose a color from",
5584: "(missing caption)",
5585: "Could not execute this program//Command line: ``%s´´\nDefault directory: ``%s´´",
5586: "QuArK could not execute this or these programs. You must be sure they are installed on your system, and then enter the path to them in the configuration dialog box. Do you want to enter the path now ?",
5587: "Impossible to create the directory '%s'. Be sure you entered the path to %s correctly in the configuration dialog box//Error code %d",
5588: "Texture '%s' not found.",
5589: "%d textures written to '%s'.",
5590: "%d textures written to '%s', including %d animated textures.",
5591: "Choose the file(s) to link to",
5592: "Choose the file(s) to import",
5593: "'%s' is not a Quake1 MDL file//%d-%d should be %d-%d",
5594: "Skin '%s' has no image",
5595: "Failed to convert this map into a .map file",
5596: "The Add-ons should be put in the same directory as QuArK itself",
5597: "You are deleting the main toolbox folder '%s' and its container '%s'.\n\nAre you sure ? (this operation can of course be undone)",
5598: "Enter the description of this item :",
5599: "QuArK must switch to %s game mode, but it cannot do so now because you are working in a ToolBox. You need to copy this data outside the toolbox before you can work on it. When copying textures, copy them using the standard 'Copy' command and then use 'Paste as...' to convert it to another game.",
5600: "This file is already registered as an Add-on",
5601: "Save this file in the QuArK Explorer ?\n\nFor organization purposes, this option lets you pack your files into a single .qrk file. If you answer No, QuArK will let you save your file normally.",
# 5602: "Structure error : Missing 'Typ' for '%s' in the form data.",
5603: "Cannot access the WAVE sound output. There is probably another sound currently played//waveOutOpen failed",
5604: "Cannot access the WAVE sound output. Internal error//waveOutPrepareHeader failed",
5605: "'%s' is not a WAV file//%d should be %d",
5606: "Wave output timed out",
5607: "'%s' is probably not a CIN file//Invalid frame size (%d by %d) or sound format (%d-%d-%d)",
5608: "Error while reading the CIN file. The end of the file is probably missing",
5609: "Time out",
5610: "Invalid path. You must select or type the path to the file '%s'",
5611: "Cannot save an empty file",
5612: "About QuArK - Quake Army Knife",
5613: "Impossible to delete the Registry association for '.%s'.",
# 5614: "QuArK will associate itself with the following file types :\n\n%s\nThe files with these extensions will be given a custom icon and double-clicking on them will open them with QuArK. Do you want to continue ? If No, you might want to select exactly the file types you want in the Configuration dialog box.",
# 5615: "\t.%s\t(%s)\n",
5616: "Impossible to create the Registry association for '.%s'.",
5617: "This program failed to build the following file(s) :\n\n%s\nPlease see its documentation to learn more about this program. You may also want to look at the program's log file (if any) or run the program again from an MS-DOS box if you didn't have the time to read its screen output.",
5618: "\t%s\n",
# 5619: "The texture '%s' is not for Quake 2",
5619: "Texture file format could not be converted : %s",
5620: "Your map is 'leaked', i.e. there is a hole. To help you find it, QuArK can display a list of points going through the hole. Ok ?",
5621: "The map is not opened. Cannot load .lin file",
5622: "Cannot use macro 'O' (Operation) without an object to operate on",
5623: "Duplicator behaviour '%s' not found",
5624: "This map contains a special Duplicator whose behaviour is unknown to QuArK 5//'Sym' is '%s'",
5625: "%s cannot run multiple TCs together :\n%s\nOnly the last one is used.",
5626: "the temporary directory '%s'",
5627: "The directory to %s seems to be wrong : could not find %s. Do you want to enter the correct path now ?",
5628: "Remove the temporary tag of file '%s' ? QuArK will no more consider it as a temporary file.",
5629: "Always create pak files instead of writing files in '%s'",
5630: "Cannot create a new temporary .pak file. The names are in use up to '%s'",
5631: "There is an error in the definition of this button//Form '%s' not found",
5632: "The WAVE file has bad formatted data at the end that will be ignored.",
5633: "This object has been moved or deleted",
5634: "Error in hull number %d : %s",
5635: "The BSP file structure seems to be invalid.\nError code %d",
5636: "Update the BSP file ?\n\nNote that no group information nor any polyhedron added in this map can be saved. You can only change entities and textures in BSPs.",
5637: "This BSP or parts of it are still opened. Cannot open the map display again",
5638: "The hull number %d contains %d invalid face(s).\n%s",
5639: "No texture number %d",
5640: "Cannot edit BSP faces yet. Wait for a future version of QuArK !..",
5641: "The BSP structure seems a bit strange. Be sure the file didn't get truncated.",
5642: "Save changes in the configuration ?",
5643: "Load-time include command '%s' : not found",
5644: "Unknown file extension '%s'",
5645: "Press the key you want for this action...",
5646: "%s\n\nAre you sure you want to INTERRUPT this process ?",
5647: "You are about to remove all association to QuArK from your Windows Registry. Note that the next time you run QuArK, it will automatically associate itself with .qrk files again.\n\nDo you want to continue ?",
5648: "Done ! To explicitely remove file associations, use the button\n'remove all associations' below.",
5649: "This documentation is in HTML format, but QuArK failed to open your web browser.\n\nTried to open : %s//Error: %s",
5650: "No .html or .htm key in Registry",
5651: "No key \"%s\" in Registry",
5652: "Cannot execute command : %s",
5653: "Operation terminated.",
5654: "To compile the QuakeC or HexenC code in this file, you must first switch to the appropriate game mode",
5655: "'%s' is not a M8 texture file//%d should be %d",
#5656: "The software 3D viewer is displaying strange colors because it does not support palettized textures. If you have a 3DFX card, select the 3DFX driver in the Configuration dialog box.",
5657: "Cannot save the setup file",
5658: "QuArK could not save your configuration :\n\n %s",
5659: "QuArK cannot save any file because your system's temporary directory is invalid.",
5660: "No texture found in '%s'",
5661: "The Heretic II texture sizes stored in '%s' seem invalid//%d x %d should be %d x %d",
5662: "Select the directory where you want to extract the files to :",
5663: "%d file(s) have been extracted to the directory '%s'.",
5664: "Cannot save skin groups in this file format.",
5665: "Cannot save frame groups in this file format.",
5666: "The attached file '%s' should be stored in the same folder as in the main file, that is, '%s'.",
5667: "The Model Component and its current Frame are incompatible : the Frame has not enough vertices",
5668: "No problem found in this map.",
5669: "There is a problem in this map :\n%s",
5670: "You cannot remove this button. It comes from the '%s' toolbox.",
5671: "The structure of the texture file is invalid. The Heretic II tool 'WAL2M8' is known to create such strange files that result in display errors if you look at textures from very far away in the game. You should use QuArK instead of WAL2M8 to make your Heretic II textures.",
5672: "'%s' has a missing section, or a section designed for another version of the game//Missing section '%s'",
5673: "Cannot convert '%s' to '%s'",
5674: "Texture size",
5675: "Enter a new size for this texture :",
5676: "Texture resize can only apply to a texture, not a texture link",
5677: "Invalid texture size. Try '%d %d'",
5678: "Data image format error",
5679: "%s contains an unsupported format (colormap %d, type %d, bpp %d)",
5680: "This operation is not supported with 24-bit images; it requires a 256-color palettized image.",
5681: "You have copied a large amount of data to the clipboard.\nDo you want to keep this data in the clipboard now ?",
5682: "The file '%s' was automatically saved but not deleted, which means that QuArK crashed. Do you want to save it ?\n\nWarning : if you answer No, the auto-saved file will be deleted !",
5683: "\nThe image quality will suffer from this operation.",
5684: "* The image is about to be shrinked.\n",
5685: "* The new image format does not support an alpha mask (transparency).\n",
5686: "* A new palette is applied to the image.\n",
5687: "* The image will be converted to the palette of %s.\n",
5688: "Invalid texture extension in Defaults.qrk",
5689: "This image has got no palette : it is a true-color 24-bit image",
5690: "Setup file was cleared. QuArK is restarted with its default configuration.\n\nDo you want to activate Colorful Captions again ?",
5691: "Invalid Sprite File!",
5692: "The File %s is not compressed\nusing stored, shrunk, imploded and deflated methods.\n\nLoading Aborted! (%d)",
# Negative numbers are never used directly by QUARK5.EXE.
-101: "Cannot drop this item into a map.",
#-102: "\nNote: to use a bitmap as a texture, you must first convert the bitmap into a texture : in the Texture Browser, choose 'Paste Special' instead of 'Paste'.",
-103: "%d texture(s) could not be found. Are you sure you want to continue ?",
-104: "This command uses OpenGL. Note that if you get a black screen it probably means you are looking at a part of your map where there is no light. In case of trouble (or to disable light computation) see the OpenGL section of the configuration dialog box and try again.\n\nIt is recommended to save your work first. Ok to load OpenGL now ?",
}
| [
"nobody@5419a3ea-6bc3-475c-b79b-167d7c3fbd5f"
] | nobody@5419a3ea-6bc3-475c-b79b-167d7c3fbd5f |
bb1484bc3df7792baec606d0259c65f2711a1bb6 | a8c0867109974ff7586597fe2c58521277ab9d4d | /LC90.py | 9d8341d685661f5f3d24ef3401b4bcb15fa0cba8 | [] | no_license | Qiao-Liang/LeetCode | 1491b01d2ddf11495fbc23a65bb6ecb74ac1cee2 | dbdb227e12f329e4ca064b338f1fbdca42f3a848 | refs/heads/master | 2023-05-06T15:00:58.939626 | 2021-04-21T06:30:33 | 2021-04-21T06:30:33 | 82,885,950 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 628 | py | class Solution(object):
    def subsetsWithDup(self, nums):
        """
        :type nums: List[int]
        :rtype: List[List[int]]
        """
        # Sort so duplicates sit next to each other, then grow the subsets
        # iteratively (LeetCode 90, Subsets II).
        nums.sort()
        result = [[], [nums[0]]]
        # add_len = how many subsets the previous step appended; a repeated
        # value may only extend those, which is what prevents duplicates.
        add_len = 1
        for idx in range(1, len(nums)):
            if nums[idx] == nums[idx - 1]:
                result.extend([elem + [nums[idx]] for elem in result[-add_len:]])
            else:
                add_len = len(result)
                result.extend([elem + [nums[idx]] for elem in result])
        return result
sol = Solution()
# nums = [1, 2, 2, 3, 3]
nums = [1, 1, 2]
print(sol.subsetsWithDup(nums))
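
# --- Hedged self-check (not part of the original submission). ---
# For [1, 1, 2] the loop builds [], [1], then [1, 1] (the duplicate 1 only
# extends the add_len most recent subsets), then [2], [1, 2], [1, 1, 2].
assert sorted(sol.subsetsWithDup([1, 1, 2])) == sorted(
    [[], [1], [1, 1], [2], [1, 2], [1, 1, 2]])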
| [
"[email protected]"
] | |
99cff29f8d6ff23f00550202608b54f8610bd70c | aa9a0acc85a7328969a81527f3ed7c155a245727 | /chapter_13/Alien_Invasion/bullet.py | 26d4391e22fdf97246f3cabb1eb2a814f1b590b8 | [] | no_license | mwnickerson/python-crash-course | 7035e21e1ee60c05d1d475ebcf04bd6a93c5967a | 18784c7e3abfb74f85f8c96cb0f8e606cab6dccc | refs/heads/main | 2023-08-03T20:14:49.883626 | 2021-09-25T05:31:12 | 2021-09-25T05:31:12 | 400,644,375 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,048 | py | # bullet class
import pygame
from pygame.sprite import Sprite
class Bullet(Sprite):
"""class to manage bullets fired from the ship"""
def __init__(self, ai_game):
"""create a bullet object at the ship's current position"""
super().__init__()
self.screen = ai_game.screen
self.settings = ai_game.settings
self.color = self.settings.bullet_color
# create a bullet rect at (0,0) and then set correct position
self.rect = pygame.Rect(0, 0, self.settings.bullet_width,
self.settings.bullet_height)
self.rect.midtop = ai_game.ship.rect.midtop
# store the bullet's position as a decimal value
self.y = float(self.rect.y)
def update(self):
"""move the bullet up the screen"""
self.y -= self.settings.bullet_speed
# update the rect position
self.rect.y = self.y
def draw_bullet(self):
"""draw the bullet to the screen"""
pygame.draw.rect(self.screen, self.color, self.rect)
| [
"[email protected]"
] | |
98c75bfd7cec5597b49116f046fd1c340d21b0db | f72c9e46af5ce5ac738693daf65e67a0962a229a | /sdk/lusid/models/inflation_linked_bond.py | 54a3b08f912d4f2e382b7dfdd4b51d437eb7a6a3 | [
"MIT"
] | permissive | finbourne/lusid-sdk-python | db8ce602f8408169f6583783c80ebbef83c77807 | 32fedc00ce5a37a6fe3bd9b9962570a8a9348e48 | refs/heads/master | 2023-08-29T18:22:49.488811 | 2023-08-29T15:57:26 | 2023-08-29T15:57:26 | 125,082,278 | 11 | 11 | NOASSERTION | 2023-04-28T07:16:48 | 2018-03-13T16:31:54 | Python | UTF-8 | Python | false | false | 33,796 | py | # coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 1.0.463
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from lusid.configuration import Configuration
class InflationLinkedBond(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
required_map (dict): The key is attribute name
and the value is whether it is 'required' or 'optional'.
"""
openapi_types = {
'start_date': 'datetime',
'maturity_date': 'datetime',
'flow_conventions': 'FlowConventions',
'inflation_index_conventions': 'InflationIndexConventions',
'coupon_rate': 'float',
'identifiers': 'dict(str, str)',
'base_cpi': 'float',
'base_cpi_date': 'datetime',
'calculation_type': 'str',
'ex_dividend_days': 'int',
'index_precision': 'int',
'principal': 'float',
'principal_protection': 'bool',
'stub_type': 'str',
'rounding_conventions': 'list[RoundingConvention]',
'instrument_type': 'str'
}
attribute_map = {
'start_date': 'startDate',
'maturity_date': 'maturityDate',
'flow_conventions': 'flowConventions',
'inflation_index_conventions': 'inflationIndexConventions',
'coupon_rate': 'couponRate',
'identifiers': 'identifiers',
'base_cpi': 'baseCPI',
'base_cpi_date': 'baseCPIDate',
'calculation_type': 'calculationType',
'ex_dividend_days': 'exDividendDays',
'index_precision': 'indexPrecision',
'principal': 'principal',
'principal_protection': 'principalProtection',
'stub_type': 'stubType',
'rounding_conventions': 'roundingConventions',
'instrument_type': 'instrumentType'
}
required_map = {
'start_date': 'required',
'maturity_date': 'required',
'flow_conventions': 'required',
'inflation_index_conventions': 'required',
'coupon_rate': 'required',
'identifiers': 'optional',
'base_cpi': 'optional',
'base_cpi_date': 'optional',
'calculation_type': 'optional',
'ex_dividend_days': 'optional',
'index_precision': 'optional',
'principal': 'required',
'principal_protection': 'optional',
'stub_type': 'optional',
'rounding_conventions': 'optional',
'instrument_type': 'required'
}
def __init__(self, start_date=None, maturity_date=None, flow_conventions=None, inflation_index_conventions=None, coupon_rate=None, identifiers=None, base_cpi=None, base_cpi_date=None, calculation_type=None, ex_dividend_days=None, index_precision=None, principal=None, principal_protection=None, stub_type=None, rounding_conventions=None, instrument_type=None, local_vars_configuration=None): # noqa: E501
"""InflationLinkedBond - a model defined in OpenAPI"
:param start_date: The start date of the bond. (required)
:type start_date: datetime
:param maturity_date: The final maturity date of the instrument. This means the last date on which the instruments makes a payment of any amount. For the avoidance of doubt, that is not necessarily prior to its last sensitivity date for the purposes of risk; e.g. instruments such as Constant Maturity Swaps (CMS) often have sensitivities to rates that may well be observed or set prior to the maturity date, but refer to a termination date beyond it. (required)
:type maturity_date: datetime
:param flow_conventions: (required)
:type flow_conventions: lusid.FlowConventions
:param inflation_index_conventions: (required)
:type inflation_index_conventions: lusid.InflationIndexConventions
:param coupon_rate: Simple coupon rate. (required)
:type coupon_rate: float
:param identifiers: External market codes and identifiers for the bond, e.g. ISIN.
:type identifiers: dict(str, str)
:param base_cpi: BaseCPI value. This is optional, if not provided the BaseCPI value will be calculated from the BaseCPIDate, if that too is not present the StartDate will be used. If provided then this value will always set the BaseCPI on this bond. The BaseCPI of an inflation linked bond is calculated using the following logic: - If a BaseCPI value is provided, this is used. - Otherwise, if BaseCPIDate is provided, the CPI for this date is calculated and used. - Otherwise, the CPI for the StartDate is calculated and used. Note that if both BaseCPI and BaseCPIDate are set, the BaseCPI value will be used and the BaseCPIDate will be ignored but can still be added for informative purposes. Some bonds are issued with a BaseCPI date that does not correspond to the StartDate CPI value, in this case the value should be provided here or with the BaseCPIDate.
:type base_cpi: float
:param base_cpi_date: BaseCPIDate. This is optional. Gives the date that the BaseCPI is calculated for. Note this is an un-lagged date (similar to StartDate) so the Bond ObservationLag will be applied to this date when calculating the CPI. The BaseCPI of an inflation linked bond is calculated using the following logic: - If a BaseCPI value is provided, this is used. - Otherwise, if BaseCPIDate is provided, the CPI for this date is calculated and used. - Otherwise, the CPI for the StartDate is calculated and used. Note that if both BaseCPI and BaseCPIDate are set, the BaseCPI value will be used and the BaseCPIDate will be ignored but can still be added for informative purposes. Some bonds are issued with a BaseCPI date that does not correspond to the StartDate CPI value, in this case the value should be provided here or with the actual BaseCPI.
:type base_cpi_date: datetime
:param calculation_type: The calculation type applied to the bond coupon and principal amount. The default CalculationType is `Standard`. Supported string (enumeration) values are: [Standard, Quarterly, Ratio].
:type calculation_type: str
:param ex_dividend_days: Number of Good Business Days before the next coupon payment, in which the bond goes ex-dividend. This is not common in inflation linked bonds but has been seen with (for example) bonds issued by the Bank of Thailand.
:type ex_dividend_days: int
:param index_precision: Number of decimal places used to round IndexRatio. This defaults to 5 if not set.
:type index_precision: int
:param principal: The face-value or principal for the bond at outset. (required)
:type principal: float
:param principal_protection: If true then the principal is protected in that the redemption amount will be at least the face value (Principal). This is typically set to true for inflation linked bonds issued by the United States and France (for example). This is typically set to false for inflation linked bonds issued by the United Kingdom (post 2005). For other sovereigns this can vary from issue to issue. If not set this property defaults to true. This is sometimes referred to as Deflation protection or an inflation floor of 0%.
:type principal_protection: bool
:param stub_type: StubType. Most Inflation linked bonds have a ShortFront stub type so this is the default, however in some cases with a long front stub LongFront should be selected. StubType Both is not supported for InflationLinkedBonds. Supported string (enumeration) values are: [ShortFront, ShortBack, LongBack, LongFront, Both].
:type stub_type: str
:param rounding_conventions: Rounding conventions for analytics, if any.
:type rounding_conventions: list[lusid.RoundingConvention]
:param instrument_type: The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashFlowsLeg, Unknown, TermDeposit, ContractForDifference, EquitySwap, CashPerpetual, CapFloor, CashSettled, CdsIndex, Basket, FundingLeg, FxSwap, ForwardRateAgreement, SimpleInstrument, Repo, Equity, ExchangeTradedOption, ReferenceInstrument, ComplexBond, InflationLinkedBond, InflationSwap, SimpleCashFlowLoan (required)
:type instrument_type: str
""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration.get_default_copy()
self.local_vars_configuration = local_vars_configuration
self._start_date = None
self._maturity_date = None
self._flow_conventions = None
self._inflation_index_conventions = None
self._coupon_rate = None
self._identifiers = None
self._base_cpi = None
self._base_cpi_date = None
self._calculation_type = None
self._ex_dividend_days = None
self._index_precision = None
self._principal = None
self._principal_protection = None
self._stub_type = None
self._rounding_conventions = None
self._instrument_type = None
self.discriminator = None
self.start_date = start_date
self.maturity_date = maturity_date
self.flow_conventions = flow_conventions
self.inflation_index_conventions = inflation_index_conventions
self.coupon_rate = coupon_rate
self.identifiers = identifiers
self.base_cpi = base_cpi
self.base_cpi_date = base_cpi_date
self.calculation_type = calculation_type
self.ex_dividend_days = ex_dividend_days
if index_precision is not None:
self.index_precision = index_precision
self.principal = principal
if principal_protection is not None:
self.principal_protection = principal_protection
self.stub_type = stub_type
self.rounding_conventions = rounding_conventions
self.instrument_type = instrument_type
@property
def start_date(self):
"""Gets the start_date of this InflationLinkedBond. # noqa: E501
The start date of the bond. # noqa: E501
:return: The start_date of this InflationLinkedBond. # noqa: E501
:rtype: datetime
"""
return self._start_date
@start_date.setter
def start_date(self, start_date):
"""Sets the start_date of this InflationLinkedBond.
The start date of the bond. # noqa: E501
:param start_date: The start_date of this InflationLinkedBond. # noqa: E501
:type start_date: datetime
"""
if self.local_vars_configuration.client_side_validation and start_date is None: # noqa: E501
raise ValueError("Invalid value for `start_date`, must not be `None`") # noqa: E501
self._start_date = start_date
@property
def maturity_date(self):
"""Gets the maturity_date of this InflationLinkedBond. # noqa: E501
The final maturity date of the instrument. This means the last date on which the instruments makes a payment of any amount. For the avoidance of doubt, that is not necessarily prior to its last sensitivity date for the purposes of risk; e.g. instruments such as Constant Maturity Swaps (CMS) often have sensitivities to rates that may well be observed or set prior to the maturity date, but refer to a termination date beyond it. # noqa: E501
:return: The maturity_date of this InflationLinkedBond. # noqa: E501
:rtype: datetime
"""
return self._maturity_date
@maturity_date.setter
def maturity_date(self, maturity_date):
"""Sets the maturity_date of this InflationLinkedBond.
The final maturity date of the instrument. This means the last date on which the instruments makes a payment of any amount. For the avoidance of doubt, that is not necessarily prior to its last sensitivity date for the purposes of risk; e.g. instruments such as Constant Maturity Swaps (CMS) often have sensitivities to rates that may well be observed or set prior to the maturity date, but refer to a termination date beyond it. # noqa: E501
:param maturity_date: The maturity_date of this InflationLinkedBond. # noqa: E501
:type maturity_date: datetime
"""
if self.local_vars_configuration.client_side_validation and maturity_date is None: # noqa: E501
raise ValueError("Invalid value for `maturity_date`, must not be `None`") # noqa: E501
self._maturity_date = maturity_date
@property
def flow_conventions(self):
"""Gets the flow_conventions of this InflationLinkedBond. # noqa: E501
:return: The flow_conventions of this InflationLinkedBond. # noqa: E501
:rtype: lusid.FlowConventions
"""
return self._flow_conventions
@flow_conventions.setter
def flow_conventions(self, flow_conventions):
"""Sets the flow_conventions of this InflationLinkedBond.
:param flow_conventions: The flow_conventions of this InflationLinkedBond. # noqa: E501
:type flow_conventions: lusid.FlowConventions
"""
if self.local_vars_configuration.client_side_validation and flow_conventions is None: # noqa: E501
raise ValueError("Invalid value for `flow_conventions`, must not be `None`") # noqa: E501
self._flow_conventions = flow_conventions
@property
def inflation_index_conventions(self):
"""Gets the inflation_index_conventions of this InflationLinkedBond. # noqa: E501
:return: The inflation_index_conventions of this InflationLinkedBond. # noqa: E501
:rtype: lusid.InflationIndexConventions
"""
return self._inflation_index_conventions
@inflation_index_conventions.setter
def inflation_index_conventions(self, inflation_index_conventions):
"""Sets the inflation_index_conventions of this InflationLinkedBond.
:param inflation_index_conventions: The inflation_index_conventions of this InflationLinkedBond. # noqa: E501
:type inflation_index_conventions: lusid.InflationIndexConventions
"""
if self.local_vars_configuration.client_side_validation and inflation_index_conventions is None: # noqa: E501
raise ValueError("Invalid value for `inflation_index_conventions`, must not be `None`") # noqa: E501
self._inflation_index_conventions = inflation_index_conventions
@property
def coupon_rate(self):
"""Gets the coupon_rate of this InflationLinkedBond. # noqa: E501
Simple coupon rate. # noqa: E501
:return: The coupon_rate of this InflationLinkedBond. # noqa: E501
:rtype: float
"""
return self._coupon_rate
@coupon_rate.setter
def coupon_rate(self, coupon_rate):
"""Sets the coupon_rate of this InflationLinkedBond.
Simple coupon rate. # noqa: E501
:param coupon_rate: The coupon_rate of this InflationLinkedBond. # noqa: E501
:type coupon_rate: float
"""
if self.local_vars_configuration.client_side_validation and coupon_rate is None: # noqa: E501
raise ValueError("Invalid value for `coupon_rate`, must not be `None`") # noqa: E501
self._coupon_rate = coupon_rate
@property
def identifiers(self):
"""Gets the identifiers of this InflationLinkedBond. # noqa: E501
External market codes and identifiers for the bond, e.g. ISIN. # noqa: E501
:return: The identifiers of this InflationLinkedBond. # noqa: E501
:rtype: dict(str, str)
"""
return self._identifiers
@identifiers.setter
def identifiers(self, identifiers):
"""Sets the identifiers of this InflationLinkedBond.
External market codes and identifiers for the bond, e.g. ISIN. # noqa: E501
:param identifiers: The identifiers of this InflationLinkedBond. # noqa: E501
:type identifiers: dict(str, str)
"""
self._identifiers = identifiers
@property
def base_cpi(self):
"""Gets the base_cpi of this InflationLinkedBond. # noqa: E501
BaseCPI value. This is optional, if not provided the BaseCPI value will be calculated from the BaseCPIDate, if that too is not present the StartDate will be used. If provided then this value will always set the BaseCPI on this bond. The BaseCPI of an inflation linked bond is calculated using the following logic: - If a BaseCPI value is provided, this is used. - Otherwise, if BaseCPIDate is provided, the CPI for this date is calculated and used. - Otherwise, the CPI for the StartDate is calculated and used. Note that if both BaseCPI and BaseCPIDate are set, the BaseCPI value will be used and the BaseCPIDate will be ignored but can still be added for informative purposes. Some bonds are issued with a BaseCPI date that does not correspond to the StartDate CPI value, in this case the value should be provided here or with the BaseCPIDate. # noqa: E501
:return: The base_cpi of this InflationLinkedBond. # noqa: E501
:rtype: float
"""
return self._base_cpi
@base_cpi.setter
def base_cpi(self, base_cpi):
"""Sets the base_cpi of this InflationLinkedBond.
BaseCPI value. This is optional, if not provided the BaseCPI value will be calculated from the BaseCPIDate, if that too is not present the StartDate will be used. If provided then this value will always set the BaseCPI on this bond. The BaseCPI of an inflation linked bond is calculated using the following logic: - If a BaseCPI value is provided, this is used. - Otherwise, if BaseCPIDate is provided, the CPI for this date is calculated and used. - Otherwise, the CPI for the StartDate is calculated and used. Note that if both BaseCPI and BaseCPIDate are set, the BaseCPI value will be used and the BaseCPIDate will be ignored but can still be added for informative purposes. Some bonds are issued with a BaseCPI date that does not correspond to the StartDate CPI value, in this case the value should be provided here or with the BaseCPIDate. # noqa: E501
:param base_cpi: The base_cpi of this InflationLinkedBond. # noqa: E501
:type base_cpi: float
"""
self._base_cpi = base_cpi
@property
def base_cpi_date(self):
"""Gets the base_cpi_date of this InflationLinkedBond. # noqa: E501
BaseCPIDate. This is optional. Gives the date that the BaseCPI is calculated for. Note this is an un-lagged date (similar to StartDate) so the Bond ObservationLag will be applied to this date when calculating the CPI. The BaseCPI of an inflation linked bond is calculated using the following logic: - If a BaseCPI value is provided, this is used. - Otherwise, if BaseCPIDate is provided, the CPI for this date is calculated and used. - Otherwise, the CPI for the StartDate is calculated and used. Note that if both BaseCPI and BaseCPIDate are set, the BaseCPI value will be used and the BaseCPIDate will be ignored but can still be added for informative purposes. Some bonds are issued with a BaseCPI date that does not correspond to the StartDate CPI value, in this case the value should be provided here or with the actual BaseCPI. # noqa: E501
:return: The base_cpi_date of this InflationLinkedBond. # noqa: E501
:rtype: datetime
"""
return self._base_cpi_date
@base_cpi_date.setter
def base_cpi_date(self, base_cpi_date):
"""Sets the base_cpi_date of this InflationLinkedBond.
BaseCPIDate. This is optional. Gives the date that the BaseCPI is calculated for. Note this is an un-lagged date (similar to StartDate) so the Bond ObservationLag will be applied to this date when calculating the CPI. The BaseCPI of an inflation linked bond is calculated using the following logic: - If a BaseCPI value is provided, this is used. - Otherwise, if BaseCPIDate is provided, the CPI for this date is calculated and used. - Otherwise, the CPI for the StartDate is calculated and used. Note that if both BaseCPI and BaseCPIDate are set, the BaseCPI value will be used and the BaseCPIDate will be ignored but can still be added for informative purposes. Some bonds are issued with a BaseCPI date that does not correspond to the StartDate CPI value, in this case the value should be provided here or with the actual BaseCPI. # noqa: E501
:param base_cpi_date: The base_cpi_date of this InflationLinkedBond. # noqa: E501
:type base_cpi_date: datetime
"""
self._base_cpi_date = base_cpi_date
@property
def calculation_type(self):
"""Gets the calculation_type of this InflationLinkedBond. # noqa: E501
The calculation type applied to the bond coupon and principal amount. The default CalculationType is `Standard`. Supported string (enumeration) values are: [Standard, Quarterly, Ratio]. # noqa: E501
:return: The calculation_type of this InflationLinkedBond. # noqa: E501
:rtype: str
"""
return self._calculation_type
@calculation_type.setter
def calculation_type(self, calculation_type):
"""Sets the calculation_type of this InflationLinkedBond.
The calculation type applied to the bond coupon and principal amount. The default CalculationType is `Standard`. Supported string (enumeration) values are: [Standard, Quarterly, Ratio]. # noqa: E501
:param calculation_type: The calculation_type of this InflationLinkedBond. # noqa: E501
:type calculation_type: str
"""
self._calculation_type = calculation_type
@property
def ex_dividend_days(self):
"""Gets the ex_dividend_days of this InflationLinkedBond. # noqa: E501
Number of Good Business Days before the next coupon payment, in which the bond goes ex-dividend. This is not common in inflation linked bonds but has been seen with (for example) bonds issued by the Bank of Thailand. # noqa: E501
:return: The ex_dividend_days of this InflationLinkedBond. # noqa: E501
:rtype: int
"""
return self._ex_dividend_days
@ex_dividend_days.setter
def ex_dividend_days(self, ex_dividend_days):
"""Sets the ex_dividend_days of this InflationLinkedBond.
Number of Good Business Days before the next coupon payment, in which the bond goes ex-dividend. This is not common in inflation linked bonds but has been seen with (for example) bonds issued by the Bank of Thailand. # noqa: E501
:param ex_dividend_days: The ex_dividend_days of this InflationLinkedBond. # noqa: E501
:type ex_dividend_days: int
"""
self._ex_dividend_days = ex_dividend_days
@property
def index_precision(self):
"""Gets the index_precision of this InflationLinkedBond. # noqa: E501
Number of decimal places used to round IndexRatio. This defaults to 5 if not set. # noqa: E501
:return: The index_precision of this InflationLinkedBond. # noqa: E501
:rtype: int
"""
return self._index_precision
@index_precision.setter
def index_precision(self, index_precision):
"""Sets the index_precision of this InflationLinkedBond.
Number of decimal places used to round IndexRatio. This defaults to 5 if not set. # noqa: E501
:param index_precision: The index_precision of this InflationLinkedBond. # noqa: E501
:type index_precision: int
"""
self._index_precision = index_precision
@property
def principal(self):
"""Gets the principal of this InflationLinkedBond. # noqa: E501
The face-value or principal for the bond at outset. # noqa: E501
:return: The principal of this InflationLinkedBond. # noqa: E501
:rtype: float
"""
return self._principal
@principal.setter
def principal(self, principal):
"""Sets the principal of this InflationLinkedBond.
The face-value or principal for the bond at outset. # noqa: E501
:param principal: The principal of this InflationLinkedBond. # noqa: E501
:type principal: float
"""
if self.local_vars_configuration.client_side_validation and principal is None: # noqa: E501
raise ValueError("Invalid value for `principal`, must not be `None`") # noqa: E501
self._principal = principal
@property
def principal_protection(self):
"""Gets the principal_protection of this InflationLinkedBond. # noqa: E501
If true then the principal is protected in that the redemption amount will be at least the face value (Principal). This is typically set to true for inflation linked bonds issued by the United States and France (for example). This is typically set to false for inflation linked bonds issued by the United Kingdom (post 2005). For other sovereigns this can vary from issue to issue. If not set this property defaults to true. This is sometimes referred to as Deflation protection or an inflation floor of 0%. # noqa: E501
:return: The principal_protection of this InflationLinkedBond. # noqa: E501
:rtype: bool
"""
return self._principal_protection
@principal_protection.setter
def principal_protection(self, principal_protection):
"""Sets the principal_protection of this InflationLinkedBond.
If true then the principal is protected in that the redemption amount will be at least the face value (Principal). This is typically set to true for inflation linked bonds issued by the United States and France (for example). This is typically set to false for inflation linked bonds issued by the United Kingdom (post 2005). For other sovereigns this can vary from issue to issue. If not set this property defaults to true. This is sometimes referred to as Deflation protection or an inflation floor of 0%. # noqa: E501
:param principal_protection: The principal_protection of this InflationLinkedBond. # noqa: E501
:type principal_protection: bool
"""
self._principal_protection = principal_protection
@property
def stub_type(self):
"""Gets the stub_type of this InflationLinkedBond. # noqa: E501
StubType. Most Inflation linked bonds have a ShortFront stub type so this is the default, however in some cases with a long front stub LongFront should be selected. StubType Both is not supported for InflationLinkedBonds. Supported string (enumeration) values are: [ShortFront, ShortBack, LongBack, LongFront, Both]. # noqa: E501
:return: The stub_type of this InflationLinkedBond. # noqa: E501
:rtype: str
"""
return self._stub_type
@stub_type.setter
def stub_type(self, stub_type):
"""Sets the stub_type of this InflationLinkedBond.
StubType. Most Inflation linked bonds have a ShortFront stub type so this is the default, however in some cases with a long front stub LongFront should be selected. StubType Both is not supported for InflationLinkedBonds. Supported string (enumeration) values are: [ShortFront, ShortBack, LongBack, LongFront, Both]. # noqa: E501
:param stub_type: The stub_type of this InflationLinkedBond. # noqa: E501
:type stub_type: str
"""
self._stub_type = stub_type
@property
def rounding_conventions(self):
"""Gets the rounding_conventions of this InflationLinkedBond. # noqa: E501
Rounding conventions for analytics, if any. # noqa: E501
:return: The rounding_conventions of this InflationLinkedBond. # noqa: E501
:rtype: list[lusid.RoundingConvention]
"""
return self._rounding_conventions
@rounding_conventions.setter
def rounding_conventions(self, rounding_conventions):
"""Sets the rounding_conventions of this InflationLinkedBond.
Rounding conventions for analytics, if any. # noqa: E501
:param rounding_conventions: The rounding_conventions of this InflationLinkedBond. # noqa: E501
:type rounding_conventions: list[lusid.RoundingConvention]
"""
self._rounding_conventions = rounding_conventions
@property
def instrument_type(self):
"""Gets the instrument_type of this InflationLinkedBond. # noqa: E501
The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashFlowsLeg, Unknown, TermDeposit, ContractForDifference, EquitySwap, CashPerpetual, CapFloor, CashSettled, CdsIndex, Basket, FundingLeg, FxSwap, ForwardRateAgreement, SimpleInstrument, Repo, Equity, ExchangeTradedOption, ReferenceInstrument, ComplexBond, InflationLinkedBond, InflationSwap, SimpleCashFlowLoan # noqa: E501
:return: The instrument_type of this InflationLinkedBond. # noqa: E501
:rtype: str
"""
return self._instrument_type
@instrument_type.setter
def instrument_type(self, instrument_type):
"""Sets the instrument_type of this InflationLinkedBond.
The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashFlowsLeg, Unknown, TermDeposit, ContractForDifference, EquitySwap, CashPerpetual, CapFloor, CashSettled, CdsIndex, Basket, FundingLeg, FxSwap, ForwardRateAgreement, SimpleInstrument, Repo, Equity, ExchangeTradedOption, ReferenceInstrument, ComplexBond, InflationLinkedBond, InflationSwap, SimpleCashFlowLoan # noqa: E501
:param instrument_type: The instrument_type of this InflationLinkedBond. # noqa: E501
:type instrument_type: str
"""
if self.local_vars_configuration.client_side_validation and instrument_type is None: # noqa: E501
raise ValueError("Invalid value for `instrument_type`, must not be `None`") # noqa: E501
allowed_values = ["QuotedSecurity", "InterestRateSwap", "FxForward", "Future", "ExoticInstrument", "FxOption", "CreditDefaultSwap", "InterestRateSwaption", "Bond", "EquityOption", "FixedLeg", "FloatingLeg", "BespokeCashFlowsLeg", "Unknown", "TermDeposit", "ContractForDifference", "EquitySwap", "CashPerpetual", "CapFloor", "CashSettled", "CdsIndex", "Basket", "FundingLeg", "FxSwap", "ForwardRateAgreement", "SimpleInstrument", "Repo", "Equity", "ExchangeTradedOption", "ReferenceInstrument", "ComplexBond", "InflationLinkedBond", "InflationSwap", "SimpleCashFlowLoan"] # noqa: E501
if self.local_vars_configuration.client_side_validation and instrument_type not in allowed_values: # noqa: E501
raise ValueError(
"Invalid value for `instrument_type` ({0}), must be one of {1}" # noqa: E501
.format(instrument_type, allowed_values)
)
self._instrument_type = instrument_type
def to_dict(self, serialize=False):
"""Returns the model properties as a dict"""
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = getfullargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, InflationLinkedBond):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, InflationLinkedBond):
return True
return self.to_dict() != other.to_dict()
| [
"[email protected]"
] | |
4a4cd0f556b488a78dbec904db4fe5b61c090926 | 0e083f405af00029c9ec31849f0f7f81c56844b5 | /configs/mmdet/detection/detection_onnxruntime_static.py | 486a734d9b14258efc894bacde418bb4ad0c5c85 | [
"Apache-2.0"
] | permissive | open-mmlab/mmdeploy | 39b9e7b611caab2c76a6142fcb99f0bf1d92ad24 | 5479c8774f5b88d7ed9d399d4e305cb42cc2e73a | refs/heads/main | 2023-09-01T21:29:25.315371 | 2023-08-31T09:59:29 | 2023-08-31T09:59:29 | 441,467,833 | 2,164 | 605 | Apache-2.0 | 2023-09-14T10:39:04 | 2021-12-24T13:04:44 | Python | UTF-8 | Python | false | false | 78 | py | _base_ = ['../_base_/base_static.py', '../../_base_/backends/onnxruntime.py']
| [
"[email protected]"
] | |
a11d1ec99d2df46d06b650b72e04650e59b214ac | a3d3fc52912e7a15ef03bda17acfc191a4536a07 | /arcade/examples/isometric_example.py | 3a25b2406c1cd8e8a58a921a2fb752fe6ea007ed | [
"MIT"
] | permissive | yasoob/arcade | 533a41ce3153ef711005ab02d3501b41a83e50aa | 5ef4858e09cf239f2808492ead8c1dd8fd6d47eb | refs/heads/master | 2020-08-21T19:23:25.886055 | 2019-10-18T21:36:36 | 2019-10-18T21:36:36 | 216,228,521 | 0 | 0 | NOASSERTION | 2019-10-19T15:33:19 | 2019-10-19T15:33:19 | null | UTF-8 | Python | false | false | 6,600 | py | """
Example of displaying an isometric map.
Isometric map created with Tiled Map Editor: https://www.mapeditor.org/
Tiles by Kenney: http://kenney.nl/assets/isometric-dungeon-tiles
If Python and Arcade are installed, this example can be run from the command line with:
python -m arcade.examples.isometric_example
"""
import arcade
import os
SPRITE_SCALING = 0.5
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 600
SCREEN_TITLE = "Isometric Example"
# How many pixels to keep as a minimum margin between the character
# and the edge of the screen.
VIEWPORT_MARGIN = 200
MOVEMENT_SPEED = 5
def read_sprite_list(grid, sprite_list):
for row in grid:
for grid_location in row:
if grid_location.tile is not None:
tile_sprite = arcade.Sprite(grid_location.tile.source, SPRITE_SCALING)
tile_sprite.center_x = grid_location.center_x * SPRITE_SCALING
tile_sprite.center_y = grid_location.center_y * SPRITE_SCALING
# print(f"{grid_location.tile.source} -- ({tile_sprite.center_x:4}, {tile_sprite.center_y:4})")
sprite_list.append(tile_sprite)
class MyGame(arcade.Window):
""" Main application class. """
def __init__(self, width, height, title):
"""
Initializer
"""
super().__init__(width, height, title)
# Set the working directory (where we expect to find files) to the same
# directory this .py file is in. You can leave this out of your own
# code, but it is needed to easily run the examples using "python -m"
# as mentioned at the top of this program.
file_path = os.path.dirname(os.path.abspath(__file__))
os.chdir(file_path)
# Sprite lists
self.all_sprites_list = None
# Set up the player
self.player_sprite = None
self.wall_list = None
self.floor_list = None
self.objects_list = None
self.player_list = None
self.view_bottom = 0
self.view_left = 0
self.my_map = None
def setup(self):
""" Set up the game and initialize the variables. """
# Sprite lists
self.player_list = arcade.SpriteList()
self.wall_list = arcade.SpriteList()
self.floor_list = arcade.SpriteList()
self.objects_list = arcade.SpriteList()
# noinspection PyDeprecation
self.my_map = arcade.read_tiled_map('dungeon.tmx', SPRITE_SCALING)
# Set up the player
self.player_sprite = arcade.Sprite("images/character.png", 0.4)
px, py = arcade.isometric_grid_to_screen(self.my_map.width // 2,
self.my_map.height // 2,
self.my_map.width,
self.my_map.height,
self.my_map.tilewidth,
self.my_map.tileheight)
self.player_sprite.center_x = px * SPRITE_SCALING
self.player_sprite.center_y = py * SPRITE_SCALING
self.player_list.append(self.player_sprite)
read_sprite_list(self.my_map.layers["Floor"], self.floor_list)
read_sprite_list(self.my_map.layers["Walls"], self.wall_list)
read_sprite_list(self.my_map.layers["Furniture"], self.wall_list)
# Set the background color
if self.my_map.backgroundcolor is None:
arcade.set_background_color(arcade.color.BLACK)
else:
arcade.set_background_color(self.my_map.backgroundcolor)
# Set the viewport boundaries
# These numbers set where we have 'scrolled' to.
self.view_left = 0
self.view_bottom = 0
def on_draw(self):
"""
Render the screen.
"""
# This command has to happen before we start drawing
arcade.start_render()
# Draw all the sprites.
self.floor_list.draw()
self.player_list.draw()
self.wall_list.draw()
def on_key_press(self, key, modifiers):
"""Called whenever a key is pressed. """
if key == arcade.key.UP:
self.player_sprite.change_y = MOVEMENT_SPEED
elif key == arcade.key.DOWN:
self.player_sprite.change_y = -MOVEMENT_SPEED
elif key == arcade.key.LEFT:
self.player_sprite.change_x = -MOVEMENT_SPEED
elif key == arcade.key.RIGHT:
self.player_sprite.change_x = MOVEMENT_SPEED
def on_key_release(self, key, modifiers):
"""Called when the user releases a key. """
if key == arcade.key.UP or key == arcade.key.DOWN:
self.player_sprite.change_y = 0
elif key == arcade.key.LEFT or key == arcade.key.RIGHT:
self.player_sprite.change_x = 0
def update(self, delta_time):
""" Movement and game logic """
# Call update on all sprites (The sprites don't do much in this
# example though.)
self.player_sprite.update()
# --- Manage Scrolling ---
# Track if we need to change the viewport
changed = False
# Scroll left
left_bndry = self.view_left + VIEWPORT_MARGIN
if self.player_sprite.left < left_bndry:
self.view_left -= left_bndry - self.player_sprite.left
changed = True
# Scroll right
right_bndry = self.view_left + SCREEN_WIDTH - VIEWPORT_MARGIN
if self.player_sprite.right > right_bndry:
self.view_left += self.player_sprite.right - right_bndry
changed = True
# Scroll up
top_bndry = self.view_bottom + SCREEN_HEIGHT - VIEWPORT_MARGIN
if self.player_sprite.top > top_bndry:
self.view_bottom += self.player_sprite.top - top_bndry
changed = True
# Scroll down
bottom_bndry = self.view_bottom + VIEWPORT_MARGIN
if self.player_sprite.bottom < bottom_bndry:
self.view_bottom -= bottom_bndry - self.player_sprite.bottom
changed = True
if changed:
self.view_left = int(self.view_left)
self.view_bottom = int(self.view_bottom)
arcade.set_viewport(self.view_left,
SCREEN_WIDTH + self.view_left,
self.view_bottom,
SCREEN_HEIGHT + self.view_bottom)
def main():
""" Main method """
window = MyGame(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
window.setup()
arcade.run()
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
71b070ebe37d27a064ccb16f22272cca112b34c1 | b3b1bdd2daffd372c97d9d11dc6b100bd688c9b1 | /src/criterion/divergence.py | 0b13074345617cecadd3115e367637df3d6eb60f | [] | no_license | jasonZhang892/audio_source_separation | edc9dffb9a82c45d6fd46372a634b8ea6573f0f3 | 1c29f49349d81962532bfaac29c189208e77e18a | refs/heads/main | 2023-06-06T07:34:59.716291 | 2021-05-25T07:19:14 | 2021-05-25T07:19:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,196 | py | import numpy as np
EPS=1e-12
def kl_divergence(input, target, eps=EPS):
    """Kullback-Leibler divergence KL(target || input), summed over axis 0.
    Args:
        input (C, *): estimated distribution.
        target (C, *): reference distribution.
    Returns:
        loss (*): divergence with the leading (channel) axis summed out.
    """
    _input = input + eps
    _target = target + eps
    ratio = _target / _input
    loss = _target * np.log(ratio)
    loss = loss.sum(axis=0)  # numpy arrays take axis=, not torch-style dim=
    return loss
def is_divergence(input, target, eps=EPS):
    """Itakura-Saito divergence, computed element-wise.
    Args:
        input (*): estimated values.
        target (*): reference values.
    """
    _input = input + eps
    _target = target + eps
    ratio = _target / _input
    loss = ratio - np.log(ratio) - 1
    return loss
def generalized_kl_divergence(input, target, eps=EPS):
    """Generalized (non-normalized) KL divergence, computed element-wise.
    Args:
        input (*): estimated values.
        target (*): reference values.
    """
    _input = input + eps
    _target = target + eps
    ratio = _target / _input
    loss = _target * np.log(ratio) + _input - _target
    return loss
def beta_divergence(input, target, beta=2):
    """Beta divergence, computed element-wise.
    Args:
        input (batch_size, *): estimated values.
        target (batch_size, *): reference values.
        beta (float): beta parameter; the beta -> 0 and beta -> 1 limits are
            the IS and generalized KL divergences, handled by the dedicated
            functions above.
    """
    beta_minus1 = beta - 1
    assert beta != 0, "Use is_divergence instead."
    assert beta_minus1 != 0, "Use generalized_kl_divergence instead."
    loss = target * (target**beta_minus1 - input**beta_minus1) / beta_minus1 - (target**beta - input**beta) / beta
    return loss
"[email protected]"
] | |
5a1d1526babe1aa52c0f2b0fc433efd22a8a5080 | 9b527131c291b735a163226d1daac2397c25b712 | /Lecture5/activity_sort.py | 0b3de48ee6555fdc9145628f7045924bdcce0a1e | [] | no_license | arnabs542/BigO-Coding-material | dbc8895ec6370933069b2e40e0610d4b05dddcf2 | 3b31bddb1240a407aa22f8eec78956d06b42efbc | refs/heads/master | 2022-03-19T18:32:53.667852 | 2019-11-27T23:55:04 | 2019-11-27T23:55:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | class Activity:
def __init__(self, start, finish):
self.start = start
self.finish = finish
def activity_selection(a):
    # Greedy strategy: sort by finish time, then keep every activity whose
    # start is not earlier than the finish of the last selected one.
    a.sort(key=lambda activity: activity.finish)
    selected = [a[0]]
    i = 0
    for j in range(1, len(a)):
        if a[j].start >= a[i].finish:
            selected.append(a[j])
            i = j
    return selected
def print_activities(result):
for activity in result:
print('{} - {}'.format(activity.start, activity.finish))
if __name__ == '__main__':
a = []
res = []
n = int(input())
for i in range(n):
s, f = map(int, input().split())
a.append(Activity(s, f))
activity_selection(a)
print_activities(res)
| [
"[email protected]"
] | |
6aeb485e5f218bf264cd0085a6de182171b62a9e | 5e8d86f6ddfd516b9768e8617ced0baca8112f4c | /core-python/Core_Python/csvfile/FinalAssessment_CSV.py | 14c8146f4b97477600694bda120bf1e7ee0c9dcd | [
"MIT"
] | permissive | bharat-kadchha/tutorials | 0a96ce5a3da1a0ceb39a0d464c8f3e2ff397da7c | cd77b0373c270eab923a6db5b9f34c52543b8664 | refs/heads/master | 2022-12-23T11:49:34.042820 | 2020-10-06T03:51:20 | 2020-10-06T03:51:20 | 272,891,375 | 1 | 0 | MIT | 2020-06-17T06:04:33 | 2020-06-17T06:04:33 | null | UTF-8 | Python | false | false | 1,195 | py | import csv, os
# change your parent dir accordingly
parent_dir = "E:/GitHub/1) Git_Tutorials_Repo_Projects/core-python/Core_Python/ExCsvFiles"
def read_employees(csv_file_location):
csv.register_dialect('empDialect', skipinitialspace=True, strict=True)
employee_file = csv.DictReader(open(csv_file_location), dialect='empDialect')
employee_list = []
for data in employee_file:
employee_list.append(data)
return employee_list
def process_data(employee_list):
department_list = []
for employee_data in employee_list:
department_list.append(employee_data['Department'])
department_data = {}
for department_name in set(department_list):
department_data[department_name] = department_list.count(department_name)
return department_data
def write_report(dictionary, report_file):
with open(report_file, "w+") as f:
for k in sorted(dictionary):
f.write(str(k) + ':' + str(dictionary[k]) + '\n')
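# Assumed shape of Employees.csv (hedged: only the 'Department' column is actually
# required by process_data above, the other columns are illustrative):
#   Full Name, Username, Department
#   Audrey Miller, audrey, Development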
employee_list = read_employees(os.path.join(parent_dir,'Employees.csv'))
dictionary = process_data(employee_list)
write_report(dictionary, os.path.join(parent_dir,'Report.txt'))
print("Report Generated Suuccesfully") | [
"[email protected]"
] | |
5b918c1178365622b9af53d1dba706c1fe65ecc0 | 41cd1bcff0166ed3aab28a183a2837adaa2d9a07 | /allauth/socialaccount/providers/openid/models.py | 9103b37e93f41b94d02b6bbbb938e9389ebdf298 | [
"MIT"
] | permissive | thomaspurchas/django-allauth | 694dde8615b90cd4768e7f9eda79fdcf6fe3cdb6 | d7a8b9e13456180648450431057a206afa689373 | refs/heads/master | 2022-02-04T03:18:25.851391 | 2013-05-20T11:26:55 | 2013-05-20T11:26:55 | 7,754,028 | 1 | 0 | MIT | 2022-02-01T23:04:02 | 2013-01-22T14:44:56 | Python | UTF-8 | Python | false | false | 755 | py | from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class OpenIDStore(models.Model):
server_url = models.CharField(max_length=255)
handle = models.CharField(max_length=255)
secret = models.TextField()
issued = models.IntegerField()
lifetime = models.IntegerField()
assoc_type = models.TextField()
def __str__(self):
return self.server_url
@python_2_unicode_compatible
class OpenIDNonce(models.Model):
server_url = models.CharField(max_length=255)
timestamp = models.IntegerField()
salt = models.CharField(max_length=255)
date_created = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.server_url
| [
"[email protected]"
] | |
fbae567a999df749f8ce0f277919dae770888389 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Detectron/tools/generate_testdev_from_test.py | 1613d2313d82d50ece48e2e046b19b7f79711bdd | [
"MIT",
"Apache-2.0"
] | permissive | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:9683898a2074bbd67574a333a6c8d61a3e5cb1ab42b6e728fab7477d6670f1ce
size 3243
| [
"[email protected]"
] | |
4c144aed67166564a3d15f170ade14626dc5a098 | 46d8a9446d9f52136736cdeb54f7fc7a23639f10 | /ppasr/model_utils/deepspeech2/model.py | 30ae326f08d9c7597c22f54bb040653bd172fa2b | [
"Apache-2.0"
] | permissive | buyersystem/PPASR | 19b23ff490cdab79fdaa43c0eea4af8ca9b4787d | 73edad5e136bf606b0a9b429a09a41716079afd1 | refs/heads/master | 2023-08-28T06:15:50.724115 | 2021-10-27T09:57:20 | 2021-10-27T09:57:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,847 | py | from paddle import nn
from ppasr.model_utils.deepspeech2.conv import ConvStack
from ppasr.model_utils.deepspeech2.rnn import RNNStack
__all__ = ['DeepSpeech2Model']
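# Minimal usage sketch (added, hedged: assumed shapes and values, not from the original project):
#   import paddle
#   model = DeepSpeech2Model(feat_size=161, vocab_size=4300)
#   audio = paddle.randn([4, 161, 300])                               # [B, D, Tmax]
#   audio_len = paddle.to_tensor([300, 280, 260, 240], dtype='int64') # [B]
#   logits, x_lens = model(audio, audio_len)                          # [B, T, vocab_size], [B]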
class DeepSpeech2Model(nn.Layer):
"""DeepSpeech2模型结构
:param feat_size: 输入的特征大小
:type feat_size: int
:param vocab_size: 字典的大小,用来分类输出
:type vocab_size: int
:param num_conv_layers: 堆叠卷积层数
:type num_conv_layers: int
:param num_rnn_layers: 堆叠RNN层数
:type num_rnn_layers: int
:param rnn_size: RNN层大小
:type rnn_size: int
:return: DeepSpeech2模型
:rtype: nn.Layer
"""
def __init__(self, feat_size, vocab_size, num_conv_layers=3, num_rnn_layers=3, rnn_size=1024):
super().__init__()
        # Stack of convolution layers
        self.conv = ConvStack(feat_size, num_conv_layers)
        # Stack of RNN layers
        i_size = self.conv.output_height
        self.rnn = RNNStack(i_size=i_size, h_size=rnn_size, num_stacks=num_rnn_layers)
        # Layers producing the classification output
self.bn = nn.BatchNorm1D(rnn_size * 2, data_format='NLC')
self.fc = nn.Linear(rnn_size * 2, vocab_size)
def forward(self, audio, audio_len):
"""
Args:
audio (Tensor): [B, D, Tmax]
audio_len (Tensor): [B, Umax]
Returns:
logits (Tensor): [B, T, D]
x_lens (Tensor): [B]
"""
# [B, D, T] -> [B, C=1, D, T]
x = audio.unsqueeze(1)
x, x_lens = self.conv(x, audio_len)
        # Convert the data from convolution feature maps into a sequence of vectors
        x = x.transpose([0, 3, 1, 2])  # [B, T, C, D]
        x = x.reshape([0, 0, -1])  # [B, T, C*D]
        # Drop the padded part
x = self.rnn(x, x_lens) # [B, T, D]
x = self.bn(x)
logits = self.fc(x)
return logits, x_lens | [
"[email protected]"
] | |
496e630dfed3f36556a00508c7fe6aacb46501f0 | ba895ee2765b60ddf2da15307f038c6a884da4ec | /month02/day15/ftp/tcp_client.py | 751a0c6cd45e84de2df6f55247a2d865a3c382a1 | [] | no_license | jay0613/2020-0720-note | dc53831b829f7e7437fc57937eef38ab9e3942e9 | 7b2babd30a4dd9897b7853527a07e8a8fe2ba3ea | refs/heads/master | 2022-12-06T17:01:19.542832 | 2020-08-22T10:39:06 | 2020-08-22T10:39:06 | 281,112,932 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,350 | py | from socket import *
from time import sleep
ADDR = ("124.70.187.114",8888)
dir = "/home/tarena/month02/day15/ftp/"
class FTPClient():
def __init__(self,s):
super().__init__()
self.s = s
def do_list(self):
self.s.send("LIST".encode())
data = self.s.recv(128).decode()
if data =="OK":
file = self.s.recv(1024).decode()
print(file)
else:
print("文件库为空!")
def do_get(self,filename):
data = "RETR "+filename
self.s.send(data.encode())
result = self.s.recv(128)
if result.decode() == "OK":
f = open(dir + filename, "wb")
while True:
data = self.s.recv(1024)
if data == b"##":
break
f.write(data)
f.close()
else:
print("文件不存在!")
def do_put(self, filename):
data = "STOR "+filename
self.s.send(data.encode())
result = self.s.recv(1024)
if result == b"OK":
try:
f = open(dir+filename,"rb")
except:
print("该文件不存在!")
self.s.send(b"NO")
return
else:
while True:
data = f.read(1024)
if not data:
sleep(0.1)
self.s.send(b"##")
break
self.s.send(data)
f.close()
else:
print("文件已存在!")
def main():
s = socket(AF_INET,SOCK_STREAM)
s.connect(ADDR)
t = FTPClient(s)
while True:
print("============ 命令选项==============")
print("*** list ***")
print("*** get file ***")
print("*** put file ***")
print("*** exit ***")
print("==================================")
cmd = input("请输入命令:")
if cmd == "list":
t.do_list()
elif cmd[:3] == "get":
filename = cmd.split(" ")[-1]
t.do_get(filename)
elif cmd[:3] == "put":
filename = cmd.split(" ")[-1]
t.do_put(filename)
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
273de56721095e1a544df0c7125213c4ee89326c | d043a51ff0ca2f9fb3943c3f0ea21c61055358e9 | python3网络爬虫开发实战/Xpath/xpath1.py | 2cfe82c6a2a22b29961311f089129e8ab0fdd732 | [] | no_license | lj1064201288/dell_python | 2f7fd9dbcd91174d66a2107c7b7f7a47dff4a4d5 | 529985e0e04b9bde2c9e0873ea7593e338b0a295 | refs/heads/master | 2020-03-30T03:51:51.263975 | 2018-12-11T13:21:13 | 2018-12-11T13:21:13 | 150,707,725 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 841 | py | # Import the etree module from the lxml library
from lxml import etree
# Declare a piece of HTML text (left intentionally malformed to show lxml's auto-correction)
text = '''
<div>
<ul>
<li class="item-0"><a href="link1.html">first item</a></li>
<li class="item-1"><a href="link2.html">second item</a></li>
<li class="item-inactve"><a href="link3.html">third item</a></li>
<li class="item-1"><a href="link4.html">fourth item</a></li>
<li class="item-0"><a href="link5.html">fifth item</a></li>
<ul>
</div>
'''
# Call the HTML class to initialize; this successfully constructs an XPath parsing object
html = etree.HTML(text)
# Call tostring() to output the corrected HTML code, but the result is of bytes type
result = etree.tostring(html)
# Use the decode() method here to turn it into str type
print(result.decode('utf-8'))
html = etree.parse('./text.html', etree.HTMLParser())
result = etree.tostring(html)
print(result.decode('utf-8'))
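# Added example of an actual XPath query on the parsed tree (hedged: assumes
# ./text.html contains the same <li><a> list markup as the inline text above):
print(html.xpath('//li/a/text()'))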
| [
"[email protected]"
] | |
e15e7dfe43813e6f6539b4794a7a1eaa7fd966cd | 7c45efb5a5c66305d7c4ba8994d3b077612df109 | /friday/apps/users/views.py | 57b0f5d3deb51bc1e94b7cf72bd56f04d2f251a1 | [] | no_license | globedasher/django-dojo | c245f35b276402b6df6205a8612deb0089d34612 | dc27d289b8986b4fb910ef42f7bf483c385a3b4e | refs/heads/master | 2020-07-30T04:04:02.054414 | 2018-05-01T04:32:05 | 2018-05-01T04:32:05 | 73,635,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,858 | py | from django.shortcuts import render, redirect, HttpResponse
from django.core.urlresolvers import reverse
from django.contrib import messages
import bcrypt
from .models import User
def index(request):
try:
all_users = User.objects.all()
context = { 'all_users': all_users }
return render(request, 'users/index.html', context)
except:
return render(request, 'users/index.html')
"""
The tuple_return in the login and register functions returns True at index 0 and a
User object at index 1 if the email passes the regex match. If the email fails the
regex, the tuple comes back as False at [0] and a list of error messages at [1].
"""
def login(request):
if request.method == 'POST':
tuple_return = User.objects.login(request.POST['email'],
request.POST['password'])
# tuple_return[0] is false if email didn't pass regex
if tuple_return[0] == False:
messages.error(request, "Login errors:")
# Here, tuple_retun[1] is a list of error messages to flash to user
for item in tuple_return[1]:
messages.error(request, item)
return redirect(reverse('login:index'))
        # tuple_return[0] is True if the email passed regex and the login succeeded
elif tuple_return[0] == True:
request.session['id'] = tuple_return[1].id
request.session['alias'] = tuple_return[1].alias
request.session['name'] = tuple_return[1].name
messages.success(request, "Successful login!")
return redirect(reverse('poke:index'))
else:
messages.error(request, "Incorrect Http request.")
return redirect(reverse('login:index'))
def logout(request):
del request.session['id']
del request.session['alias']
del request.session['name']
return redirect(reverse('login:index'))
def register(request):
if request.method == 'POST':
tuple_return = User.objects.register(request.POST)
# tuple_return[0] is false if email didn't pass regex
if tuple_return[0] == False:
messages.error(request, "Registration errors:")
# Here, tuple_retun[1] is a list of error messages to flash to user
for item in tuple_return[1]:
messages.error(request, item)
return redirect(reverse('login:index'))
        # tuple_return[0] is True if the email passed regex and the registration succeeded
elif tuple_return[0] == True:
request.session['id'] = tuple_return[1].id
request.session['alias'] = tuple_return[1].alias
request.session['name'] = tuple_return[1].name
messages.success(request, "Successful registration!")
return redirect(reverse('poke:index'))
else:
messages.error(request, "Incorrect Http request.")
return redirect(reverse('login:index'))
| [
"[email protected]"
] | |
73fb490420320edf221354588547c2a78de90043 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03222/s439293787.py | 607583db9ccf522b5a6afe3b8aea692cb031ffb6 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,510 | py | from collections import defaultdict, deque, Counter
from heapq import heappush, heappop, heapify
import math
import bisect
import random
from itertools import permutations, accumulate, combinations, product
import sys
import string
from bisect import bisect_left, bisect_right
from math import factorial, ceil, floor
from operator import mul
from functools import reduce
sys.setrecursionlimit(2147483647)
INF = float('inf')
def LI(): return list(map(int, sys.stdin.readline().split()))
def I(): return int(sys.stdin.readline())
def LS(): return sys.stdin.readline().split()
def S(): return sys.stdin.readline().strip()
def IR(n): return [I() for i in range(n)]
def LIR(n): return [LI() for i in range(n)]
def SR(n): return [S() for i in range(n)]
def LSR(n): return [LS() for i in range(n)]
def SRL(n): return [list(S()) for i in range(n)]
def MSRL(n): return [[int(j) for j in list(S())] for i in range(n)]
mod = 1000000007
def fib(n):
a, b = 1, 1
fib_list = [1] * n
for i in range(n - 2):
a, b = a + b, a
fib_list[i + 2] = a
return fib_list
h, w, k = LI()
L = [0] + fib(w + 1)
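# (Hedged reading of this solution) Amidakuji-style DP: L[m] counts the valid
# horizontal-bar patterns across m vertical lines (no two adjacent bars -> Fibonacci
# numbers), and dp[i][j] counts ladders whose first i rows bring the path from line 0
# to line j (0-indexed); each transition multiplies the independent pattern counts of
# the segments to the left and right of the (possibly used) bar.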
dp = [[0] * w for _ in range(h + 1)]
dp[0][0] = 1
for i in range(h):
for j in range(w):
dp[i + 1][j - 1] = (dp[i + 1][j - 1] + dp[i][j] * L[j] * L[w - j]) % mod
if j + 1 < w:
dp[i + 1][j + 1] = (dp[i + 1][j + 1] + dp[i][j] * L[j + 1] * L[w - j - 1]) % mod
dp[i + 1][j] = (dp[i + 1][j] + dp[i][j] * L[j + 1] * L[w - j]) % mod
print(dp[-1][k - 1])
| [
"[email protected]"
] | |
b9a468c666bd6ef59366f96331d18998c6230387 | ccf94dcb6b1500fcbbd56964ae8c4832a496b8b3 | /python/baiduads-sdk-auto/baiduads/materialproduct/model/delete_product_response_wrapper.py | 543650fd904c0930db1dd7a3d4ce66501c281754 | [
"Apache-2.0"
] | permissive | baidu/baiduads-sdk | 24c36b5cf3da9362ec5c8ecd417ff280421198ff | 176363de5e8a4e98aaca039e4300703c3964c1c7 | refs/heads/main | 2023-06-08T15:40:24.787863 | 2023-05-20T03:40:51 | 2023-05-20T03:40:51 | 446,718,177 | 16 | 11 | Apache-2.0 | 2023-06-02T05:19:40 | 2022-01-11T07:23:17 | Python | UTF-8 | Python | false | false | 11,661 | py | """
dev2 api schema
'dev2.baidu.com' api schema # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from baiduads.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from baiduads.exceptions import ApiAttributeError
def lazy_import():
from baiduads.common.model.api_response_header import ApiResponseHeader
from baiduads.materialproduct.model.delete_product_response_wrapper_body import DeleteProductResponseWrapperBody
globals()['ApiResponseHeader'] = ApiResponseHeader
globals()['DeleteProductResponseWrapperBody'] = DeleteProductResponseWrapperBody
class DeleteProductResponseWrapper(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'header': (ApiResponseHeader,), # noqa: E501
'body': (DeleteProductResponseWrapperBody,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'header': 'header', # noqa: E501
'body': 'body', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""DeleteProductResponseWrapper - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
header (ApiResponseHeader): [optional] # noqa: E501
body (DeleteProductResponseWrapperBody): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""DeleteProductResponseWrapper - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
header (ApiResponseHeader): [optional] # noqa: E501
body (DeleteProductResponseWrapperBody): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| [
"[email protected]"
] | |
f23a7b75896c71cc8827fcfc02a8c5e3a5c1c7ad | a04aff1baf3dac3ad96fd78e90b0de357140db84 | /blizzard/stair_steps.py | 1d23e481ee56095938a39eda968b42996666736c | [
"MIT"
] | permissive | NigrumAquila/py_checkio | f4f66fe8e87ba88d4e9258a55521902541ca33ba | df437c2c3ad325d84714665000e3299a70e91f32 | refs/heads/master | 2022-07-30T04:33:42.107806 | 2020-05-10T09:57:58 | 2020-05-10T09:57:58 | 262,756,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 108 | py | checkio = lambda nums: max([n + checkio(nums[i:]) for i, n in zip((1, 2), nums[:2] + [0])] if nums else [0]) | [
"[email protected]"
] | |
9bebcc1eb78e239d67cbec9156fcd58d8bda1048 | bb11350c9f600d0021d81f7f6ee8c2cc5961a5f0 | /ZKDEMO/01_dule_color_led.py | 213c3601343a8b57ed3d9346195e481f1dea9bf8 | [] | no_license | atiger808/raspberry-tutorial | 045178877a456908aa8ce764aad62159d674ae79 | e877939fd1c72e09ce20497f08d854872588e1ef | refs/heads/master | 2022-12-25T19:35:08.378370 | 2020-10-14T09:56:15 | 2020-10-14T09:56:15 | 303,970,623 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,306 | py | import RPi.GPIO as GPIO
import time
color = [0xff00, 0x00ff, 0x0ff0, 0xf00f]
Rpin = 12
Gpin = 13
def setup():
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(Rpin, GPIO.OUT)
GPIO.setup(Gpin, GPIO.OUT)
GPIO.output(Rpin,GPIO.LOW)
GPIO.output(Gpin, GPIO.LOW)
global p_R, p_G
p_R = GPIO.PWM(Rpin, 2000)
p_G = GPIO.PWM(Gpin, 2000)
p_R.start(0)
p_G.start(0)
def map(x, in_min, in_max, out_min, out_max):
return (x - in_min)*(out_max - out_min)/(in_max - in_min) + out_min
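# (added note) This helper shadows the built-in map(); it linearly rescales a value
# from one range to another, e.g. map(128, 0, 255, 0, 100) -> ~50.2, which is used
# below to turn a colour byte into a PWM duty-cycle percentage.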
def setColor(col):
    R_val = col >> 8      # high byte of the 16-bit colour -> red duty value
    G_val = col & 0x00ff  # low byte -> green duty value
R_val = map(R_val, 0, 255, 0, 100)
G_val = map(G_val, 0, 255, 0, 100)
p_R.ChangeDutyCycle(R_val)
p_G.ChangeDutyCycle(G_val)
def bright(x):
GPIO.output(Rpin, 1)
GPIO.output(Gpin, 1)
p_R.ChangeDutyCycle(100)
p_G.ChangeDutyCycle(100)
time.sleep(x)
GPIO.output(Rpin, GPIO.LOW)
GPIO.output(Gpin, GPIO.LOW)
time.sleep(x)
def loop():
while True:
for col in color:
setColor(col)
time.sleep(0.5)
def destroy():
p_R.stop()
p_G.stop()
GPIO.output(Rpin, GPIO.LOW)
GPIO.cleanup()
if __name__ == '__main__':
setup()
time.sleep(5)
try:
loop()
except KeyboardInterrupt:
destroy()
| [
"[email protected]"
] | |
f47296e7b93017d932670a09a376cf7d21d82114 | 856f43a69bf77e02803cf5ea8723fe5d7c044ae9 | /pasarkita/urls.py | e68645318fa1c599e32d5172f5155cda451d0bf0 | [] | no_license | yeremiaChris/pasarkitaapp | 15cf6e21bbe262ac9360f38ab4bc0cd92f563708 | 76d190e2f6bdbf60e07575051f17fc8213cf01c4 | refs/heads/master | 2022-11-30T10:39:57.655765 | 2020-07-22T07:58:14 | 2020-07-22T07:58:14 | 279,835,842 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 278 | py | from django.urls import path
from django.contrib.auth import views as view
from . import views
urlpatterns = [
path('', views.index,name='index'),
path('register', views.register,name='register'),
path('tambah-barang', views.tambahBarang,name='tambah-barang'),
]
| [
"[email protected]"
] | |
de1313630c3a57694c37f1732d0c5f4bf47a9d68 | 27533f26cdcce35388f21755d0c53b1b1fe4d130 | /beginner level/digitscheck.py | d1a4ad093c89e9fac5e2fdb2c3af62a39b406c00 | [] | no_license | ramyasutraye/pythonprogram-1 | f89d678fb8e60bbebb25f7a45d93e520e13103c5 | 0f4b4566a82b6d4d494837e62f1748166855eb85 | refs/heads/master | 2020-04-24T01:30:42.968413 | 2018-05-01T04:20:22 | 2018-05-01T04:20:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 86 | py | a=input("enter the input")
if a.isdigit():
    print("True")
else:
    print("False")
| [
"[email protected]"
] | |
e9f7910d9c8ada08ccb51e63663df4796f5e7d92 | 1857aaf614cba134941b45880f30da8482b7d7b2 | /api/urls.py | 2d9524a315813aa4f71505c7983077fa8d2bf1ec | [] | no_license | HavingNoFuture/pd-diplom | ec92a372a26182d893364dbd16e51ff55208d269 | 849f139d21c2fcd35e0a9f0b4512d09276b73dc5 | refs/heads/master | 2020-06-16T18:49:28.804376 | 2019-11-22T15:47:41 | 2019-11-22T15:47:41 | 195,668,884 | 0 | 1 | null | 2019-07-07T15:42:24 | 2019-07-07T15:42:24 | null | UTF-8 | Python | false | false | 1,675 | py | """orders URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from rest_framework import routers
from rest_framework.schemas import get_schema_view
from django.urls import path, include
from api import views
state_detail = views.StateViewSet.as_view({
'get': 'retrieve',
'post': 'update_state_detail',
})
state_list = views.StateViewSet.as_view({
'get': 'list',
'post': 'update_state_list',
})
price_update = views.PriceUpdateViewSet.as_view({
'post': 'update_price',
})
urlpatterns = [
path('', include('djoser.urls')),
path('', include('djoser.urls.authtoken')),
path('partner/state/<int:pk>/', state_detail, name='state-detail'),
path('partner/state/', state_list, name='state-list'),
path('partner/update/', price_update, name='price-update'),
path('openapi', get_schema_view(
title="Shop API",
description="API for all things …",
# version="1.0.0",
urlconf='api.urls'
), name='openapi-schema'),
]
router = routers.SimpleRouter()
router.register(r'partner/order', views.OrderViewSet)
urlpatterns += router.urls
| [
"[email protected]"
] | |
6fb534f40527aab8cac971d8f3b32dd73637e3d7 | f0becfb4c3622099ce3af2fad5b831b602c29d47 | /django/myvenv/bin/epylint | 803f74f6fb86daa6e4d5a0f62e132c49186b0875 | [
"MIT"
] | permissive | boostcamp-2020/relay_06 | 9fe7c1c722405d0916b70bb7b734b7c47afff217 | a2ecfff55572c3dc9262dca5b4b2fc83f9417774 | refs/heads/master | 2022-12-02T05:51:04.937920 | 2020-08-21T09:22:44 | 2020-08-21T09:22:44 | 282,153,031 | 4 | 12 | MIT | 2022-11-27T01:13:40 | 2020-07-24T07:29:18 | Python | UTF-8 | Python | false | false | 255 | #!/Users/kobyunghwa/relay_06/django/myvenv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pylint import run_epylint
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(run_epylint())
| [
"[email protected]"
] | ||
8e8c3eb0849e2651e56d2c0ba0c6382c180a3c9a | 50ea2988e1c0dd20bee544d9185608446c681c7b | /app/snippets/urls/drf_generic_cbv.py | 4eadb91373042a452e86ad15eb65d3af793d685c | [] | no_license | wps9th-mongkyo/drf-tutorial | b725c3de77d4bc9c7ce12fa742b967ef6736369d | 18bd3cede77254a8893c8f429aa4f24afe765ccb | refs/heads/master | 2020-04-06T10:06:18.759284 | 2018-11-14T16:49:07 | 2018-11-14T16:49:07 | 157,368,423 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 312 | py | from django.urls import path
from ..views.drf_generic_cbv import *
from ..views.user import *
urlpatterns = [
path('snippets/', SnippetList.as_view()),
path('snippets/<int:pk>', SnippetDetail.as_view()),
path('users/', UserListView.as_view()),
path('users/<int:pk>', UserDetailView.as_view()),
] | [
"[email protected]"
] | |
8994a33fe39373b23b98f5bffcc40a7cbe8c40bf | 7f3205f78ae92a5b3341d458449789e38c9e7ede | /packages/fetchai/protocols/http/serialization.py | 533320118e37d64fae8803351f0958a63a256a12 | [
"Apache-2.0"
] | permissive | greencultureai/agents-aea | d9537cf440387cd8e9c29b2451f9f67a4b5c35f2 | bc4f65fc749e9cd628f3d0f91bba3d522bce82e4 | refs/heads/master | 2021-03-07T03:55:31.340188 | 2020-03-09T14:18:13 | 2020-03-09T14:18:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,143 | py | # -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2020 fetchai
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""Serialization module for http protocol."""
from typing import cast
from aea.protocols.base import Message
from aea.protocols.base import Serializer
from packages.fetchai.protocols.http import http_pb2
from packages.fetchai.protocols.http.message import HttpMessage
class HttpSerializer(Serializer):
"""Serialization for the 'http' protocol."""
def encode(self, msg: Message) -> bytes:
"""
Encode a 'Http' message into bytes.
:param msg: the message object.
:return: the bytes.
"""
msg = cast(HttpMessage, msg)
http_msg = http_pb2.HttpMessage()
http_msg.message_id = msg.message_id
dialogue_reference = msg.dialogue_reference
http_msg.dialogue_starter_reference = dialogue_reference[0]
http_msg.dialogue_responder_reference = dialogue_reference[1]
http_msg.target = msg.target
performative_id = msg.performative
if performative_id == HttpMessage.Performative.REQUEST:
performative = http_pb2.HttpMessage.Request() # type: ignore
method = msg.method
performative.method = method
url = msg.url
performative.url = url
version = msg.version
performative.version = version
headers = msg.headers
performative.headers = headers
bodyy = msg.bodyy
performative.bodyy = bodyy
http_msg.request.CopyFrom(performative)
elif performative_id == HttpMessage.Performative.RESPONSE:
performative = http_pb2.HttpMessage.Response() # type: ignore
version = msg.version
performative.version = version
status_code = msg.status_code
performative.status_code = status_code
status_text = msg.status_text
performative.status_text = status_text
headers = msg.headers
performative.headers = headers
bodyy = msg.bodyy
performative.bodyy = bodyy
http_msg.response.CopyFrom(performative)
else:
raise ValueError("Performative not valid: {}".format(performative_id))
http_bytes = http_msg.SerializeToString()
return http_bytes
def decode(self, obj: bytes) -> Message:
"""
Decode bytes into a 'Http' message.
:param obj: the bytes object.
:return: the 'Http' message.
"""
http_pb = http_pb2.HttpMessage()
http_pb.ParseFromString(obj)
message_id = http_pb.message_id
dialogue_reference = (
http_pb.dialogue_starter_reference,
http_pb.dialogue_responder_reference,
)
target = http_pb.target
performative = http_pb.WhichOneof("performative")
performative_id = HttpMessage.Performative(str(performative))
performative_content = dict()
if performative_id == HttpMessage.Performative.REQUEST:
method = http_pb.request.method
performative_content["method"] = method
url = http_pb.request.url
performative_content["url"] = url
version = http_pb.request.version
performative_content["version"] = version
headers = http_pb.request.headers
performative_content["headers"] = headers
bodyy = http_pb.request.bodyy
performative_content["bodyy"] = bodyy
elif performative_id == HttpMessage.Performative.RESPONSE:
version = http_pb.response.version
performative_content["version"] = version
status_code = http_pb.response.status_code
performative_content["status_code"] = status_code
status_text = http_pb.response.status_text
performative_content["status_text"] = status_text
headers = http_pb.response.headers
performative_content["headers"] = headers
bodyy = http_pb.response.bodyy
performative_content["bodyy"] = bodyy
else:
raise ValueError("Performative not valid: {}.".format(performative_id))
return HttpMessage(
message_id=message_id,
dialogue_reference=dialogue_reference,
target=target,
performative=performative,
**performative_content
)
| [
"[email protected]"
] | |
dc0d540c206b2cbbf1c166e46318470ac644409b | 316c473d020f514ae81b7485b10f6556cf914fc0 | /urllib/parse/demo4.py | 2b21849b6000934339e662c8417960f368438b53 | [
"Apache-2.0"
] | permissive | silianpan/seal-spider-demo | ca96b12d4b6fff8fe57f8e7822b7c0eb616fc7f3 | 7bdb77465a10a146c4cea8ad5d9ac589c16edd53 | refs/heads/master | 2023-06-20T03:47:04.572721 | 2023-05-24T06:27:13 | 2023-05-24T06:27:13 | 189,963,452 | 1 | 1 | Apache-2.0 | 2022-12-08T03:24:54 | 2019-06-03T08:15:56 | Python | UTF-8 | Python | false | false | 132 | py | from urllib.parse import urlparse
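# With allow_fragments=False, the '#comment' part is not split off: it stays in the
# path component and the fragment field of the ParseResult is left empty.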
result = urlparse('http://www.baidu.com/index.html#comment', allow_fragments=False)
print(result) | [
"[email protected]"
] | |
2e5487930aa047da633bb9abe095cf99646b32ad | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_392/ch24_2019_09_11_13_10_29_212592.py | 7d5dbd4fe3971f1941fce8860d74aef5c4a84a36 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 212 | py |
def classifica_triangulo(a,b,c):
if a==b and a==c:
return 'equilátero'
    elif a==b or b==c or a==c:
return 'isósceles'
else:
return 'escaleno'
| [
"[email protected]"
] | |
0d93dd2e7128c469c539a4bbc00ad886d5180dcb | 0366bccae8841bbf6ecaad70660aae89bb0f6394 | /8_Tuples/1_tuple_types.py | 8bfd986aaff001145630fdba8e0cc3a6ac27ce50 | [] | no_license | KobiShashs/Python | 8a5bdddcaef84b455795c5393cbacee5967493f7 | e748973ad0b3e12c5fb87648783531783282832a | refs/heads/master | 2021-04-05T20:18:57.715805 | 2020-04-02T21:51:44 | 2020-04-02T21:51:44 | 248,597,057 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 151 | py | one_value_tuple = (20)
print(one_value_tuple)
print(type(one_value_tuple))  # <class 'int'>: (20) is just a parenthesized integer
one_value_tuple = (20,)
print(one_value_tuple)
print(type(one_value_tuple))  # <class 'tuple'>: the trailing comma is what makes a tuple | [
"[email protected]"
] | |
d4d21cfed010e88eb85bf3c40848a3fb92c8625b | 9d278285f2bc899ac93ec887b1c31880ed39bf56 | /ondoc/cart/migrations/0007_auto_20190326_1349.py | 34d1c28b4f17a8472690bb6cee2e9a32d2efd8bc | [] | no_license | ronit29/docprime | 945c21f8787387b99e4916cb3ba1618bc2a85034 | 60d4caf6c52a8b70174a1f654bc792d825ba1054 | refs/heads/master | 2023-04-01T14:54:10.811765 | 2020-04-07T18:57:34 | 2020-04-07T18:57:34 | 353,953,576 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | # Generated by Django 2.0.5 on 2019-03-26 08:19
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cart', '0006_merge_20190326_1307'),
]
operations = [
migrations.AlterField(
model_name='cart',
name='product_id',
field=models.IntegerField(choices=[(1, 'Doctor Appointment'), (2, 'LAB_PRODUCT_ID'), (3, 'INSURANCE_PRODUCT_ID'), (4, 'SUBSCRIPTION_PLAN_PRODUCT_ID')]),
),
]
| [
"[email protected]"
] | |
bc0b53d1af3b5f30447b9706d33fd91793b72d47 | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /see_day/little_eye_and_fact/government/government_and_day/work_time.py | 7733864285181ed2b0aca7b41229ba4587d52094 | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 273 | py |
#! /usr/bin/env python
def same_point(str_arg):
feel_public_problem_from_few_problem(str_arg)
print('want_last_life')
def feel_public_problem_from_few_problem(str_arg):
print(str_arg)
if __name__ == '__main__':
same_point('know_different_hand_of_time')
| [
"[email protected]"
] | |
00e165ed69dc9db91fe96614fa3c7a6ff23bd55e | 6d25434ca8ce03f8fef3247fd4fc3a1707f380fc | /[0140][Hard][Word_Break_II]/Word_Break_II.py | 2abe1aaa6de1356b8dd434ee82f875c245a499e5 | [] | no_license | sky-dream/LeetCodeProblemsStudy | 145f620e217f54b5b124de09624c87821a5bea1b | e0fde671cdc9e53b83a66632935f98931d729de9 | refs/heads/master | 2020-09-13T08:58:30.712604 | 2020-09-09T15:54:06 | 2020-09-09T15:54:06 | 222,716,337 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,529 | py | # -*- coding: utf-8 -*-
# leetcode time cost : 50 ms
# leetcode memory cost : 15 MB
# Time Complexity: O(N*N)
# Space Complexity: O(N)
# solution 1: DFS with memoization
class Solution:
def wordBreak(self, s: str, wordDict: list) -> list:
if not s:
return []
        _len, wordDict = len(s), set(wordDict) # convert to a set for O(1) "in" checks
        _min, _max = 2147483647, -2147483648 # track the shortest and longest word lengths, used for pruning
        for word in wordDict:
            _min = min(_min, len(word))
            _max = max(_max, len(word))
        def dfs(start): # return every sentence that s[start:] can be built into from the dictionary
            if start not in memo:
                res = []
                for i in range(_min, min(_max, _len-start)+1): # prune: only try lengths between the shortest and longest word
                    if s[start: start+i] in wordDict: # found a dictionary word
                        res.extend(list(map(lambda x: s[start: start+i]+' '+x, dfs(start+i)))) # append the suffix sentences
                memo[start] = res # memoize
            return memo[start]
        memo = {_len: ['']} # initialize the memo table
        return list(map(lambda x: x[:-1], dfs(0))) # drop the one extra trailing space
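# Note (added): memo maps a start index to every sentence s[start:] can form, so the
# output itself can be exponential in the worst case; the O(N*N) header above only
# covers the index/loop bookkeeping, not the size of the returned sentence list.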
def main():
s, wordDict = "catsanddog",["cat","cats","and","sand","dog"] #expect is ["cat sand dog","cats and dog"]
obj = Solution()
result = obj.wordBreak(s, wordDict)
print("return result is :",result)
if __name__ =='__main__':
main() | [
"[email protected]"
] | |
0ffa84d97f57d7a639d5357bce6f193502cc93a4 | e1787e6b167ffe1e7b03b926422437839f2e0921 | /permutation.py | 13ba44ec3745ce092bda40c385ef8aad6ad9ac6b | [] | no_license | Kennedy-Njeri/python-Algorithms | c86ec3dec0faa02c676200fcad65b2860c4e64b0 | 4e65b6c652ca09beeb4cdde706d14fbfd399eea0 | refs/heads/master | 2020-11-29T09:06:35.255011 | 2020-01-17T20:18:14 | 2020-01-17T20:18:14 | 230,075,701 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 445 | py | """"Given two strings write a function to decide if one is a permutation of the other string"""
str_1 = "driving"
str_2 = "drviign"
def is_permutation(str_1, str_2):
str_1 = str_1.replace(" ", "")
str_2 = str_2.replace(" ", "")
if len(str_1) != len(str_2):
return False
for c in str_1:
if c in str_2:
str_2 = str_2.replace(c, "")
return len(str_2) == 0
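# (added note) A linear-time alternative, assuming collections.Counter is acceptable:
#   from collections import Counter
#   def is_permutation2(s1, s2):
#       return Counter(s1.replace(" ", "")) == Counter(s2.replace(" ", ""))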
print (is_permutation(str_1, str_2))
| [
"[email protected]"
] | |
81a753ffd8cb5baca58b8a2daa8a6d8cc329da19 | a7b2be4d98565280b9e5bccb62aa26dfe8d780c8 | /env/bin/django-admin.py | 62949e376acde185f24655c3dd6df13920323d00 | [] | no_license | argen87/Shop_market | bb354d78ccbaefcbc78eddd9821b392a850ba100 | 07142b1274b707e48842a02f9a95edb603604a2f | refs/heads/main | 2023-05-09T05:36:08.429109 | 2021-06-10T14:46:47 | 2021-06-10T14:46:47 | 375,729,325 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 692 | py | #!/home/argen/PycharmProjects/shop_market/env/bin/python3
# When the django-admin.py deprecation ends, remove this script.
import warnings
from django.core import management
try:
from django.utils.deprecation import RemovedInDjango40Warning
except ImportError:
raise ImportError(
'django-admin.py was deprecated in Django 3.1 and removed in Django '
'4.0. Please manually remove this script from your virtual environment '
'and use django-admin instead.'
)
if __name__ == "__main__":
warnings.warn(
'django-admin.py is deprecated in favor of django-admin.',
RemovedInDjango40Warning,
)
management.execute_from_command_line()
| [
"[email protected]"
] | |
c50c6c1e5fec1a82e31eadbc1cc3276ebfb96d9b | deefd01b60fb0cfbeb8e8ae483f1d852f897c5f8 | /listkeeper/devices/migrations/0001_initial.py | 24af4f2cdc476b212d301d3c324e2886d895361b | [] | no_license | andrewgodwin/rfid-inventory | c0bb6f9ebe6ba53c3ec19e7ebe38ad4c1b9128c1 | fdb8b919bc4228049545f8b05773617c7d6690c9 | refs/heads/master | 2020-09-22T07:20:18.463709 | 2020-07-05T03:43:02 | 2020-07-05T03:43:02 | 225,102,557 | 30 | 4 | null | null | null | null | UTF-8 | Python | false | false | 3,767 | py | # Generated by Django 3.0 on 2019-12-08 06:30
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [("directory", "0001_initial")]
operations = [
migrations.CreateModel(
name="Device",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"name",
models.CharField(
help_text="Unique-ish device name", max_length=200
),
),
(
"type",
models.CharField(
blank=True, help_text="Device type", max_length=200
),
),
("notes", models.TextField(blank=True)),
("token", models.TextField(blank=True, help_text="Device API token")),
(
"mode",
models.CharField(
choices=[
("passive", "Passive tracking"),
("assigning", "Assigning locations"),
],
default="passive",
help_text="Current device mode",
max_length=32,
),
),
("created", models.DateTimeField(auto_now_add=True)),
("updated", models.DateTimeField(auto_now=True)),
("last_seen", models.DateTimeField(blank=True, null=True)),
(
"location",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="directory.Location",
),
),
],
),
migrations.CreateModel(
name="DeviceRead",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"tag",
models.CharField(
help_text="Type prefix, colon, hex value of tag (e.g. epc:f376ce13434a2b)",
max_length=255,
),
),
("created", models.DateTimeField(auto_now_add=True)),
("last_seen", models.DateTimeField()),
("present", models.BooleanField(default=False)),
(
"device",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="reads",
to="devices.Device",
),
),
(
"item",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="device_reads",
to="directory.Item",
),
),
],
options={"unique_together": {("device", "tag")}},
),
]
| [
"[email protected]"
] | |
3fe4b27047341c4a9fcea2e0da355659b238ff8b | 95d1dd5758076c0a9740d545a6ef2b5e5bb8c120 | /PY/basic/str_op.py | 7cec952522cfb4159b5a939c5083c84c679c3d6d | [] | no_license | icoding2016/study | 639cb0ad2fe80f43b6c93c4415dc6e8a11390c85 | 11618c34156544f26b3b27886b55c771305b2328 | refs/heads/master | 2023-08-31T14:15:42.796754 | 2023-08-31T05:28:38 | 2023-08-31T05:28:38 | 117,061,872 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,647 | py |
longstr1 = ('This is an example of a long string '
            'which is divided into multiple lines in the code '
'and use bracket to group into one string (which is required to avoid compiler error)'
'but actually should display in one line.\n')
longstr_in_obj = ('a tuple', 1, 'a long string in a tuple '
'while is splitted into multiple lines in the code, but'
' should display as one line. '
'In this case, the bracket is not required.\n'
)
mlstr = """This is an example of
a multi line string.
This line has 2 space in the lead.
The last line.\n"""
mlstr2 = ('This is another example of a multi line string.\n'
'Which does\'t use triple-quatation,but the \'\\n\' '
'expression to change lines.\n'
' This line has 2 space in the lead.\n'
'The last line.\n')
s='abcd'
# str join
l = [x for x in s]
print(l)
s1=''
print(s1.join(l))
print(s+'x') # >> abcdx
print(s.join('x')) # >> x , that's a wrong way to use join
print(''.join(l)) # >> abcd, that's a right way to use join,
print('-'.join(l)) # >> a-b-c-d, that's a right way to use join,
ipsection = ['10','20','1','1']
print('.'.join(ipsection))
# cut a piece off the str
s1 = s[:3] + s[4:] # >> abc (s has only 4 chars, so s[4:] is empty and index 3 is dropped)
print(s1)
# str format
for i in range(1,11):
for j in range(1,11):
print("{:<6}".format(i*j), end='')
print('')
print('{:b}'.format(10))
for i in range(1,10, 2):
print(i)
# sub string -- in
def subString(s1,s2):
if s1 in s2:
return True
return False
print(subString('abc', 'fullabcd'))
print(subString('abc', 'fullabd'))
s = 'abcdefghijk'
if 'a' in s:
print('a in ',s)
# sub string -- find
def findSubString(s1,s2):
return s2.find(s1)
print(findSubString('abc', 'fullabcd'))
# remove substring from a string
s1='ababcab'
s2='ab'
print('remove ',s2, ' from ',s1, ': ', s1.replace(s2,''))
# convert str to int
s = '123'
print('int for {} is {}'.format(s, int(s)))
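# (added example) int() also accepts a base when converting strings:
print('int of hex ff is {}'.format(int('ff', 16)))  # 255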
# The better way to format a str
# f'python 3 f format has the best performance'
# e.g.
# var1 = 'v1'
# var2 = 10
# f' some vars: {var1}, {var2} '
#
# >>> import timeit
# >>> timeit.timeit("""name = "Eric"
# ... age = 74
# ... '%s is %s.' % (name, age)""", number = 10000)
# 0.003324444866599663
# >>> timeit.timeit("""name = "Eric"
# ... age = 74
# ... '{} is {}.'.format(name, age)""", number = 10000)
# 0.004242089427570761
# >>> timeit.timeit("""name = "Eric"
# ... age = 74
# ... f'{name} is {age}.'""", number = 10000)
# 0.0024820892040722242
print(longstr1)
print(longstr_in_obj[2])
print(mlstr)
print(mlstr2)
print() | [
"[email protected]"
] | |
fbc5a3e9710da3cffc431b9a658319fa0d4b4578 | 6c860b5a89fcba3dad4e4d8dea9c570262fab901 | /luggage_calculator_2.py | 90e5822ff0cb6cb4469581665832e51ede3d9014 | [] | no_license | momentum-cohort-2019-05/examples | e011b0907dce22d1b58612a5e2df2030be98bdd5 | 277a213c2ef8d5499b9eb69e2c4287faac8e6f4e | refs/heads/master | 2022-09-07T21:14:29.409749 | 2019-07-11T20:18:43 | 2019-07-11T20:18:43 | 189,416,951 | 0 | 4 | null | 2022-08-23T17:52:24 | 2019-05-30T13:15:52 | PLpgSQL | UTF-8 | Python | false | false | 697 | py | # Description
# Ask the user for the weight of the current bag.
# - If the user has no more bags, stop
# - Otherwise, ask again
# Add up the weight of all bags
# If > limit (100 lbs), warn the user
total_weight = 0
weight_limit = 100
while True:
bag_weight_as_str = input(
"How much does your bag weigh in pounds? (Hit Enter if you are done) ")
if bag_weight_as_str == "":
break
bag_weight = int(bag_weight_as_str)
total_weight += bag_weight
print("Your total weight so far is " + str(total_weight) + ".")
if total_weight > weight_limit:
print("Warning! You are over the weight limit by " +
str(total_weight - weight_limit) + " pounds.")
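# Example session (added, hedged: illustrative values only):
#   bag weights entered: 60, then 50
#   -> "Your total weight so far is 110."
#   -> "Warning! You are over the weight limit by 10 pounds."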
| [
"[email protected]"
] | |
26899bb618ab25ea80fd07b6b93e9ea23828318c | ef1bf421aca35681574c03014e0c2b92da1e7dca | /test/test_modes/test_occurrences.py | f9d0d8e717d9872d33f126775173dc825cf599b5 | [
"MIT"
] | permissive | pyQode/pyqode.core | 74e67f038455ea8cde2bbc5bd628652c35aff6eb | 0ffabebe4f0397d53429024f6f44db3fe97b0828 | refs/heads/master | 2020-04-12T06:36:33.483459 | 2020-01-18T14:16:08 | 2020-01-18T14:16:08 | 7,739,074 | 24 | 25 | MIT | 2020-01-18T14:16:10 | 2013-01-21T19:46:41 | Python | UTF-8 | Python | false | false | 1,555 | py | import pytest
from pyqode.qt import QtGui
from pyqode.qt.QtTest import QTest
from pyqode.core.api import TextHelper
from pyqode.core import modes
from ..helpers import ensure_visible, ensure_connected
def get_mode(editor):
return editor.modes.get(modes.OccurrencesHighlighterMode)
def test_enabled(editor):
mode = get_mode(editor)
assert mode.enabled
mode.enabled = False
mode.enabled = True
@ensure_connected
@ensure_visible
def test_delay(editor):
mode = get_mode(editor)
assert mode.delay == 1000
mode.delay = 3000
assert mode.delay == 3000
mode.delay = 1000
assert mode.delay == 1000
@ensure_connected
@ensure_visible
def test_background(editor):
mode = get_mode(editor)
assert mode.background.name() == '#ccffcc'
mode.background = QtGui.QColor('#404040')
assert mode.background.name() == '#404040'
@ensure_connected
@ensure_visible
def test_foreground(editor):
mode = get_mode(editor)
assert mode.foreground is None
mode.foreground = QtGui.QColor('#202020')
assert mode.foreground.name() == '#202020'
@ensure_connected
@ensure_visible
@pytest.mark.xfail
def test_occurrences(editor):
for underlined in [True, False]:
editor.file.open(__file__)
assert editor.backend.running is True
mode = get_mode(editor)
mode.underlined = underlined
assert len(mode._decorations) == 0
assert mode.delay == 1000
TextHelper(editor).goto_line(16, 7)
QTest.qWait(2000)
assert len(mode._decorations) > 0
| [
"[email protected]"
] | |
1abaebbe2a53ac8c55b73c8692edd187e8ac1ca3 | 3a85089c2498ff04d1b9bce17a4b8bf6cf2380c9 | /DQM/SiStripCommissioningSources/python/__init__.py | 5cb176520939c3fb3b64280306e6166224512036 | [] | no_license | sextonkennedy/cmssw-ib | c2e85b5ffa1269505597025e55db4ffee896a6c3 | e04f4c26752e0775bd3cffd3a936b288ee7b0268 | HEAD | 2016-09-01T20:09:33.163593 | 2013-04-26T12:05:17 | 2013-04-29T16:40:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py | #Automatically created by SCRAM
import os
__path__.append(os.path.dirname(os.path.abspath(__file__).rsplit('/DQM/SiStripCommissioningSources/',1)[0])+'/cfipython/slc6_amd64_gcc480/DQM/SiStripCommissioningSources')
| [
"[email protected]"
] | |
99304e7b590358047f085b80e003dd7131ae0bf1 | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/eqpt/sysc.py | b3fbc6b2c18b997b31b196e89572ec2d501aa74a | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 15,101 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class SysC(Mo):
"""
The system controller card.
"""
meta = ClassMeta("cobra.model.eqpt.SysC")
meta.moClassName = "eqptSysC"
meta.rnFormat = "sc"
meta.category = MoCategory.REGULAR
meta.label = "System Controller Module"
meta.writeAccessMask = 0x80080000000001
meta.readAccessMask = 0x880080000000001
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.eqpt.SpromLc")
meta.childClasses.add("cobra.model.eqpt.FruPowerHist1w")
meta.childClasses.add("cobra.model.fault.Inst")
meta.childClasses.add("cobra.model.fault.Counts")
meta.childClasses.add("cobra.model.firmware.CardRunning")
meta.childClasses.add("cobra.model.eqpt.FruPowerHist15min")
meta.childClasses.add("cobra.model.eqpt.RtSysCOdDiag")
meta.childClasses.add("cobra.model.eqpt.FruPowerHist1h")
meta.childClasses.add("cobra.model.tag.Inst")
meta.childClasses.add("cobra.model.eqpt.FruPower1mo")
meta.childClasses.add("cobra.model.eqpt.FruPower1year")
meta.childClasses.add("cobra.model.eqpt.CPU")
meta.childClasses.add("cobra.model.eqpt.FruPower1w")
meta.childClasses.add("cobra.model.eqpt.FruPowerHist1year")
meta.childClasses.add("cobra.model.tag.AliasDelInst")
meta.childClasses.add("cobra.model.eqpt.EpcP")
meta.childClasses.add("cobra.model.eqpt.FruPower1h")
meta.childClasses.add("cobra.model.eqpt.FruPower1d")
meta.childClasses.add("cobra.model.eqpt.Obfl")
meta.childClasses.add("cobra.model.eqpt.RsMonPolModulePolCons")
meta.childClasses.add("cobra.model.eqpt.Fpga")
meta.childClasses.add("cobra.model.eqpt.FruPower5min")
meta.childClasses.add("cobra.model.health.Inst")
meta.childClasses.add("cobra.model.eqpt.FruPowerHist1qtr")
meta.childClasses.add("cobra.model.eqpt.Sensor")
meta.childClasses.add("cobra.model.eqpt.IndLed")
meta.childClasses.add("cobra.model.eqpt.LocLed")
meta.childClasses.add("cobra.model.eqpt.Asic")
meta.childClasses.add("cobra.model.eqpt.Dimm")
meta.childClasses.add("cobra.model.tag.ExtMngdInst")
meta.childClasses.add("cobra.model.eqpt.FruPower1qtr")
meta.childClasses.add("cobra.model.tag.AliasInst")
meta.childClasses.add("cobra.model.eqpt.FruPowerHist5min")
meta.childClasses.add("cobra.model.eqpt.FruPowerHist1mo")
meta.childClasses.add("cobra.model.eqpt.FruPower15min")
meta.childClasses.add("cobra.model.eqpt.FruPowerHist1d")
meta.childClasses.add("cobra.model.eqpt.EobcP")
meta.childClasses.add("cobra.model.eqpt.Flash")
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.RsMonPolModulePolCons", "rsmonPolModulePolCons"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPowerHist15min", "HDeqptFruPower15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPowerHist1year", "HDeqptFruPower1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPower1year", "CDeqptFruPower1year"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPowerHist1qtr", "HDeqptFruPower1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPowerHist5min", "HDeqptFruPower5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPower15min", "CDeqptFruPower15min"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPower5min", "CDeqptFruPower5min"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPower1qtr", "CDeqptFruPower1qtr"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPowerHist1mo", "HDeqptFruPower1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPowerHist1w", "HDeqptFruPower1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPowerHist1h", "HDeqptFruPower1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPower1mo", "CDeqptFruPower1mo"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPowerHist1d", "HDeqptFruPower1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPower1w", "CDeqptFruPower1w"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPower1h", "CDeqptFruPower1h"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.FruPower1d", "CDeqptFruPower1d"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.RtSysCOdDiag", "rtsysCOdDiag"))
meta.childNamesAndRnPrefix.append(("cobra.model.tag.AliasDelInst", "aliasdel-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
meta.childNamesAndRnPrefix.append(("cobra.model.firmware.CardRunning", "running"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.Sensor", "sensor-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.IndLed", "indled-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.LocLed", "locled-"))
meta.childNamesAndRnPrefix.append(("cobra.model.tag.ExtMngdInst", "extmngd"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Inst", "fault-"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.Fpga", "fpga-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.Asic", "asic-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.Dimm", "dimm-"))
meta.childNamesAndRnPrefix.append(("cobra.model.tag.AliasInst", "alias"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.EobcP", "eobc-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.Flash", "flash"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.SpromLc", "splc"))
meta.childNamesAndRnPrefix.append(("cobra.model.tag.Inst", "tag-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.CPU", "cpu-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.EpcP", "epc-"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqpt.Obfl", "obfl"))
meta.parentClasses.add("cobra.model.eqpt.SysCSlot")
meta.superClasses.add("cobra.model.eqpt.Item")
meta.superClasses.add("cobra.model.nw.Item")
meta.superClasses.add("cobra.model.eqpt.Card")
meta.superClasses.add("cobra.model.eqpt.Comp")
meta.superClasses.add("cobra.model.eqpt.Fru")
meta.rnPrefixes = [
('sc', False),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "descr", "descr", 5597, PropCategory.REGULAR)
prop.label = "Description"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "hwVer", "hwVer", 15577, PropCategory.REGULAR)
prop.label = "Hardware Version"
prop.isOper = True
prop.range = [(0, 512)]
meta.props.add("hwVer", prop)
prop = PropMeta("str", "id", "id", 3505, PropCategory.REGULAR)
prop.label = "ID"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("id", prop)
prop = PropMeta("str", "macB", "macB", 3324, PropCategory.REGULAR)
prop.label = "MAC Address Base"
prop.isOper = True
prop.range = [(0, 512)]
meta.props.add("macB", prop)
prop = PropMeta("str", "macL", "macL", 3325, PropCategory.REGULAR)
prop.label = "MAC Address Length"
prop.isOper = True
meta.props.add("macL", prop)
prop = PropMeta("str", "mfgTm", "mfgTm", 5596, PropCategory.REGULAR)
prop.label = "Manufacturing Time"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "not-applicable"
prop._addConstant("not-applicable", "n/a", 0)
meta.props.add("mfgTm", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "model", "model", 5592, PropCategory.REGULAR)
prop.label = "Model"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("model", prop)
prop = PropMeta("str", "monPolDn", "monPolDn", 14514, PropCategory.REGULAR)
prop.label = "Monitoring policy attached to this observable object"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("monPolDn", prop)
prop = PropMeta("str", "numP", "numP", 3320, PropCategory.REGULAR)
prop.label = "Number of Ports"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 256)]
prop.defaultValue = 0
prop.defaultValueStr = "0"
meta.props.add("numP", prop)
prop = PropMeta("str", "operSt", "operSt", 3317, PropCategory.REGULAR)
prop.label = "Operational State"
prop.isOper = True
prop.defaultValue = 0
prop.defaultValueStr = "unknown"
prop._addConstant("absent", "absent", 2)
prop._addConstant("dnld", "downloading", 15)
prop._addConstant("err-pwr-down", "errored-powered-down", 11)
prop._addConstant("fail", "failed", 4)
prop._addConstant("ha-seq-fail", "ha-sequence-number-failed", 18)
prop._addConstant("init", "initializing", 13)
prop._addConstant("inserted", "inserted", 1)
prop._addConstant("mismatch", "mismatch", 3)
prop._addConstant("offline", "offline", 8)
prop._addConstant("online", "online", 7)
prop._addConstant("present", "present", 5)
prop._addConstant("pwr-down", "powered-down", 10)
prop._addConstant("pwr-up", "powered-up", 9)
prop._addConstant("reg-failure", "registration-failed", 14)
prop._addConstant("removed", "removed", 6)
prop._addConstant("srg-fail", "srg-failed", 17)
prop._addConstant("testing", "testing", 12)
prop._addConstant("unknown", "unknown", 0)
prop._addConstant("upg", "upgrading", 16)
meta.props.add("operSt", prop)
prop = PropMeta("str", "pwrSt", "pwrSt", 3327, PropCategory.REGULAR)
prop.label = "Power State"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "unknown"
prop._addConstant("deny", "deny", 3)
prop._addConstant("multi-boot-fail", "multiple-boot-failure", 4)
prop._addConstant("off", "off", 1)
prop._addConstant("on", "on", 2)
prop._addConstant("unknown", "unknown", 0)
meta.props.add("pwrSt", prop)
prop = PropMeta("str", "rdSt", "rdSt", 3318, PropCategory.REGULAR)
prop.label = "Redundancy State"
prop.isOper = True
prop.defaultValue = 0
prop.defaultValueStr = "unknown"
prop._addConstant("active", "active", 1)
prop._addConstant("standby", "standby", 2)
prop._addConstant("unknown", "unknown", 0)
meta.props.add("rdSt", prop)
prop = PropMeta("str", "rev", "rev", 5593, PropCategory.REGULAR)
prop.label = "Revision"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 512)]
prop.defaultValue = "0"
prop.defaultValueStr = "0"
meta.props.add("rev", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "ser", "ser", 5594, PropCategory.REGULAR)
prop.label = "Serial Number"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 16)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("ser", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "swCId", "swCId", 3319, PropCategory.REGULAR)
prop.label = "Software Module ID"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("swCId", prop)
prop = PropMeta("str", "type", "type", 3331, PropCategory.REGULAR)
prop.label = "Type"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 4
prop.defaultValueStr = "sysctrlcard"
prop._addConstant("extchcard", "extension-chassis-card", 5)
prop._addConstant("fabriccard", "fabric-card", 3)
prop._addConstant("fan-tray", "fan-tray", 6)
prop._addConstant("linecard", "line-card", 2)
prop._addConstant("motherboard", "motherboard", 8)
prop._addConstant("power-supply", "power-supply", 7)
prop._addConstant("supervisor", "supervisor", 1)
prop._addConstant("sysctrlcard", "system-control-card", 4)
prop._addConstant("unknown", "unknown", 0)
meta.props.add("type", prop)
prop = PropMeta("str", "upTs", "upTs", 3326, PropCategory.REGULAR)
prop.label = "Up Timestamp"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("upTs", prop)
prop = PropMeta("str", "vendor", "vendor", 5595, PropCategory.REGULAR)
prop.label = "Vendor"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 512)]
prop.defaultValue = "Cisco Systems, Inc"
prop.defaultValueStr = "Cisco Systems, Inc"
meta.props.add("vendor", prop)
meta.deploymentCategory = DeploymentCategory("module", "Module")
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
meta.deploymentQueryPaths.append(DeploymentPathMeta("EqptSlotToEPg", "EPG", "cobra.model.fv.EPg"))
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
c824b387f7e69b52f2aea7ee95f05bb9fe654c45 | c46260c40054c0499e0a6871e4d3fe2d6d8aa9c0 | /LISTING.py | 281621f7fb0be0311058b69ce8761221ebea4feb | [] | no_license | iiot-tbb/learngit | 1d6f5c234f1c36016be5489c7ec605f666bbba16 | dfa3106d05bcbb0da39b9bf71f7a0698322130f7 | refs/heads/master | 2020-08-30T01:30:07.632602 | 2019-12-07T13:09:59 | 2019-12-07T13:09:59 | 218,225,081 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 731 | py | #!/usr/bin/env python
# coding=utf-8
from timeit import Timer
def test1():
    l = []
    for i in range(1000):
        l = l + [i]
def test2():
    l = []
    for i in range(1000):
        l.append(i)
def test3():
    l = [i for i in range(1000)]
def test4():
    l = list(range(1000))
# Timer.timeit(number=N) returns the total elapsed time in seconds for N runs.
t1 = Timer("test1()", "from __main__ import test1")
print("concat ", t1.timeit(number=1000), "seconds")
t2 = Timer("test2()", "from __main__ import test2")
print("append ", t2.timeit(number=1000), "seconds")
t3 = Timer("test3()", "from __main__ import test3")
print("comprehension ", t3.timeit(number=1000), "seconds")
t4 = Timer("test4()", "from __main__ import test4")
print("list range ", t4.timeit(number=1000), "seconds")
| [
"[email protected]"
] | |
226d7c640175f5819037705b351840787fd615ac | 6188f8ef474da80c9e407e8040de877273f6ce20 | /examples/assets_pandas_type_metadata/assets_pandas_type_metadata/resources/csv_io_manager.py | 68dc75f91b9b2b658c50091745e997fba2f106e1 | [
"Apache-2.0"
] | permissive | iKintosh/dagster | 99f2a1211de1f3b52f8bcf895dafaf832b999de2 | 932a5ba35263deb7d223750f211c2ddfa71e6f48 | refs/heads/master | 2023-01-24T15:58:28.497042 | 2023-01-20T21:51:35 | 2023-01-20T21:51:35 | 276,410,978 | 1 | 0 | Apache-2.0 | 2020-07-01T15:19:47 | 2020-07-01T15:13:56 | null | UTF-8 | Python | false | false | 2,782 | py | import os
import textwrap
import pandas as pd
from dagster import (
AssetKey,
MemoizableIOManager,
MetadataEntry,
TableSchemaMetadataValue,
io_manager,
)
class LocalCsvIOManager(MemoizableIOManager):
"""Translates between Pandas DataFrames and CSVs on the local filesystem."""
def __init__(self, base_dir):
self._base_dir = base_dir
def _get_fs_path(self, asset_key: AssetKey) -> str:
rpath = os.path.join(self._base_dir, *asset_key.path) + ".csv"
return os.path.abspath(rpath)
def handle_output(self, context, obj: pd.DataFrame):
"""This saves the dataframe as a CSV."""
fpath = self._get_fs_path(context.asset_key)
os.makedirs(os.path.dirname(fpath), exist_ok=True)
obj.to_csv(fpath)
with open(fpath + ".version", "w", encoding="utf8") as f:
f.write(context.version if context.version else "None")
yield MetadataEntry.int(obj.shape[0], "Rows")
yield MetadataEntry.path(fpath, "Path")
yield MetadataEntry.md(obj.head(5).to_markdown(), "Sample")
yield MetadataEntry.text(context.version, "Resolved version")
yield MetadataEntry.table_schema(
self.get_schema(context.dagster_type),
"Schema",
)
def get_schema(self, dagster_type):
schema_entry = next(
(
x
for x in dagster_type.metadata_entries
if isinstance(x.entry_data, TableSchemaMetadataValue)
),
None,
)
assert schema_entry
return schema_entry.entry_data.schema
def load_input(self, context):
"""This reads a dataframe from a CSV."""
fpath = self._get_fs_path(context.asset_key)
date_col_names = [
table_col.name
for table_col in self.get_schema(context.upstream_output.dagster_type).columns
if table_col.type == "datetime64[ns]"
]
return pd.read_csv(fpath, parse_dates=date_col_names)
def has_output(self, context) -> bool:
fpath = self._get_fs_path(context.asset_key)
version_fpath = fpath + ".version"
if not os.path.exists(version_fpath):
return False
with open(version_fpath, "r", encoding="utf8") as f:
version = f.read()
return version == context.version
@io_manager
def local_csv_io_manager(context):
return LocalCsvIOManager(context.instance.storage_directory())
def pandas_columns_to_markdown(dataframe: pd.DataFrame) -> str:
return (
textwrap.dedent(
"""
| Name | Type |
| ---- | ---- |
"""
)
+ "\n".join([f"| {name} | {dtype} |" for name, dtype in dataframe.dtypes.iteritems()])
)
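# Illustrative sketch (not exercised in this module): rendering a DataFrame's
# column schema with the helper above,
#
#   df = pd.DataFrame({"id": [1], "when": pd.to_datetime(["2021-01-01"])})
#   print(pandas_columns_to_markdown(df))
#
# prints a two-column "| Name | Type |" markdown table with one row per
# column dtype (here int64 and datetime64[ns]).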
| [
"[email protected]"
] | |
e0e4e7533392653a3bd4723f5ebfe1fe76de1bc8 | 556db265723b0cc30ad2917442ed6dad92fd9044 | /tensorflow/python/profiler/traceme.py | 7807afbc54b0a025c44431ee9279756620891468 | [
"MIT",
"Apache-2.0",
"BSD-2-Clause"
] | permissive | graphcore/tensorflow | c1669b489be0e045b3ec856b311b3139858de196 | 085b20a4b6287eff8c0b792425d52422ab8cbab3 | refs/heads/r2.6/sdk-release-3.2 | 2023-07-06T06:23:53.857743 | 2023-03-14T13:04:04 | 2023-03-14T13:48:43 | 162,717,602 | 84 | 17 | Apache-2.0 | 2023-03-25T01:13:37 | 2018-12-21T13:30:38 | C++ | UTF-8 | Python | false | false | 1,143 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TraceMe allows the profiler to trace Python events."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.profiler.trace import Trace as TraceMe
def traceme_wrapper(func):
name = getattr(func, '__qualname__', None)
if not name:
name = func.__name__
def wrapper(*args, **kwargs):
with TraceMe(name):
return func(*args, **kwargs)
return wrapper
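# Illustrative usage (assumed caller code, not part of this module): each
# call to the decorated function emits a TraceMe event named after it that
# shows up on the profiler timeline while tracing is active, e.g.
#
#   @traceme_wrapper
#   def train_step(batch):
#     ...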
| [
"[email protected]"
] | |
34007746b1f587a5fbfc161816d0df49bbeda33c | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2176/60898/304593.py | dd9b9604eea880c2c36c822e8be0863c455da893 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | s1 = input()
l1 = len(s1)
s2 = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
a = ""
# For each letter (a-z, then A-Z), scan the input from its last character
# back to its first and collect the 1-based positions where it occurs.
for i in range(52):
    for j in range(l1):
        if s1[l1-j-1] == s2[i]:
            b = str(l1-j)
            a = a + b + " "
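# Worked example: for input "aba" the print below emits "3 1 2 ", i.e. the
# positions of 'a' scanned from the end (3, then 1), followed by 'b' at 2.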
print(a) | [
"[email protected]"
] | |
f8b8200530e6ac229ce51345487daaf93aa19c13 | 6b14d9a64a578239e5612e6098320b61b45c08d9 | /SEP27/03.py | da54b196d586a25bb2a7fc1b0f68d526ccca9bd1 | [
"MIT"
] | permissive | Razdeep/PythonSnippets | 498c403140fec33ee2f0dd84801738f1256ee9dd | 76f9313894f511c487a99bc38bdf0fe5e594caf5 | refs/heads/master | 2020-03-26T08:56:23.067022 | 2018-11-26T05:36:36 | 2018-11-26T05:36:36 | 144,726,845 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 321 | py | # Using sqlite3
import sqlite3
conn = sqlite3.connect('Example.db')
conn.execute('create table if not exists student(name,address,age,mob)')
print('Table created')
conn.execute('insert into student values("Raj","ssd","sdf","adgg")')
conn.commit()  # persist the insert; without commit() it is lost when the connection closes
print('Row has been inserted')
student = conn.execute('select * from student').fetchall()
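# Safer variant (sketch): bind parameters instead of embedding literals,
# which avoids quoting problems and SQL injection:
#   conn.execute('insert into student values (?,?,?,?)',
#                ("Raj", "ssd", "sdf", "adgg"))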
print(student) | [
"[email protected]"
] | |
194d7c03a55bf69115a1a87ec700ccc939cc4a70 | ef9a1edc55a8dc13c7dc0081334d9b0e5b5643ed | /explorer/migrations/0004_auto_20170815_1500.py | 1b277b7d3a2988b7195fc9606bca3d45696ee5ae | [
"MIT"
] | permissive | LCOGT/serol | 450a7650a4ad70d2f1402d58e3098d6cdfc8cda7 | b4698dc90cc59587068e352e1e523025087cca62 | refs/heads/master | 2023-09-04T08:08:01.557747 | 2023-08-14T12:24:06 | 2023-08-14T12:24:06 | 98,443,901 | 0 | 0 | MIT | 2023-05-23T03:39:31 | 2017-07-26T16:29:55 | Python | UTF-8 | Python | false | false | 1,086 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-15 15:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('explorer', '0003_challenge_category'),
]
operations = [
migrations.DeleteModel(
name='Target',
),
migrations.AddField(
model_name='challenge',
name='active',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='challenge',
name='avm_code',
field=models.CharField(blank=True, max_length=50, null=True),
),
migrations.AlterField(
model_name='challenge',
name='category',
field=models.TextField(blank=True, help_text='Astronomical object type', null=True),
),
migrations.AlterField(
model_name='challenge',
name='description',
field=models.TextField(help_text='Research page info'),
),
]
| [
"[email protected]"
] | |
7e7d78b5c7010025c138d03323e04d0ec123e87a | e1c5b001b7031d1ff204d4b7931a85366dd0ce9c | /EMu/2017/data_reskim/script/Batch_reskim_all.py | c2503d17c492d171b32fbe7503db4a8bf1b7a58b | [] | no_license | fdzyffff/IIHE_code | b9ff96b5ee854215e88aec43934368af11a1f45d | e93a84777afad69a7e63a694393dca59b01c070b | refs/heads/master | 2020-12-30T16:03:39.237693 | 2020-07-13T03:06:53 | 2020-07-13T03:06:53 | 90,961,889 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,833 | py | import os
import sys
MYDIR=os.getcwd()
file_root_dir_dic={
    #value format: [[isData, isDYbin (Z->tautau), isTTbin, isWWbin], [triggerVersion]]
"../Data/rB_1" :[[True,False,False,False],[0]],
"../Data/rC_1" :[[True,False,False,False],[0]],
"../Data/rD_1" :[[True,False,False,False],[0]],
"../Data/rE_1" :[[True,False,False,False],[0]],
"../Data/rF_1" :[[True,False,False,False],[0]],
}
reskim_dic={
}
def my_walk_dir(my_dir,my_list):
    # Recursively collect .root files under my_dir, skipping "failed" outputs;
    # the [3:] slice strips the leading "../" so the stored paths are relative
    # to the parent directory, where the batch jobs run after "cd ../".
    for tmp_file in os.listdir(my_dir):
        tmp_file_name = my_dir+'/'+tmp_file
        if os.path.isfile(tmp_file_name):
            if 'failed' in tmp_file_name:continue
            if not '.root' in tmp_file_name:continue
            my_list.append(tmp_file_name[3:])
        else:
            my_walk_dir(tmp_file_name,my_list)
    return my_list
def make_dic(check_dir = ""):
print "%s making file list %s"%("#"*15,"#"*15)
n_total = 0
for file_dir in file_root_dir_dic:
tmp_name = ""
for i in file_dir.split("/"):
if (not i == ".") and (not i == ".."):
tmp_name += "%s_"%(i)
file_list = my_walk_dir(file_dir,[])
file_list.sort()
if check_dir == "":
reskim_dic[tmp_name] = [file_root_dir_dic[file_dir][0],file_list,file_root_dir_dic[file_dir][1],[]]
else:
reskim_dic[tmp_name] = [file_root_dir_dic[file_dir][0],file_list,file_root_dir_dic[file_dir][1],my_walk_dir(check_dir,[])]
print " %s : %d"%(file_dir,len(reskim_dic[tmp_name][1]))
n_total += len(reskim_dic[tmp_name][1])
print " Total root files : %d"%(n_total)
def make_sub(label,n_file_per_job):
print "%s making jobs script, %d root files/job %s"%("#"*15,n_file_per_job,"#"*15)
try:
if isCheck:
tmp_dir='check_sub_%s'%(label)
else:
tmp_dir='sub_%s'%(label)
os.mkdir(tmp_dir)
except:
pass
try:
os.system('mkdir %s/sub_err'%tmp_dir)
os.system('mkdir %s/sub_out'%tmp_dir)
os.system('mkdir %s/sub_job'%tmp_dir)
os.system('mkdir %s/BigSub'%tmp_dir)
except:
print "err!"
pass
i=0
tmp_bigsubname = "BigSubmit_%s.jobb"%(label)
BigSub_job = open(MYDIR+'/'+tmp_dir+'/'+tmp_bigsubname,'w')
sub_log_name = "sub_log_%s.log"%(label)
sub_log = open(MYDIR+'/'+tmp_dir+'/'+sub_log_name,'w')
n_total_job = 0
for reskim in reskim_dic:
isData = reskim_dic[reskim][0][0]
isDYbin = reskim_dic[reskim][0][1]
isTTbin = reskim_dic[reskim][0][2]
isWWbin = reskim_dic[reskim][0][3]
triggerVersion = reskim_dic[reskim][2][0]
n_job = 0
sub_n_total_job = 0
n_start = True
job_text = ""
i = 0
for root_file in reskim_dic[reskim][1]:
sample_name = root_file.split("/")[-3]
subdir_name = root_file.split("/")[-2]
file_name = root_file.split("/")[-1]
output_name = "ntuples/batchdata_loop_2/data_%s_%s_%s_%s"%(label,sample_name,subdir_name,file_name)
tmp_label = reskim
if n_start:
n_start=False
job_text = ""
job_text+=("curr_dir=%s\n"%(MYDIR))
job_text+=("cd %s\n"%(MYDIR))
job_text+=("source env2.sh\n")
job_text+=("cd ../\n")
if (not isCheck) or (not output_name in reskim_dic[reskim][3]):
job_text+=("python reskim_all.py -r %s -o %s --isData %s --isDYbin %s --isTTbin %s --isWWbin %s -t %s\n"%(root_file, output_name, isData, isDYbin, isTTbin, isWWbin, triggerVersion))
n_job+=1
i+=1
if (n_job%n_file_per_job==0 and n_job>0) or (i >= len(reskim_dic[reskim][1])):
n_job=0
n_start=True
n_total_job += 1
sub_n_total_job += 1
tmp_label = "%s%s"%(reskim,sub_n_total_job)
tmp_jobname="sub_%s.jobb"%(tmp_label)
tmp_job=open(MYDIR+'/'+tmp_dir+'/sub_job/'+tmp_jobname,'w')
tmp_job.write(job_text)
tmp_job.close()
os.system("chmod +x %s"%(MYDIR+'/'+tmp_dir+'/'+"sub_job/"+tmp_jobname))
sub_log_command = "qsub -q localgrid -e %s/sub_err/err_%s_%s.dat -o %s/sub_out/out_%s_%s.dat %s"%(tmp_dir,label,tmp_label,tmp_dir,label,tmp_label,MYDIR+'/'+tmp_dir+'/sub_job/'+tmp_jobname)
#os.system(sub_log_command)
sub_log.write("%s\n"%(sub_log_command))
BigSub_job.write("qsub -q localgrid %s\n"%(MYDIR+'/'+tmp_dir+'/sub_job/'+tmp_jobname))
os.system("chmod +x %s"%(MYDIR+'/'+tmp_dir+'/'+tmp_bigsubname))
print "%d jobs created"%(n_total_job)
# Manual switch: the last assignment wins. Set isCheck to True (and use the
# check-directory make_dic call below) to build jobs only for missing outputs.
isCheck = True
isCheck = False
make_dic()
#make_dic("../ntuples/batchdata_loop_2")
make_sub("2017_SingleMuon",200)
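# Typical workflow (assumed): run this script from the "script" directory,
# then submit all generated jobs at once via the big-submit file, e.g.
#   sh sub_2017_SingleMuon/BigSubmit_2017_SingleMuon.jobb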
| [
"[email protected]"
] | |
614267896a7e2c9ff12416977b11b5f2d10d7ab3 | 8039137e257c587e4f37dea4f421607de040009c | /test_autoarray/geometry/test_geometry_util.py | 640d59879b3694ff2064e8e1e73958206b477762 | [
"MIT"
] | permissive | jonathanfrawley/PyAutoArray_copy | d15cafa8ad93c19e9991b0c98bc5192be520fcdc | c21e8859bdb20737352147b9904797ac99985b73 | refs/heads/master | 2023-04-20T22:43:18.765267 | 2021-05-12T14:23:47 | 2021-05-12T14:23:47 | 367,427,246 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35,175 | py | import autoarray as aa
import numpy as np
import pytest
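# Convention implied by the expected values below: pixel (0, 0) is the
# top-left corner of the grid; the first scaled axis (y) increases upwards
# and the second (x) increases rightwards, so scaled y decreases as the
# pixel row index grows.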
class TestCoordinates1D:
def test__central_pixel__depends_on_shape_pixel_scale_and_origin(self):
central_pixel_coordinates = aa.util.geometry.central_pixel_coordinates_1d_from(
shape_slim=(3,)
)
assert central_pixel_coordinates == (1,)
central_pixel_coordinates = aa.util.geometry.central_pixel_coordinates_1d_from(
shape_slim=(4,)
)
assert central_pixel_coordinates == (1.5,)
def test__pixel_coordinates_1d_from(self):
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(1.0,), shape_slim=(2,), pixel_scales=(2.0,)
)
assert pixel_coordinates == (1,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(1.0,), shape_slim=(2,), pixel_scales=(2.0,)
)
assert pixel_coordinates == (1,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(-1.0,), shape_slim=(2,), pixel_scales=(2.0,)
)
assert pixel_coordinates == (0,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(0.0,), shape_slim=(3,), pixel_scales=(3.0,)
)
assert pixel_coordinates == (1,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(3.0,), shape_slim=(3,), pixel_scales=(3.0,)
)
assert pixel_coordinates == (2,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(-3.0,), shape_slim=(3,), pixel_scales=(3.0,)
)
assert pixel_coordinates == (0,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(3.0,), shape_slim=(3,), pixel_scales=(3.0,)
)
assert pixel_coordinates == (2,)
def test__pixel_coordinates_1d_from__scaled_are_pixel_corners(self):
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(-1.99,), shape_slim=(2,), pixel_scales=(2.0,)
)
assert pixel_coordinates == (0,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(-0.01,), shape_slim=(2,), pixel_scales=(2.0,)
)
assert pixel_coordinates == (0,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(0.01,), shape_slim=(2,), pixel_scales=(2.0,)
)
assert pixel_coordinates == (1,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(-1.99,), shape_slim=(2,), pixel_scales=(2.0,)
)
assert pixel_coordinates == (0,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(-0.01,), shape_slim=(2,), pixel_scales=(2.0,)
)
assert pixel_coordinates == (0,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(0.01,), shape_slim=(2,), pixel_scales=(2.0,)
)
assert pixel_coordinates == (1,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(1.99,), shape_slim=(2,), pixel_scales=(2.0,)
)
assert pixel_coordinates == (1,)
def test__pixel_coordinates_1d_from___scaled_are_pixel_centres__nonzero_centre(
self,
):
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(0.0,),
shape_slim=(2,),
pixel_scales=(2.0,),
origins=(1.0,),
)
assert pixel_coordinates == (0,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(2.0,),
shape_slim=(2,),
pixel_scales=(2.0,),
origins=(1.0,),
)
assert pixel_coordinates == (1,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(0.0,),
shape_slim=(3,),
pixel_scales=(3.0,),
origins=(3.0,),
)
assert pixel_coordinates == (0,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(3.0,),
shape_slim=(3,),
pixel_scales=(3.0,),
origins=(3.0,),
)
assert pixel_coordinates == (1,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(6.0,),
shape_slim=(3,),
pixel_scales=(3.0,),
origins=(3.0,),
)
assert pixel_coordinates == (2,)
def test__pixel_coordinates_1d_from__scaled_are_pixel_corners__nonzero_centre(
self,
):
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(-0.99,),
shape_slim=(2,),
pixel_scales=(2.0,),
origins=(1.0,),
)
assert pixel_coordinates == (0,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(0.99,),
shape_slim=(2,),
pixel_scales=(2.0,),
origins=(1.0,),
)
assert pixel_coordinates == (0,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(1.01,),
shape_slim=(2,),
pixel_scales=(2.0,),
origins=(1.0,),
)
assert pixel_coordinates == (1,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(2.99,),
shape_slim=(2,),
pixel_scales=(2.0,),
origins=(1.0,),
)
assert pixel_coordinates == (1,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(1.01,),
shape_slim=(2,),
pixel_scales=(2.0,),
origins=(1.0,),
)
assert pixel_coordinates == (1,)
pixel_coordinates = aa.util.geometry.pixel_coordinates_1d_from(
scaled_coordinates_1d=(2.99,),
shape_slim=(2,),
pixel_scales=(2.0,),
origins=(1.0,),
)
assert pixel_coordinates == (1,)
def test__scaled_coordinates_1d_from___scaled_are_pixel_centres__nonzero_centre(
self,
):
scaled_coordinates = aa.util.geometry.scaled_coordinates_1d_from(
pixel_coordinates_1d=(0,), shape_slim=(3,), pixel_scales=(3.0,)
)
assert scaled_coordinates == (-3.0,)
scaled_coordinates = aa.util.geometry.scaled_coordinates_1d_from(
pixel_coordinates_1d=(1,), shape_slim=(3,), pixel_scales=(3.0,)
)
assert scaled_coordinates == (0.0,)
scaled_coordinates = aa.util.geometry.scaled_coordinates_1d_from(
pixel_coordinates_1d=(2,), shape_slim=(3,), pixel_scales=(3.0,)
)
assert scaled_coordinates == (3.0,)
scaled_coordinates = aa.util.geometry.scaled_coordinates_1d_from(
pixel_coordinates_1d=(0,),
shape_slim=(2,),
pixel_scales=(2.0,),
origins=(1.0,),
)
assert scaled_coordinates == (0.0,)
scaled_coordinates = aa.util.geometry.scaled_coordinates_1d_from(
pixel_coordinates_1d=(1,),
shape_slim=(2,),
pixel_scales=(2.0,),
origins=(1.0,),
)
assert scaled_coordinates == (2.0,)
scaled_coordinates = aa.util.geometry.scaled_coordinates_1d_from(
pixel_coordinates_1d=(0,),
shape_slim=(3,),
pixel_scales=(3.0,),
origins=(3.0,),
)
assert scaled_coordinates == (0.0,)
scaled_coordinates = aa.util.geometry.scaled_coordinates_1d_from(
pixel_coordinates_1d=(1,),
shape_slim=(3,),
pixel_scales=(3.0,),
origins=(3.0,),
)
assert scaled_coordinates == (3.0,)
scaled_coordinates = aa.util.geometry.scaled_coordinates_1d_from(
pixel_coordinates_1d=(2,),
shape_slim=(3,),
pixel_scales=(3.0,),
origins=(3.0,),
)
assert scaled_coordinates == (6.0,)
class TestCoordinates2D:
def test__central_pixel__depends_on_shape_pixel_scale_and_origin(self):
central_pixel_coordinates = aa.util.geometry.central_pixel_coordinates_2d_from(
shape_native=(3, 3)
)
assert central_pixel_coordinates == (1, 1)
central_pixel_coordinates = aa.util.geometry.central_pixel_coordinates_2d_from(
shape_native=(3, 3)
)
assert central_pixel_coordinates == (1, 1)
central_pixel_coordinates = aa.util.geometry.central_pixel_coordinates_2d_from(
shape_native=(4, 4)
)
assert central_pixel_coordinates == (1.5, 1.5)
central_pixel_coordinates = aa.util.geometry.central_pixel_coordinates_2d_from(
shape_native=(4, 4)
)
assert central_pixel_coordinates == (1.5, 1.5)
def test__pixel_coordinates_2d_from(self):
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(1.0, -1.0),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (0, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(1.0, 1.0),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (0, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-1.0, -1.0),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (1, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-1.0, 1.0),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (1, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(3.0, -3.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
)
assert pixel_coordinates == (0, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(3.0, 0.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
)
assert pixel_coordinates == (0, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(3.0, 3.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
)
assert pixel_coordinates == (0, 2)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.0, -3.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
)
assert pixel_coordinates == (1, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.0, 0.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
)
assert pixel_coordinates == (1, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.0, 3.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
)
assert pixel_coordinates == (1, 2)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-3.0, -3.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
)
assert pixel_coordinates == (2, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-3.0, 0.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
)
assert pixel_coordinates == (2, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-3.0, 3.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
)
assert pixel_coordinates == (2, 2)
def test__pixel_coordinates_2d_from__scaled_are_pixel_corners(self):
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(1.99, -1.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (0, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(1.99, -0.01),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (0, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.01, -1.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (0, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.01, -0.01),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (0, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(2.01, 0.01),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (0, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(2.01, 1.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (0, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.01, 0.01),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (0, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.01, 1.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (0, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-0.01, -1.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (1, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-0.01, -0.01),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (1, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-1.99, -1.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (1, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-1.99, -0.01),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (1, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-0.01, 0.01),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (1, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-0.01, 1.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (1, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-1.99, 0.01),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (1, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-1.99, 1.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
)
assert pixel_coordinates == (1, 1)
def test__pixel_coordinates_2d_from___scaled_are_pixel_centres__nonzero_centre(
self,
):
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(2.0, 0.0),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (0, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(2.0, 2.0),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (0, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.0, 0.0),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (1, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.0, 2.0),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (1, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(6.0, 0.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert pixel_coordinates == (0, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(6.0, 3.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert pixel_coordinates == (0, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(6.0, 6.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert pixel_coordinates == (0, 2)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(3.0, 0.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert pixel_coordinates == (1, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(3.0, 3.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert pixel_coordinates == (1, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(3.0, 6.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert pixel_coordinates == (1, 2)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.0, 0.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert pixel_coordinates == (2, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.0, 3.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert pixel_coordinates == (2, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.0, 6.0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert pixel_coordinates == (2, 2)
def test__pixel_coordinates_2d_from__scaled_are_pixel_corners__nonzero_centre(
self,
):
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(2.99, -0.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (0, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(2.99, 0.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (0, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(1.01, -0.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (0, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(1.01, 0.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (0, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(3.01, 1.01),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (0, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(3.01, 2.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (0, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(1.01, 1.01),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (0, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(1.01, 2.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (0, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.99, -0.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (1, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.99, 0.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (1, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-0.99, -0.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (1, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-0.99, 0.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (1, 0)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.99, 1.01),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (1, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(0.99, 2.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (1, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-0.99, 1.01),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (1, 1)
pixel_coordinates = aa.util.geometry.pixel_coordinates_2d_from(
scaled_coordinates_2d=(-0.99, 2.99),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert pixel_coordinates == (1, 1)
def test__scaled_coordinates_2d_from___scaled_are_pixel_centres__nonzero_centre(
self,
):
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(0, 0), shape_native=(3, 3), pixel_scales=(3.0, 3.0)
)
assert scaled_coordinates == (3.0, -3.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(0, 1), shape_native=(3, 3), pixel_scales=(3.0, 3.0)
)
assert scaled_coordinates == (3.0, 0.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(0, 2), shape_native=(3, 3), pixel_scales=(3.0, 3.0)
)
assert scaled_coordinates == (3.0, 3.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(1, 0), shape_native=(3, 3), pixel_scales=(3.0, 3.0)
)
assert scaled_coordinates == (0.0, -3.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(1, 1), shape_native=(3, 3), pixel_scales=(3.0, 3.0)
)
assert scaled_coordinates == (0.0, 0.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(1, 2), shape_native=(3, 3), pixel_scales=(3.0, 3.0)
)
assert scaled_coordinates == (0.0, 3.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(2, 0), shape_native=(3, 3), pixel_scales=(3.0, 3.0)
)
assert scaled_coordinates == (-3.0, -3.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(2, 1), shape_native=(3, 3), pixel_scales=(3.0, 3.0)
)
assert scaled_coordinates == (-3.0, 0.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(2, 2), shape_native=(3, 3), pixel_scales=(3.0, 3.0)
)
assert scaled_coordinates == (-3.0, 3.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(0, 0),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert scaled_coordinates == (2.0, 0.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(0, 1),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert scaled_coordinates == (2.0, 2.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(1, 0),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert scaled_coordinates == (0.0, 0.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(1, 1),
shape_native=(2, 2),
pixel_scales=(2.0, 2.0),
origins=(1.0, 1.0),
)
assert scaled_coordinates == (0.0, 2.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(0, 0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert scaled_coordinates == (6.0, 0.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(0, 1),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert scaled_coordinates == (6.0, 3.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(0, 2),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert scaled_coordinates == (6.0, 6.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(1, 0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert scaled_coordinates == (3.0, 0.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(1, 1),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert scaled_coordinates == (3.0, 3.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(1, 2),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert scaled_coordinates == (3.0, 6.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(2, 0),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert scaled_coordinates == (0.0, 0.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(2, 1),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert scaled_coordinates == (0.0, 3.0)
scaled_coordinates = aa.util.geometry.scaled_coordinates_2d_from(
pixel_coordinates_2d=(2, 2),
shape_native=(3, 3),
pixel_scales=(3.0, 3.0),
origins=(3.0, 3.0),
)
assert scaled_coordinates == (0.0, 6.0)
class TestTransforms:
def test__transform_2d_grid_to_reference_frame(self):
grid_2d = np.array([[0.0, 1.0], [1.0, 1.0], [1.0, 0.0]])
transformed_grid_2d = aa.util.geometry.transform_grid_2d_to_reference_frame(
grid_2d=grid_2d, centre=(0.0, 0.0), angle=0.0
)
assert transformed_grid_2d == pytest.approx(
np.array([[0.0, 1.0], [1.0, 1.0], [1.0, 0.0]])
)
transformed_grid_2d = aa.util.geometry.transform_grid_2d_to_reference_frame(
grid_2d=grid_2d, centre=(0.0, 0.0), angle=45.0
)
assert transformed_grid_2d == pytest.approx(
np.array(
[
[-np.sqrt(2) / 2.0, np.sqrt(2) / 2.0],
[0.0, np.sqrt(2)],
[np.sqrt(2) / 2.0, np.sqrt(2) / 2.0],
]
)
)
transformed_grid_2d = aa.util.geometry.transform_grid_2d_to_reference_frame(
grid_2d=grid_2d, centre=(0.0, 0.0), angle=90.0
)
assert transformed_grid_2d == pytest.approx(
np.array([[-1.0, 0.0], [-1.0, 1.0], [0.0, 1.0]])
)
transformed_grid_2d = aa.util.geometry.transform_grid_2d_to_reference_frame(
grid_2d=grid_2d, centre=(0.0, 0.0), angle=180.0
)
assert transformed_grid_2d == pytest.approx(
np.array([[0.0, -1.0], [-1.0, -1.0], [-1.0, 0.0]])
)
transformed_grid_2d = aa.util.geometry.transform_grid_2d_to_reference_frame(
grid_2d=grid_2d, centre=(5.0, 10.0), angle=0.0
)
assert transformed_grid_2d == pytest.approx(
np.array([[-5.0, -9.0], [-4.0, -9.0], [-4.0, -10.0]])
)
transformed_grid_2d = aa.util.geometry.transform_grid_2d_to_reference_frame(
grid_2d=grid_2d, centre=(5.0, 10.0), angle=90.0
)
assert transformed_grid_2d == pytest.approx(
np.array([[9.0, -5.0], [9.0, -4.0], [10.0, -4.0]])
)
def test__transform_2d_grid_from_reference_frame(self):
grid_2d = np.array([[0.0, 1.0], [1.0, 1.0], [1.0, 0.0]])
transformed_grid_2d = aa.util.geometry.transform_grid_2d_from_reference_frame(
grid_2d=grid_2d, centre=(0.0, 0.0), angle=0.0
)
assert transformed_grid_2d == pytest.approx(
np.array([[0.0, 1.0], [1.0, 1.0], [1.0, 0.0]])
)
transformed_grid_2d = aa.util.geometry.transform_grid_2d_from_reference_frame(
grid_2d=grid_2d, centre=(0.0, 0.0), angle=45.0
)
assert transformed_grid_2d == pytest.approx(
np.array(
[
[np.sqrt(2) / 2.0, np.sqrt(2) / 2.0],
[np.sqrt(2), 0.0],
[np.sqrt(2) / 2.0, -np.sqrt(2) / 2.0],
]
)
)
transformed_grid_2d = aa.util.geometry.transform_grid_2d_from_reference_frame(
grid_2d=grid_2d, centre=(2.0, 2.0), angle=90.0
)
assert transformed_grid_2d == pytest.approx(
np.array([[3.0, 2.0], [3.0, 1.0], [2.0, 1.0]])
)
transformed_grid_2d = aa.util.geometry.transform_grid_2d_to_reference_frame(
grid_2d=grid_2d, centre=(8.0, 5.0), angle=137.0
)
original_grid_2d = aa.util.geometry.transform_grid_2d_from_reference_frame(
grid_2d=transformed_grid_2d, centre=(8.0, 5.0), angle=137.0
)
assert grid_2d == pytest.approx(original_grid_2d, 1.0e-4)
| [
"[email protected]"
] | |
5ea0cc825f93fc42950aba4b149681a669002a5c | 8f64d50494507fd51c0a51010b84d34c667bd438 | /BeautyForMe/myvenv/Lib/site-packages/phonenumbers/shortdata/region_AF.py | 840519d2de85f67d5d2e0916a6bd49c9e9b2a04f | [
"MIT"
] | permissive | YooInKeun/CAU_CSE_Capstone_3 | 5a4a61a916dc13c8635d25a04d59c21279678477 | 51405c4bed2b55661aa0708c8acea17fe72aa701 | refs/heads/master | 2022-12-11T15:39:09.721019 | 2021-07-27T08:26:04 | 2021-07-27T08:26:04 | 207,294,862 | 6 | 1 | MIT | 2022-11-22T04:52:11 | 2019-09-09T11:37:13 | Python | UTF-8 | Python | false | false | 937 | py | """Auto-generated file, do not edit by hand. AF metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_AF = PhoneMetadata(id='AF', country_code=None, international_prefix=None,
general_desc=PhoneNumberDesc(national_number_pattern='[14]\\d\\d(?:\\d{2})?', possible_length=(3, 5)),
toll_free=PhoneNumberDesc(national_number_pattern='1(?:02|19)', example_number='102', possible_length=(3,)),
emergency=PhoneNumberDesc(national_number_pattern='1(?:02|19)', example_number='102', possible_length=(3,)),
short_code=PhoneNumberDesc(national_number_pattern='1(?:02|19)|40404', example_number='102', possible_length=(3, 5)),
carrier_specific=PhoneNumberDesc(national_number_pattern='404\\d\\d', example_number='40400', possible_length=(5,)),
sms_services=PhoneNumberDesc(national_number_pattern='404\\d\\d', example_number='40400', possible_length=(5,)),
short_data=True)
| [
"[email protected]"
] | |
10f907d3baaaa51c57acbf31d33adc0870fafb74 | 25ebc03b92df764ff0a6c70c14c2848a49fe1b0b | /daily/20191121/example_jinja2/01inherit/main.py | f6a2a6121ddb3ef66ab19e547bff0ea2c8a5a968 | [] | no_license | podhmo/individual-sandbox | 18db414fafd061568d0d5e993b8f8069867dfcfb | cafee43b4cf51a321f4e2c3f9949ac53eece4b15 | refs/heads/master | 2023-07-23T07:06:57.944539 | 2023-07-09T11:45:53 | 2023-07-09T11:45:53 | 61,940,197 | 6 | 0 | null | 2022-10-19T05:01:17 | 2016-06-25T11:27:04 | Python | UTF-8 | Python | false | false | 249 | py | import os.path
from jinja2 import Environment, FileSystemLoader
here = os.path.dirname(__file__)
e = Environment(loader=FileSystemLoader(here))
t = e.get_template("main.j2")
print(t.render())
# Ahead-of-time compile every template in this directory to Python modules;
# zip=None writes plain .py files next to the sources instead of a zip archive.
e.compile_templates(here, zip=None, log_function=print)
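# For reference, an assumed pair of templates matching the "01inherit" folder
# name (the actual main.j2 is not shown in this file):
#   base.j2: {% block body %}default{% endblock %}
#   main.j2: {% extends "base.j2" %}{% block body %}hello{% endblock %}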
| [
"[email protected]"
] | |
20f7c68abb20697f37faf94315775c04cc2e88e0 | e47bc9571c59b1c6e8aeb4231a286ab8577802d4 | /easy/700-search-in-a-binary-search-tree.py | 80810f66956e8a17a8d0435cfb5517b67981d3fe | [
"MIT"
] | permissive | changmeng72/leecode_python3 | d0176502dfaf3c8b455ec491c72979dd25b66b3e | 8384f52f0dd74b06b1b6aefa277dde6a228ff5f3 | refs/heads/main | 2023-05-27T10:35:43.465283 | 2021-06-09T00:20:59 | 2021-06-09T00:20:59 | 375,127,027 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 899 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def searchBST(self, root: TreeNode, val: int) -> TreeNode:
        while root is not None:
            if root.val == val:
                return root
            if val < root.val:
                root = root.left
            else:
                root = root.right
        return root
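# Quick sanity sketch (assuming the TreeNode definition in the header):
#   root = TreeNode(4, TreeNode(2, TreeNode(1), TreeNode(3)), TreeNode(7))
#   assert Solution().searchBST(root, 2).val == 2
#   assert Solution().searchBST(root, 5) is None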
"""
class Solution:
def searchBST(self, root: TreeNode, val: int) -> TreeNode:
if root==None:
return None
if root.val==val:
return root
if val< root.val:
return self.searchBST(root.left,val)
else:
return self.searchBST(root.right,val)
""" | [
"[email protected]"
] | |
222d3a8b5fe6ead7ed774a31e3384f17c77c532b | e82b761f53d6a3ae023ee65a219eea38e66946a0 | /All_In_One/addons/io_scs_tools/internals/persistent/initialization.py | 7f4da8dcb5af84372c4c4687960620d3e72755b6 | [] | no_license | 2434325680/Learnbgame | f3a050c28df588cbb3b14e1067a58221252e2e40 | 7b796d30dfd22b7706a93e4419ed913d18d29a44 | refs/heads/master | 2023-08-22T23:59:55.711050 | 2021-10-17T07:26:07 | 2021-10-17T07:26:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,303 | py | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Copyright (C) 2013-2014: SCS Software
import bpy
import os
from bpy.app.handlers import persistent
from io_scs_tools.internals import preview_models as _preview_models
from io_scs_tools.internals.callbacks import open_gl as _open_gl_callback
from io_scs_tools.internals.callbacks import lighting_east_lock as _lighting_east_lock_callback
from io_scs_tools.internals.containers import config as _config_container
from io_scs_tools.internals.connections.wrappers import group as _connections_group_wrapper
from io_scs_tools.utils import get_scs_globals as _get_scs_globals
from io_scs_tools.utils import info as _info_utils
from io_scs_tools.utils.printout import lprint
@persistent
def initialise_scs_dict(scene):
"""Parts and Variants data initialisation (persistent).
Things which this function does:
1. copies all the settings to current world
2. checks object identities
3. updates shaders presets path and reloads them
Cases when it should be run:
1. Blender startup -> SCS tools needs to configured
2. Opening .blend file -> because all the configs needs to be moved to current world
3. addon reloading and enable/disable -> for SCS tools this is the same as opening Blender
:param scene: Current Blender Scene
:type scene: bpy.types.Scene
"""
# SCREEN CHECK...
if bpy.context.screen:
lprint("I Initialization of SCS scene, BT version: " + _info_utils.get_tools_version())
# NOTE: covers: start-up, reload, enable/disable and it should be immediately removed
# from handlers as soon as it's executed for the first time
if initialise_scs_dict in bpy.app.handlers.scene_update_post:
bpy.app.handlers.scene_update_post.remove(initialise_scs_dict)
# INITIALIZE CUSTOM CONNECTIONS DRAWING SYSTEM
_connections_group_wrapper.init()
# release lock as user might saved blender file during engaged lock.
# If that happens config lock property gets saved to blend file and if user opens that file again,
# lock will be still engaged and no settings could be applied without releasing lock here.
_config_container.release_config_lock()
# USE SETTINGS FROM CONFIG...
# NOTE: Reapplying the settings from config file to the currently opened Blender file datablock.
# The thing is, that every Blend file holds its own copy of SCS Global Settings from the machine on which it got saved.
# The SCS Global Settings needs to be overwritten upon each file load to reflect the settings from local config file,
# but also upon every SCS Project Base Path change.
_config_container.apply_settings()
# GLOBAL PATH CHECK...
if _get_scs_globals().scs_project_path != "":
if not os.path.isdir(_get_scs_globals().scs_project_path):
lprint("\nW The Project Path %r is NOT VALID!\n\tPLEASE SELECT A VALID PATH TO THE PROJECT BASE FOLDER.\n",
(_get_scs_globals().scs_project_path,))
# CREATE PREVIEW MODEL LIBRARY
_preview_models.init()
# ADD DRAW HANDLERS
_open_gl_callback.enable(mode=_get_scs_globals().drawing_mode)
# ENABLE LIGHTING EAST LOCK HANDLER
# Blender doesn't call update on properties when file is opened,
# so in case lighting east was locked in saved blend file, we have to manually enable callback for it
# On the other hand if user previously had east locked and now loaded the file without it,
# again we have to manually disable callback.
if _get_scs_globals().lighting_east_lock:
_lighting_east_lock_callback.enable()
else:
_lighting_east_lock_callback.disable()
        # lastly, notify the user if their Blender version is outdated
if not _info_utils.is_blender_able_to_run_tools():
message = "Your Blender version %s is outdated, all SCS Blender Tools functionalities were internally disabled.\n\t " \
"Please update Blender before continue, minimal required version for SCS Blender Tools is: %s!"
message = message % (_info_utils.get_blender_version()[0], _info_utils.get_required_blender_version())
# first report error with blender tools printing system
lprint("E " + message)
# then disable add-on as it's not usable in the case Blender is out-dated
bpy.ops.wm.addon_disable('INVOKE_DEFAULT', module="io_scs_tools")
            # and finally show a warning message as a popup menu so the user sees info about the outdated Blender
# As we don't have access to our 3D view report operator anymore,
# we have to register our ShowWarningMessage class back and invoke it.
from io_scs_tools.operators.wm import ShowWarningMessage
bpy.utils.register_class(ShowWarningMessage)
bpy.ops.wm.show_warning_message('INVOKE_DEFAULT',
is_modal=True,
title="SCS Blender Tools Initialization Problem",
message="\n\n" + message.replace("\t ", "") + "\n\n", # some nasty formatting for better visibility
width=580, # this is minimal width to properly fit in given message
height=bpy.context.window.height if bpy.context and bpy.context.window else 200)
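# Note: registration of this handler is expected to happen elsewhere in the
# add-on (assumed: bpy.app.handlers.scene_update_post.append(initialise_scs_dict));
# as seen above, the handler removes itself after its first run.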
| [
"[email protected]"
] | |
7a9a6945f9f95e49f44ee1506c40205fac83cef9 | e8274f167fd219ef78241ba8ea89e5d5875ed794 | /cloud/nova/nova/db/sqlalchemy/migration.py | 31c40234a4065b76995aba1d11758b1ed73855fc | [
"Apache-2.0"
] | permissive | virt2x/folsomCloud | 02db0147f7e0f2ab0375faf4f36ca08272084152 | e6fd612dd77f35a72739cf4d4750e9795c0fa508 | refs/heads/master | 2021-01-01T17:26:28.405651 | 2013-10-17T12:36:04 | 2013-10-17T12:36:04 | 13,647,787 | 0 | 1 | null | 2020-07-24T08:25:22 | 2013-10-17T12:10:24 | Python | UTF-8 | Python | false | false | 3,873 | py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import distutils.version as dist_version
import os
from nova.db import migration
from nova.db.sqlalchemy.session import get_engine
from nova import exception
from nova import flags
from nova.openstack.common import log as logging
import migrate
from migrate.versioning import util as migrate_util
import sqlalchemy
LOG = logging.getLogger(__name__)
@migrate_util.decorator
def patched_with_engine(f, *a, **kw):
url = a[0]
engine = migrate_util.construct_engine(url, **kw)
try:
kw['engine'] = engine
return f(*a, **kw)
finally:
if isinstance(engine, migrate_util.Engine) and engine is not url:
migrate_util.log.debug('Disposing SQLAlchemy engine %s', engine)
engine.dispose()
# TODO(jkoelker) When migrate 0.7.3 is released and nova depends
# on that version or higher, this can be removed
MIN_PKG_VERSION = dist_version.StrictVersion('0.7.3')
if (not hasattr(migrate, '__version__') or
dist_version.StrictVersion(migrate.__version__) < MIN_PKG_VERSION):
migrate_util.with_engine = patched_with_engine
# NOTE(jkoelker) Delay importing migrate until we are patched
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
_REPOSITORY = None
def db_sync(version=None):
if version is not None:
try:
version = int(version)
except ValueError:
raise exception.NovaException(_("version should be an integer"))
current_version = db_version()
repository = _find_migrate_repo()
if version is None or version > current_version:
return versioning_api.upgrade(get_engine(), repository, version)
else:
return versioning_api.downgrade(get_engine(), repository,
version)
def db_version():
repository = _find_migrate_repo()
try:
return versioning_api.db_version(get_engine(), repository)
except versioning_exceptions.DatabaseNotControlledError:
meta = sqlalchemy.MetaData()
engine = get_engine()
meta.reflect(bind=engine)
tables = meta.tables
if len(tables) == 0:
db_version_control(migration.INIT_VERSION)
return versioning_api.db_version(get_engine(), repository)
else:
# Some pre-Essex DB's may not be version controlled.
# Require them to upgrade using Essex first.
raise exception.NovaException(
_("Upgrade DB using Essex release first."))
def db_version_control(version=None):
repository = _find_migrate_repo()
versioning_api.version_control(get_engine(), repository, version)
return version
def _find_migrate_repo():
"""Get the path for the migrate repository."""
global _REPOSITORY
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'migrate_repo')
assert os.path.exists(path)
if _REPOSITORY is None:
_REPOSITORY = Repository(path)
return _REPOSITORY
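# Illustrative usage (assumed; in practice these helpers are normally driven by
# the nova-manage CLI rather than called directly):
#   db_sync()          # upgrade the schema to the latest revision
#   db_sync(82)        # migrate up or down to revision 82
#   current = db_version()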
| [
"[email protected]"
] | |
adb1de6d94b004b03c0ea92ee70b9e9286246180 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/HH3C-IP-ADDRESS-MIB.py | 16d5881816e772866058d16066514c3e11c3f183 | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 9,982 | py | #
# PySNMP MIB module HH3C-IP-ADDRESS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HH3C-IP-ADDRESS-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:14:33 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint")
hh3cCommon, = mibBuilder.importSymbols("HH3C-OID-MIB", "hh3cCommon")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddress")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
NotificationType, TimeTicks, Bits, Gauge32, ObjectIdentity, ModuleIdentity, Counter64, MibIdentifier, Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, Unsigned32, Integer32, IpAddress, iso = mibBuilder.importSymbols("SNMPv2-SMI", "NotificationType", "TimeTicks", "Bits", "Gauge32", "ObjectIdentity", "ModuleIdentity", "Counter64", "MibIdentifier", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Unsigned32", "Integer32", "IpAddress", "iso")
RowStatus, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "DisplayString", "TextualConvention")
hh3cIpAddrMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 25506, 2, 67))
hh3cIpAddrMIB.setRevisions(('2005-11-22 00:00',))
if mibBuilder.loadTexts: hh3cIpAddrMIB.setLastUpdated('200511220000Z')
if mibBuilder.loadTexts: hh3cIpAddrMIB.setOrganization('Hangzhou H3C Tech. Co., Ltd.')
hh3cIpAddressObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1))
hh3cIpAddressConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1))
hh3cIpAddrSetTable = MibTable((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 1), )
if mibBuilder.loadTexts: hh3cIpAddrSetTable.setStatus('current')
hh3cIpAddrSetEntry = MibTableRow((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 1, 1), ).setIndexNames((0, "HH3C-IP-ADDRESS-MIB", "hh3cIpAddrSetIfIndex"), (0, "HH3C-IP-ADDRESS-MIB", "hh3cIpAddrSetAddrType"), (0, "HH3C-IP-ADDRESS-MIB", "hh3cIpAddrSetAddr"))
if mibBuilder.loadTexts: hh3cIpAddrSetEntry.setStatus('current')
hh3cIpAddrSetIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: hh3cIpAddrSetIfIndex.setStatus('current')
hh3cIpAddrSetAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 1, 1, 2), InetAddressType())
if mibBuilder.loadTexts: hh3cIpAddrSetAddrType.setStatus('current')
hh3cIpAddrSetAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 1, 1, 3), InetAddress())
if mibBuilder.loadTexts: hh3cIpAddrSetAddr.setStatus('current')
hh3cIpAddrSetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 1, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hh3cIpAddrSetMask.setStatus('current')
hh3cIpAddrSetSourceType = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("assignedIp", 1))).clone('assignedIp')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hh3cIpAddrSetSourceType.setStatus('current')
hh3cIpAddrSetCatalog = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("primary", 1), ("sub", 2))).clone('primary')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hh3cIpAddrSetCatalog.setStatus('current')
hh3cIpAddrSetRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 1, 1, 7), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hh3cIpAddrSetRowStatus.setStatus('current')
hh3cIpAddrReadTable = MibTable((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 2), )
if mibBuilder.loadTexts: hh3cIpAddrReadTable.setStatus('current')
hh3cIpAddrReadEntry = MibTableRow((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 2, 1), ).setIndexNames((0, "HH3C-IP-ADDRESS-MIB", "hh3cIpAddrReadIfIndex"), (0, "HH3C-IP-ADDRESS-MIB", "hh3cIpAddrReadAddrType"), (0, "HH3C-IP-ADDRESS-MIB", "hh3cIpAddrReadAddr"))
if mibBuilder.loadTexts: hh3cIpAddrReadEntry.setStatus('current')
hh3cIpAddrReadIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: hh3cIpAddrReadIfIndex.setStatus('current')
hh3cIpAddrReadAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 2, 1, 2), InetAddressType())
if mibBuilder.loadTexts: hh3cIpAddrReadAddrType.setStatus('current')
hh3cIpAddrReadAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 2, 1, 3), InetAddress())
if mibBuilder.loadTexts: hh3cIpAddrReadAddr.setStatus('current')
hh3cIpAddrReadMask = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 2, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hh3cIpAddrReadMask.setStatus('current')
hh3cIpAddrReadSourceType = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("assignedIp", 1), ("cluster", 2), ("dhcp", 3), ("bootp", 4), ("negotiate", 5), ("unnumbered", 6), ("vrrp", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hh3cIpAddrReadSourceType.setStatus('current')
hh3cIpAddrReadCatalog = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("primary", 1), ("sub", 2), ("cluster", 3), ("vrrp", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hh3cIpAddrReadCatalog.setStatus('current')
hh3cIpv4AddrTable = MibTable((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 3), )
if mibBuilder.loadTexts: hh3cIpv4AddrTable.setStatus('current')
hh3cIpv4AddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 3, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: hh3cIpv4AddrEntry.setStatus('current')
hh3cIpv4AddrAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 3, 1, 1), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hh3cIpv4AddrAddr.setStatus('current')
hh3cIpv4AddrMask = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 3, 1, 2), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hh3cIpv4AddrMask.setStatus('current')
hh3cIpv4AddrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 25506, 2, 67, 1, 1, 3, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hh3cIpv4AddrRowStatus.setStatus('current')
hh3cIpAddrNotify = MibIdentifier((1, 3, 6, 1, 4, 1, 25506, 2, 67, 2))
hh3cIpAddrNotifyScalarObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 25506, 2, 67, 2, 1))
hh3cIpAddrNotifyIfIndex = MibScalar((1, 3, 6, 1, 4, 1, 25506, 2, 67, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hh3cIpAddrNotifyIfIndex.setStatus('current')
hh3cIpAddrOldIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 25506, 2, 67, 2, 1, 2), InetAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hh3cIpAddrOldIpAddress.setStatus('current')
hh3cIpAddrNewIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 25506, 2, 67, 2, 1, 3), InetAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hh3cIpAddrNewIpAddress.setStatus('current')
hh3cIpAddrNotifyObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 25506, 2, 67, 2, 2))
hh3cIpAddrNotifyObjectsPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 25506, 2, 67, 2, 2, 0))
hh3cIpAddressChangeNotify = NotificationType((1, 3, 6, 1, 4, 1, 25506, 2, 67, 2, 2, 0, 1)).setObjects(("HH3C-IP-ADDRESS-MIB", "hh3cIpAddrNotifyIfIndex"), ("HH3C-IP-ADDRESS-MIB", "hh3cIpAddrOldIpAddress"), ("HH3C-IP-ADDRESS-MIB", "hh3cIpAddrNewIpAddress"))
if mibBuilder.loadTexts: hh3cIpAddressChangeNotify.setStatus('current')
mibBuilder.exportSymbols("HH3C-IP-ADDRESS-MIB", hh3cIpAddrReadAddr=hh3cIpAddrReadAddr, hh3cIpAddrOldIpAddress=hh3cIpAddrOldIpAddress, hh3cIpAddrSetAddr=hh3cIpAddrSetAddr, hh3cIpAddrSetAddrType=hh3cIpAddrSetAddrType, hh3cIpAddrSetSourceType=hh3cIpAddrSetSourceType, hh3cIpAddrReadAddrType=hh3cIpAddrReadAddrType, hh3cIpAddressChangeNotify=hh3cIpAddressChangeNotify, hh3cIpAddrMIB=hh3cIpAddrMIB, hh3cIpAddrSetTable=hh3cIpAddrSetTable, hh3cIpv4AddrRowStatus=hh3cIpv4AddrRowStatus, hh3cIpAddrReadSourceType=hh3cIpAddrReadSourceType, hh3cIpAddrNewIpAddress=hh3cIpAddrNewIpAddress, hh3cIpAddrSetEntry=hh3cIpAddrSetEntry, hh3cIpAddrNotify=hh3cIpAddrNotify, hh3cIpAddrReadEntry=hh3cIpAddrReadEntry, hh3cIpv4AddrMask=hh3cIpv4AddrMask, hh3cIpAddrNotifyObjectsPrefix=hh3cIpAddrNotifyObjectsPrefix, hh3cIpv4AddrTable=hh3cIpv4AddrTable, hh3cIpAddressObjects=hh3cIpAddressObjects, hh3cIpv4AddrAddr=hh3cIpv4AddrAddr, hh3cIpAddrNotifyIfIndex=hh3cIpAddrNotifyIfIndex, hh3cIpAddrSetCatalog=hh3cIpAddrSetCatalog, hh3cIpAddressConfig=hh3cIpAddressConfig, PYSNMP_MODULE_ID=hh3cIpAddrMIB, hh3cIpAddrSetRowStatus=hh3cIpAddrSetRowStatus, hh3cIpAddrReadCatalog=hh3cIpAddrReadCatalog, hh3cIpAddrNotifyObjects=hh3cIpAddrNotifyObjects, hh3cIpAddrReadTable=hh3cIpAddrReadTable, hh3cIpAddrNotifyScalarObjects=hh3cIpAddrNotifyScalarObjects, hh3cIpAddrSetMask=hh3cIpAddrSetMask, hh3cIpAddrSetIfIndex=hh3cIpAddrSetIfIndex, hh3cIpAddrReadIfIndex=hh3cIpAddrReadIfIndex, hh3cIpv4AddrEntry=hh3cIpv4AddrEntry, hh3cIpAddrReadMask=hh3cIpAddrReadMask)
| [
"[email protected]"
] | |
533cc85f23f1baa1d29a5f2efd413bc56d46819a | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/bob/4b1ce8efbadf4ae08434623c951e8a03.py | 23c859cb84bcc66a0a8d02897ece826b32d14a4c | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 589 | py | def hey(question):
"""
Inputs:
question (string) - A question to ask Bob.
Outputs:
answer (string) - Bob's response to the question.
Bob answers 'Sure.' if you ask him a question.
He answers 'Whoa, chill out!' if you yell at him.
He says 'Fine. Be that way!' if you address him without actually saying anything.
He answers 'Whatever.' to anything else.
"""
if (question.strip() == ''):
answer = 'Fine. Be that way!'
elif (question.isupper()):
answer = 'Whoa, chill out!'
elif (question[-1] == '?'):
answer = 'Sure.'
else:
answer = 'Whatever.'
return answer
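# Illustrative calls (assumed inputs; return values follow the rules above):
#   hey('How are you?')   -> 'Sure.'
#   hey('WATCH OUT!')     -> 'Whoa, chill out!'
#   hey('   ')            -> 'Fine. Be that way!'
#   hey('Tom-ay-to.')     -> 'Whatever.'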
| [
"[email protected]"
] | |
2e218c49d71cdead57bbbf5de7b87264ce4b5167 | 58cea667b91271f8d61a70b3eff93ef56814d99a | /reservations/urls.py | c8433daa084bbe716861fd4932cf700c5f47ed5d | [] | no_license | BKLemontea/AirBnB_CloneCoding | 06e900d0c2f1735ebc539172569205956bab7b52 | b88f9bb6d1bca6a10cb48206872c0a392ce436bf | refs/heads/master | 2022-12-11T00:17:47.066796 | 2020-05-04T12:56:28 | 2020-05-04T12:56:28 | 235,279,197 | 0 | 0 | null | 2022-12-10T20:47:58 | 2020-01-21T07:21:38 | Python | UTF-8 | Python | false | false | 333 | py | from django.urls import path
from . import views
app_name = "reservations"
urlpatterns = [
path("create/<int:room>/<int:year>-<int:month>-<int:day>/", views.create, name="create"),
path("<int:pk>/", views.ReservationDetail.as_view(), name="detail"),
path("<int:pk>/<str:verb>/", views.edit_reservation, name="edit"),
]
| [
"[email protected]"
] | |
0dd093c2129194b810bfb2ebb4684574b589f6ef | f3df69d552c0749d054f77a1f739bb13c70b23c0 | /Boundary Detection and Object Matching/p6.py | e4864c8a0f2b6bed08c7671778204230ac45e152 | [] | no_license | thomas-liao/Computer-Vision-Projects | a13e5b1122f7089c5ee49c60d87f9db7b0887aa7 | 66a6cf269dbcad23831f15dfb5695cc1c1f2084b | refs/heads/master | 2021-10-24T01:14:03.387306 | 2019-03-21T06:57:27 | 2019-03-21T06:57:27 | 109,339,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,601 | py | import math
import numpy as np
def p6(edge_image, edge_thresh): # return [edge_thresh_image, hough_image]
height = len(edge_image)
width = len(edge_image[0])
ret_img = [[0 for x in range(width)] for y in range(height)]
    theta_points = 500  # angular resolution of the Hough accumulator
for i in range(height):
for j in range(width):
if edge_image[i][j] < edge_thresh:
continue
else:
ret_img[i][j] = 255
rou_max = int(math.ceil(math.sqrt(width**2 + height**2)))
    # rou: from -rou_max to rou_max; the accumulator below allocates one row per
    # integer rou value, offset by rou_max so indices are non-negative
    # thetas: from -pi/2 to pi/2
thetas = np.linspace(-math.pi / 2, math.pi / 2, theta_points)
# parameter space
parameter_space = [[0 for x in range(len(thetas))] for y in range(2*rou_max)]
for i in range(len(ret_img)):
for j in range(len(ret_img[0])):
if ret_img[i][j] == 0:
continue
for k in range(len(thetas)):
rou = int(round(j*math.cos(thetas[k]) + i*math.sin(thetas[k])) + rou_max)
parameter_space[rou][k] += 1
# scale parameter space to range 0 ~ 255
max_vote = 0
m = len(parameter_space)
n = len(parameter_space[0])
for i in range(m):
for j in range(n):
k = parameter_space[i][j]
if k > max_vote:
max_vote = k
for i in range(m):
for j in range(n):
parameter_space[i][j] = int(math.floor(255.0 * parameter_space[i][j] / max_vote))
return ret_img, parameter_space
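# Illustrative usage (assumed inputs; edge_image would be a 2-D list of gradient
# magnitudes produced by an earlier edge-detection step in this project):
#   thresholded, hough = p6(edge_image, edge_thresh=40)
#   # thresholded: binary (0/255) edge map; hough: (rou, theta) accumulator
#   # scaled to 0..255, with one row per integer rou in [-rou_max, rou_max].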
| [
"[email protected]"
] | |
4f27d04ab2fceaec0820eec914370a8e5c241bbf | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /large_case_or_government/small_time.py | b02f0a7c20c3165a30a6c3a143c2d12f005cf4e1 | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 215 | py |
#! /usr/bin/env python
def place(str_arg):
year(str_arg)
print('know_person_by_different_point')
def year(str_arg):
print(str_arg)
if __name__ == '__main__':
place('go_long_place_into_high_eye')
| [
"[email protected]"
] | |
d34e57dc3fad50b488fcbad29d3b477d302341fd | e2348943fcb5ea22137187c23cd39f7138bed725 | /practise/form_demo/forms.py | 21eee2a72f75d43acb27ad3ce278652a542ef653 | [] | no_license | Gaurav41/Django-Practise | 6bf8d7c58ad5624990aa8d707f72e504f778dbfa | 36754e54d0ae2be86855620916cc9262e382e597 | refs/heads/master | 2023-07-26T06:51:38.002692 | 2021-09-07T14:19:29 | 2021-09-07T14:19:29 | 403,638,583 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,486 | py | from django import forms
from django.forms.fields import CharField
from django.forms import ModelForm
from .models import Customer
# https://docs.djangoproject.com/en/3.2/topics/forms/
class NameForm(forms.Form):
your_name = forms.CharField(label='Your name', max_length=10)
your_age = forms.IntegerField(label='Your age')
message = forms.CharField(widget=forms.Textarea)
email = forms.EmailField()
booleanField = forms.BooleanField(required=False)
class CustomerForm(ModelForm):
# Extra Validator if any
password= CharField(min_length=8,required=False)
class Meta:
model = Customer
fields = '__all__'
labels = {'first_name':'Enter yout first name','last_name':'Enter yout last name'}
error_messages ={'first_name':{'required':'Name required'},
'last_name':{'required':'Last name required'},
'password':{'required':'Password is required'}
}
widgets={'first_name':forms.TextInput,
'last_name':forms.TextInput(attrs={'id':'ls_name','class':'myclass',
'placeholder':'Write your last name here',}),
'password':forms.PasswordInput()
}
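# Illustrative usage (assumed field values; the actual Customer fields come from
# the model, since fields = '__all__'):
#   form = CustomerForm(data={'first_name': 'Ada', 'last_name': 'Lovelace',
#                             'password': 'secret123'})
#   if form.is_valid():
#       form.save()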
| [
"[email protected]"
] | |
8a173bcb6121ee95083280933b1dad376b3685f5 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_cockatoo.py | 44cd2b849cd52c15c8be04daf76e0becb8196798 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 381 | py |
#calss header
class _COCKATOO():
def __init__(self,):
self.name = "COCKATOO"
self.definitions = [u'an Australian bird with a decorative crest (= growth of feathers) on its head and a powerful beak']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
] | |
3d629e559a04bc09cbc9a4cf103d4b96bee693c3 | ba4f68fb01aa32970dadea67cc8d039b4c0f6d9e | /python/armstrong_numbers.py | a4ac5f11afc0ef9619fc0db4c3370fac8ba44b30 | [] | no_license | campbellmarianna/Code-Challenges | 12a7808563e36b1a2964f10ae64618c0be41b6c0 | 12e21c51665d81cf1ea94c2005f4f9d3584b66ec | refs/heads/master | 2021-08-03T23:23:58.297437 | 2020-05-15T07:13:46 | 2020-05-15T07:13:46 | 168,234,828 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 914 | py | # Exercism Problem: Armstrong Numbers
# Find out if a number is the sum of its own digits each raised to the power of
# number of digits
'''
Technical Interview Problem Solving Strategy
1. Generate reasonable test inputs
2. Understand the problem = Solve it!
a. Simplify the problem if needed
3. Find a pattern in your solution
4. Make a plan - Write pseudocode
5. Follow a plan - Write real code
6. Check your work - Test your code
'''
def is_armstrong_number(number): # 153
    total = 0  # using `total` avoids shadowing the built-in sum()
    string_version = str(number)
    power = len(string_version)
    # get the sum of each digit raised to the power of the number of digits
    for i in range(0, power):
        individual_num = int(string_version[i])
        total += individual_num**power
    # check if the sum equals the given number
    if total == number:
        return True
    else:
        return False
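# A few more illustrative checks (values worked out by hand):
#   is_armstrong_number(9)    -> True   (9**1 == 9)
#   is_armstrong_number(10)   -> False  (1**2 + 0**2 == 1 != 10)
#   is_armstrong_number(9474) -> True   (9**4 + 4**4 + 7**4 + 4**4 == 9474)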
if __name__ == '__main__':
print(is_armstrong_number(153))
| [
"[email protected]"
] | |
753ec27efafeb634d3084d7139d39eb30acf94ec | bd696223aaf5404987df11832b4c17c916b9690f | /py_sample/django_rest_tutorial/snippets/permissions.py | 6c42c1910d183fa5df19ddac3d74f8bee85a6473 | [] | no_license | wararaki718/scrapbox3 | 000a285477f25c1e8a4b6017b6ad06c76f173342 | 9be5dc879a33a1988d9f6611307c499eec125dc2 | refs/heads/master | 2023-06-16T08:46:32.879231 | 2021-07-17T14:12:54 | 2021-07-17T14:12:54 | 280,590,887 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py | from rest_framework import permissions
class IsOwnerOrReadOnly(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
if request.method in permissions.SAFE_METHODS:
return True
return obj.owner == request.user
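# Illustrative usage (assumed view class, mirroring the DRF tutorial this app follows):
#   class SnippetDetail(generics.RetrieveUpdateDestroyAPIView):
#       permission_classes = [permissions.IsAuthenticatedOrReadOnly,
#                             IsOwnerOrReadOnly]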
| [
"[email protected]"
] | |
3bb12ba94fcbec290decd82b09f9fec9e679e487 | 6cc37dfc44880f57823bb9523ea5f8206d5e3f22 | /python_OOP/labs_and_homeworks/03_attributes_and_methods_exercise/gym_04/project/customer.py | ff5ecd94c9f32fe85c41758770709746349e3848 | [] | no_license | dimitar-daskalov/SoftUni-Courses | 70d265936fd86712a7bfe0586ec6ebd1c7384f77 | 2054bc58ffb5f41ed86f5d7c98729b101c3b1368 | refs/heads/main | 2023-05-31T06:44:35.498399 | 2021-07-11T10:16:08 | 2021-07-11T10:16:08 | 322,896,365 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 489 | py | class Customer:
id_count = 0
def __init__(self, name: str, address: str, email: str):
self.name = name
self.address = address
self.email = email
self.id = self.get_next_id()
@staticmethod
def get_next_id():
Customer.id_count += 1
next_id = Customer.id_count
return next_id
def __repr__(self):
return f"Customer <{self.id}> {self.name}; " \
f"Address: {self.address}; Email: {self.email}" | [
"[email protected]"
] | |
aa54ea0160cb8d5f0aa9751e2ec10309491e862a | f933e6a2c30003df771f902924880e5f531ba57f | /src/deltapy/communication/pyro/__init__.py | 0af9dbc76e1753625c3cdf247208b8a21b8e7e44 | [] | no_license | hamed1361554/sportmagazine-server | 861e624912ffc2e623b027e693111d4bcb10a709 | a2ee333d2a4fe9821f3d24ee15d458f226ffcde5 | refs/heads/master | 2020-04-17T13:57:00.624899 | 2017-07-25T03:30:24 | 2017-07-25T03:30:24 | 67,815,549 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 436 | py | '''
Created on Sep 15, 2009
@author: Abi.Mohammadi & Majid.Vesal
'''
from deltapy.packaging.package import Package
import deltapy.communication.services as communication
from deltapy.communication.pyro.factory import PyroFactory
class PyroPackage(Package):
def load(self):
Package.load(self)
communication.register_factory('pyro', PyroFactory())
def unload(self):
Package.unload(self) | [
"[email protected]"
] | |
962b0aaba50613ca402feaf5fa33193831e19e07 | 8f2f83bc1381d4ce7fc968aec72fa400aae4155d | /api/network/requestmodels/__init__.py | 4a07f17722a06027b304b663cfb2a276909b7f29 | [
"MIT"
] | permissive | nifrali/pyStratis | c855fb33be77064c9a741255e324003319a4789f | b1a80bf155b7941e9ef8fc2ea93fa1b08a0c4366 | refs/heads/master | 2023-06-20T16:02:30.863589 | 2021-07-01T19:24:18 | 2021-07-01T19:24:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 149 | py | from .clearbannedrequest import ClearBannedRequest
from .disconnectpeerrequest import DisconnectPeerRequest
from .setbanrequest import SetBanRequest
| [
"[email protected]"
] | |
e62e850eb5cc7ca5d2fff2ccb3c73f00f13a4362 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/cirq_new/cirq_program/startCirq_Class575.py | d1049a69516cdaa111be622c5a515f85597de048 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,102 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=4
# total number=18
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=1
c.append(cirq.H.on(input_qubit[1])) # number=2
c.append(cirq.Y.on(input_qubit[2])) # number=13
c.append(cirq.H.on(input_qubit[1])) # number=7
c.append(cirq.H.on(input_qubit[2])) # number=3
c.append(cirq.H.on(input_qubit[3])) # number=4
c.append(cirq.H.on(input_qubit[0])) # number=10
c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=11
c.append(cirq.H.on(input_qubit[0])) # number=12
c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=6
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=8
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=9
c.append(cirq.Y.on(input_qubit[1])) # number=14
c.append(cirq.Y.on(input_qubit[1])) # number=15
c.append(cirq.X.on(input_qubit[2])) # number=16
c.append(cirq.X.on(input_qubit[2])) # number=17
# circuit end
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
if __name__ == '__main__':
qubit_count = 4
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
circuit_sample_count =2820
info = cirq.final_state_vector(circuit)
qubits = round(log2(len(info)))
frequencies = {
np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3)
for i in range(2 ** qubits)
}
writefile = open("../data/startCirq_Class575.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close() | [
"[email protected]"
] | |
c37692c9ddac4769c86a3c1e7eb5200ea316963f | fbfcdc0bf7ea2b85f962504488b8f4c8d8504683 | /recruiter_15575/wsgi.py | 9a9ab76df567e542412a53d35f7a2a2a4dc2df1f | [] | no_license | crowdbotics-apps/recruiter-15575 | 58a82d1f1aad4f13c4044e06f32903afcd864a11 | 440fb9165fb0165246601d4733618bc800565ba0 | refs/heads/master | 2022-12-14T08:19:32.672896 | 2020-04-07T23:12:22 | 2020-04-07T23:12:22 | 253,933,271 | 0 | 0 | null | 2021-06-12T06:01:42 | 2020-04-07T23:12:06 | Python | UTF-8 | Python | false | false | 407 | py | """
WSGI config for recruiter_15575 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'recruiter_15575.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
03fada5c669da9504b6aa423e88fff759e2862bc | b17f269a24e98f6a28c54cf49569c305b4f1dac3 | /src/nix_ray/__init__.py | 50c69155f0044cf26776bd7a17a94faf26e40df9 | [] | no_license | qknight/nix-ray | 981d41e6706b455e3e8c1c9572e595a654833617 | 08b0ca2d139abbf700c04483f2ec3046aa284a49 | refs/heads/master | 2016-09-10T17:35:25.964041 | 2013-07-27T10:21:55 | 2013-07-27T10:23:01 | 11,617,136 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 809 | py | """xin - prototypical unified cli for nix
"""
import tpv.cli
import tpv.pkg_resources
from plumbum import FG
from plumbum.cmd import ls, grep, wc, git
class NixRay(tpv.cli.Command):
"""nix-ray
Assistant for debugging the phases of a nix expression
"""
VERSION = 0
entry_point_group="nix_ray.commands"
verbose = tpv.cli.Flag(["v", "verbose"],
help="If given, I will be very talkative")
def __call__(self, filename=None):
self.help()
# chain = ls['-la'] | grep['a'] | wc
# print(chain)
# chain & FG
# if self.verbose:
# print "Yadda " * 200
@tpv.cli.switch(['f', 'foo'], int)
def foo(self, bar):
"""foomagic
"""
self.bar = bar
print(bar)
app = NixRay.run
| [
"[email protected]"
] | |
800a07049a25c104919188f12cd07941255d2647 | 7e574c25506a7b4f68b873fa63d2d38f52c7c56e | /utils/correspondence_tools/correspondence_plotter.py | 64ec2758a8b598aec99938a40c9ba5d79121d7f1 | [] | no_license | vcowwy/SuperGlue_paddle | 9912e6dfda34dfc42cfb34d4e2de8eaed827ed15 | 4cd5280dbb228498e2b3c6df1f4621c320af47c7 | refs/heads/master | 2023-08-21T15:58:20.122712 | 2021-10-29T11:10:26 | 2021-10-29T11:10:26 | 420,937,959 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,466 | py | import matplotlib.image as mpimg
import matplotlib.pyplot as plt
from matplotlib.patches import Circle
def plot_correspondences(images, uv_a, uv_b, use_previous_plot=None,
circ_color='g', show=True):
if use_previous_plot is None:
fig, axes = plt.subplots(nrows=2, ncols=2)
else:
fig, axes = use_previous_plot[0], use_previous_plot[1]
fig.set_figheight(10)
fig.set_figwidth(15)
pixel_locs = [uv_a, uv_b, uv_a, uv_b]
axes = axes.flat[0:]
if use_previous_plot is not None:
axes = [axes[1], axes[3]]
images = [images[1], images[3]]
pixel_locs = [pixel_locs[1], pixel_locs[3]]
for ax, img, pixel_loc in zip(axes[0:], images, pixel_locs):
ax.set_aspect('equal')
if isinstance(pixel_loc[0], int) or isinstance(pixel_loc[0], float):
circ = Circle(pixel_loc, radius=10, facecolor=circ_color,
edgecolor='white', fill=True, linewidth=2.0, linestyle='solid')
ax.add_patch(circ)
else:
for x, y in zip(pixel_loc[0], pixel_loc[1]):
                circ = Circle((x, y), radius=10, facecolor=circ_color,
                              edgecolor='white', fill=True, linewidth=2.0,
                              linestyle='solid')
ax.add_patch(circ)
ax.imshow(img)
if show:
plt.show()
return None
else:
return fig, axes
def plot_correspondences_from_dir(log_dir, img_a, img_b, uv_a, uv_b,
use_previous_plot=None, circ_color='g', show=True):
img1_filename = log_dir + '/images/' + img_a + '_rgb.png'
img2_filename = log_dir + '/images/' + img_b + '_rgb.png'
img1_depth_filename = log_dir + '/images/' + img_a + '_depth.png'
img2_depth_filename = log_dir + '/images/' + img_b + '_depth.png'
images = [img1_filename, img2_filename, img1_depth_filename,
img2_depth_filename]
images = [mpimg.imread(x) for x in images]
return plot_correspondences(images, uv_a, uv_b, use_previous_plot=\
use_previous_plot, circ_color=circ_color, show=show)
def plot_correspondences_direct(img_a_rgb, img_a_depth, img_b_rgb,
img_b_depth, uv_a, uv_b, use_previous_plot=None, circ_color='g', show=True
):
images = [img_a_rgb, img_b_rgb, img_a_depth, img_b_depth]
return plot_correspondences(images, uv_a, uv_b, use_previous_plot=\
use_previous_plot, circ_color=circ_color, show=show)
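# Illustrative usage (assumed arrays; uv_a/uv_b are (u_list, v_list) pixel tuples):
#   fig, axes = plot_correspondences_direct(rgb_a, depth_a, rgb_b, depth_b,
#                                           uv_a, uv_b, show=False)
#   plot_correspondences_direct(rgb_a, depth_a, rgb_b, depth_b, uv_a2, uv_b2,
#                               use_previous_plot=(fig, axes), circ_color='b')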
| [
"[email protected]"
] | |
7e3bfaabf0e7e46b0a25c2d41ce8a0a1e4281a74 | 925a067ff1473cf45ad8aa9cf99db4311a7799ed | /archive/get_data_modules_3.py | c9f61dd2cc18f30b1ac3542e9c449adb11015e7e | [] | no_license | ORNL-Fusion/Collector-Probes | fd7250738e797befa06fad487e9d2498b61436a5 | 16e15a0d3dcaa8a88da25aaf3ea126e9eb2a5f96 | refs/heads/master | 2022-09-03T01:02:39.520659 | 2022-08-28T13:28:53 | 2022-08-28T13:28:53 | 95,914,293 | 1 | 3 | null | 2019-11-14T15:00:39 | 2017-06-30T18:34:29 | Python | UTF-8 | Python | false | false | 7,404 | py | import openpyxl as xl
from MDSplus import *
import sys
# Used to get location of .scn files.
from Tkinter import Tk
from tkFileDialog import askopenfilename
def get_RBS(tree, letter_probes, shot):
# Grab the RBS data.
print "\nLoading RBS Excel file... This may take a minute."
rbs_file = xl.load_workbook("RBS_excel_file.xlsx", data_only=True)
print "RBS Excel file loaded."
rbs_probe_list = rbs_file.get_sheet_names()
# Remove unecessary sheets.
rbs_probe_list.remove('refs')
rbs_probe_list.remove('RCX')
rbs_probe_list.remove('SUMMARY')
rbs_probe_list.remove('Sheet6')
# Check if RBS data available for the selected probes.
for letter_probe in letter_probes:
tmp_name = letter_probe + 'U' + str(shot)
if (tmp_name not in rbs_probe_list):
print 'RBS data not available for ' + tmp_name + '.'
tmp_name = letter_probe + 'D' + str(shot)
if (tmp_name not in rbs_probe_list):
print 'RBS data not available for ' + tmp_name + '.'
# Collect data from Excel sheet and put them into a signal.
for letter_probe in letter_probes:
for u_or_d in ['U', 'D']:
name = letter_probe + u_or_d + str(shot)
# Pass through if there isn't RBS data.
if (name not in rbs_probe_list): continue
print "Assigning RBS data to " + name + " probe..."
# Grab the corresponding RBS sheet.
sheet = rbs_file.get_sheet_by_name(name)
# Fill in run data, microcoul, w_counts and w_areal density.
count = 0
for row in 'BCDEFGHIJKLMNOPQRSTUV':
count = count + 1
if count < 10:
count_str = '0' + str(count)
else:
count_str = str(count)
# Run data.
rbs_cells = sheet[row + '2': row + '513']
rbs_vals = []
for index in range(0,512):
rbs_vals.append(rbs_cells[index][0].value)
# If "NoneType" (i.e. blank cell), skip over.
if (rbs_vals[0] is None):
print "Column " + row + " blank."
continue
path = '\\DP_PROBES::TOP.' + letter_probe + '.' + letter_probe + u_or_d + '.RBS.RUN' + count_str + ':' + 'SIGNAL'
my_node = tree.getNode(path)
#sig_expr = Data.compile("BUILD_SIGNAL($VALUE, BUILD_WITH_UNITS($1,'COUNTS'), \
# BUILD_WITH_UNITS($2,'CHANNEL'))", rbs_vals, range(1,513))
#my_node.putData(sig_expr)
raw = Int32Array(rbs_vals)
raw = raw.setUnits('Counts')
dim = Int32Array(range(1,513))
dim = dim.setUnits('Channel')
sig = Signal('$VALUE', raw, dim)
my_node.putData(sig)
# W Counts data.
wCount = sheet[row + '515'].value
path = '\\DP_PROBES::TOP.' + letter_probe + '.' + letter_probe + u_or_d + '.RBS.RUN' + count_str + ':' + 'w_counts'
my_node = tree.getNode(path)
wCount = Int32(wCount)
wCount = wCount.setUnits('Counts')
my_node.putData(wCount)
# Microcoulomb data.
microcol = sheet[row + '516'].value
path = '\\DP_PROBES::TOP.' + letter_probe + '.' + letter_probe + u_or_d + '.RBS.RUN' + count_str + ':' + 'microcol'
my_node = tree.getNode(path)
my_node.putData(microcol)
# W Areal Density
w_areal = sheet[row + '517'].value
w_areal_error = sheet[row + '518'].value
path = '\\DP_PROBES::TOP.' + letter_probe + '.' + letter_probe + u_or_d + '.RBS.RUN' + count_str + ':' + 'w_areal'
my_node = tree.getNode(path)
w_areal = Float64(w_areal)
w_areal = w_areal.setUnits('W/cm^2')
w_areal_error = Float64(w_areal_error)
            w_areal = w_areal.setError(w_areal_error)
#expr = Data.compile("BUILD_WITH_UNITS(BUILD_WITH_ERROR($1, $2), 'W/cm^2')", w_areal, w_areal_error)
my_node.putData(w_areal)
# Location
loc = sheet[row + '525'].value
path = '\\DP_PROBES::TOP.' + letter_probe + '.' + letter_probe + u_or_d + '.RBS.RUN' + count_str + ':' + 'loc'
my_node = tree.getNode(path)
loc = Int32(loc)
loc = loc.setUnits('mm')
my_node.putData(loc)
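# Illustrative usage (assumed shot number; `tree` is an editable MDSplus tree,
# e.g. tree = Tree('dp_probes', shot, 'EDIT')):
#   get_RBS(tree, ['A', 'B', 'C'], 167196)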
def get_ICPMS(tree, letter_probes, shot):
# Ask user which probe data is being inserted for.
another = True
while (another == True):
while (True):
print "Which probe is ICP-MS data being added for? Please select from the following: \nAD, AU, BD, BU, CD, CU"
print "Enter 'q' to quit."
probe = raw_input("--> ")
if (probe == 'q'): break
elif probe not in ['AD', 'AU', 'BD', 'BU', 'CD', 'CU']:
print "Error: Incorrect entry. Please try again."
else: break
# Get the location of the ICPMS measurements for the samples.
if (probe == 'q'): break
locations = input("Enter in measured locations, separated by commas: ")
concentrations = input("Enter in concentrations used for this probe, separated by commas: ")
# Get the .scn files for each ppb at each location.
conc_files_all = []
for location in locations:
conc_files = []
for conc in concentrations:
print "Select .scn file for " + str(conc) + " ppb at " + str(location) + " mm..."
Tk().withdraw()
filename = askopenfilename()
conc_files.append(filename)
conc_files_all.append(conc_files)
# Get the standard used for this probe.
standards = []
print "Select the five standard .scn files used."
for value in range(1,6):
print "Standard " + str(value) + "..."
standards.append(askopenfilename())
# Start filling in the tree. Starting with the locations.
for number in range(1, len(locations)+1):
print "Adding data for location " + str(location[number-1])
path = '\\DP_PROBES::TOP.' + probe[0] + '.' + probe + '.ICPMS.LOC' + str(number) + ':POSITION'
my_node = tree.getNode(path)
my_node.putData(locations[number-1])
# Then fill in concentration values.
for sample in range(1, len(concentrations)+1):
path = '\\DP_PROBES::TOP.' + probe[0] + '.' + probe + '.ICPMS.LOC' + str(number) + '.SPECTRUM' + str(sample) + ':CONC'
my_node = tree.getNode(path)
my_node.putData(concentrations[sample-1])
        # Then the .scn files: one SPECTRUM node per (location, concentration) pair.
        for number, conc_files in enumerate(conc_files_all, 1):
            for sample, n in enumerate(conc_files, 1):
                print "Adding file: " + str(n)
                with open(n) as f:
                    content = f.readlines()
                content = [x.strip() for x in content]
                counts = [float(x) for x in content[4:len(content)-2]]
                path = '\\DP_PROBES::TOP.' + probe[0] + '.' + probe + '.ICPMS.LOC' + str(number) + '.SPECTRUM' + str(sample) + ':DATA'
                my_node = tree.getNode(path)
                sig_expr = Data.compile("BUILD_SIGNAL($VALUE, BUILD_WITH_UNITS($1,'COUNTS'), \
                    BUILD_WITH_UNITS($2,'CHANNEL'))", counts, range(0,len(counts)))
                my_node.putData(sig_expr)
# Then the standard .scn files.
count = 0
for m in standards:
count = count + 1
print "Adding standard: " + str(m)
with open(m) as f:
content = f.readlines()
content = [x.strip() for x in content]
counts = [float(x) for x in content[4:len(content)-2]]
path = '\\DP_PROBES::TOP.' + probe[0] + '.' + probe + '.ICPMS.LOC' + str(number) + '.STANDARDS.STANDARD' + str(count) + ':DATA'
my_node = tree.getNode(path)
sig_expr = Data.compile("BUILD_SIGNAL($VALUE, BUILD_WITH_UNITS($1,'COUNTS'), \
BUILD_WITH_UNITS($2,'CHANNEL'))", counts, range(0,len(counts)))
my_node.putData(sig_expr)
print ""
# Ask if user wants to select data for another probe.
print "Insert data for another probe (y/n)?"
answer = None
while (answer not in ['y', 'n']):
answer = raw_input("--> ")
if (answer == 'y'):
another = True
break
elif (answer == 'n'):
another = False
break
else:
print "Please answer (y/n)."
| [
"[email protected]"
] | |
d5a97378f8e2eb8a234eefd79c834da56ef3a6b1 | 79ee8affe61807c0d732418b74e8b21447e64342 | /todo_list/migrations/0001_initial.py | 342d94714a86342deab7d3b9b3fccad667a5f783 | [] | no_license | ayushkr07/todo_list_app | 59e6b2ce804927bf65d910408f8c9e53052bc8d8 | 07fd51bbfd395e0a08126578046687b74c894dc4 | refs/heads/master | 2020-04-28T10:07:20.748492 | 2019-03-18T05:08:18 | 2019-03-18T05:08:18 | 175,191,927 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 551 | py | # Generated by Django 2.1.7 on 2019-03-13 17:49
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='List',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('item', models.CharField(max_length=200)),
('completed', models.BooleanField(default=False)),
],
),
]
| [
"[email protected]"
] | |
77a55aceafc61a2b332a9c7a43ed54432760f692 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnundisguisedli.py | 1d2ffad0e1f3ca7c736fb64ed10fc23340bdbd4d | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 43 | py | ii = [('WadeJEB.py', 1), ('MackCNH.py', 1)] | [
"[email protected]"
] | |
fa648d0af3f80e130d85710fa227f2d494b2f82d | 6206ad73052b5ff1b6690c225f000f9c31aa4ff7 | /Code/Checking Existence of Edge Length Limited Paths.py | dbffe95d8d1cced22bb8ba46105d264463746769 | [] | no_license | mws19901118/Leetcode | 7f9e3694cb8f0937d82b6e1e12127ce5073f4df0 | 752ac00bea40be1e3794d80aa7b2be58c0a548f6 | refs/heads/master | 2023-09-01T10:35:52.389899 | 2023-09-01T03:37:22 | 2023-09-01T03:37:22 | 21,467,719 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,517 | py | class UnionFind: #Union find.
def __init__(self, x: int):
self.label = x
self.parent = []
def find(self) -> 'UnionFind':
if not self.parent:
return self
self.parent = self.parent.find()
return self.parent
def union(self, uf: 'UnionFind') -> None:
if self.find().label != uf.find().label:
self.find().parent = uf.find()
class Solution:
def distanceLimitedPathsExist(self, n: int, edgeList: List[List[int]], queries: List[List[int]]) -> List[bool]:
        sortedQueries = sorted([(x, y, q, i) for i, (x, y, q) in enumerate(queries)], key = lambda x: x[2]) #Sort queries by the limit in each query in ascending order and keep the original index.
        edgeList.sort(key = lambda x: x[2]) #Sort edges by weight in ascending order.
        ufs = [UnionFind(i) for i in range(n)] #Create a union find for each node.
        result = [False] * len(queries) #Initialize result.
        index = 0 #Initialize the pointer traversing edge list.
        for x, y, q, i in sortedQueries: #Traverse sortedQueries.
while index < len(edgeList) and edgeList[index][2] < q: #Traverse edge list while current edge has smaller weight than the query limit.
ufs[edgeList[index][0]].union(ufs[edgeList[index][1]]) #Union the 2 nodes of the edge.
index += 1
result[i] = ufs[x].find().label == ufs[y].find().label #There is a path if parent of x and parent of y has same label, and all the edges in path are smaller than query limit.
return result #Return result.
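# Illustrative check (adapted from the LeetCode example):
#   s = Solution()
#   s.distanceLimitedPathsExist(3, [[0, 1, 2], [1, 2, 4], [2, 0, 8], [1, 0, 16]], [[0, 1, 2], [0, 2, 5]])
#   -> [False, True]  (no 0-1 path with all edges < 2; path 0-1-2 works under limit 5)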
| [
"[email protected]"
] | |
f9aa8e3db126e5369da93cb33fa60275690f08dd | 91cff2fb42de0f20d2acebf22266bfe185aba9f1 | /build/pyrobot/robots/LoCoBot/locobot_navigation/base_navigation/catkin_generated/pkg.develspace.context.pc.py | 0531b4bd8bb7d38a5866c291b0d3923dd1afdba1 | [] | no_license | Tiga002/PyRobot_V2 | b98b47a6c2015715c150e3df6617f22783472350 | a72373cee6cff1baab7e248b4b5ea5811a666cec | refs/heads/master | 2023-01-05T07:08:29.072177 | 2019-12-20T10:15:36 | 2019-12-20T10:15:36 | 229,241,878 | 0 | 0 | null | 2023-01-04T13:43:05 | 2019-12-20T10:16:53 | Common Lisp | UTF-8 | Python | false | false | 391 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "move_base".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "base_navigation"
PROJECT_SPACE_DIR = "/home/developer/low_cost_ws/devel"
PROJECT_VERSION = "0.0.0"
| [
"[email protected]"
] | |
66977c45cda0eb5e23cabfc7c44c6ef0bfb39a3f | 819160b5c8992aa53ad3446010eac85bdaacb814 | /trident/models/tensorflow_resnet.py | c3e1f64bcb6d3c11a793e5ab5f3976aa39c1aab5 | [
"MIT"
] | permissive | sohoscripts/trident | 5e1d7cd9fabc34830a312743087b6b5f262bcd27 | 700fc9638d4fd7915f655546bfbe3092aaf695d1 | refs/heads/master | 2023-01-27T12:18:00.511840 | 2020-11-30T00:24:52 | 2020-11-30T00:24:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,406 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import inspect
import itertools
import math
import os
from functools import reduce
from functools import wraps
from itertools import repeat
import tensorflow as tf
from trident.backend.common import *
from trident.backend.tensorflow_backend import *
from trident.backend.tensorflow_ops import *
from trident.data.image_common import *
from trident.data.utils import download_model_from_google_drive,download_file_from_google_drive
from trident.layers.tensorflow_activations import get_activation, Identity, Relu
from trident.layers.tensorflow_blocks import *
from trident.layers.tensorflow_layers import *
from trident.layers.tensorflow_normalizations import get_normalization, BatchNorm
from trident.layers.tensorflow_pooling import GlobalAvgPool2d, MaxPool2d
from trident.optims.tensorflow_trainer import *
__all__ = ['basic_block','bottleneck', 'ResNet','ResNet50','ResNet101','ResNet152']
_session = get_session()
_epsilon=_session.epsilon
_trident_dir=_session.trident_dir
dirname = os.path.join(_trident_dir, 'models')
if not os.path.exists(dirname):
try:
os.makedirs(dirname)
except OSError:
# Except permission denied and potential race conditions
# in multi-threaded environments.
pass
def basic_block(num_filters=64,base_width=64,strides=1,expansion = 4,conv_shortcut=False,use_bias=False,name=None):
shortcut = Identity()
if strides>1 or conv_shortcut is True:
shortcut =Conv2d_Block((1,1),num_filters=num_filters,strides=strides,auto_pad=True,padding_mode='zero',normalization='batch',activation=None,use_bias=use_bias,name=name + '_downsample')
return ShortCut2d(Sequential(Conv2d_Block((3,3),num_filters=num_filters,strides=strides,auto_pad=True,padding_mode='zero',normalization='batch',activation='relu',use_bias=use_bias,name=name + '_0_conv'),
Conv2d_Block((3,3),num_filters=num_filters,strides=1,auto_pad=True,padding_mode='zero',normalization='batch',activation=None,use_bias=use_bias,name=name + '_1_conv')),
shortcut,activation='relu',name=name)
def bottleneck(num_filters=64,strides=1,expansion = 4,conv_shortcut=True,use_bias=False,name=None):
#width = int(num_filters * (base_width / 64.)) * 1#groups'
shortcut = Identity()
shortcut_name='0'
if conv_shortcut is True:
shortcut =Conv2d_Block((1,1),num_filters=num_filters*expansion,strides=strides,auto_pad=True,padding_mode='zero',normalization='batch',activation=None,use_bias=use_bias)
shortcut_name = '0'
return ShortCut2d({shortcut_name:shortcut,
'1':Sequential(Conv2d_Block((1,1),num_filters=num_filters ,strides=strides,auto_pad=True,padding_mode='zero',normalization='batch',activation='relu',use_bias=use_bias),
Conv2d_Block((3, 3), num_filters=num_filters , strides=1, auto_pad=True,padding_mode='zero',normalization='batch', activation='relu',use_bias=use_bias,name=name),
Conv2d_Block((1,1),num_filters=num_filters*expansion,strides=1,auto_pad=True,padding_mode='zero',normalization='batch',activation=None,use_bias=use_bias,name=name)),
},activation='relu',name=name)
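# Illustrative (assumed) shapes for a stage-entry bottleneck in NHWC layout:
#   blk = bottleneck(num_filters=128, strides=2, conv_shortcut=True, name='layer2_1')
#   # maps (N, H, W, C) -> (N, H/2, W/2, 128 * expansion), i.e. 512 output channels,
#   # with the 1x1 conv shortcut matching the residual branch.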
def ResNet(block, layers, input_shape=(224, 224,3), num_classes=1000, use_bias=False, include_top=True, model_name='',
**kwargs):
"""Instantiates the ResNet, ResNetV2, and ResNeXt architecture.
Args
block: a function that returns output tensor for the stacked residual blocks.
        layers: list of integer, the number of repeat units in each block.
input_shape: optional shape tuple, only to be specified
if `include_top` is False (otherwise the input shape
has to be `(224, 224, 3)`
            It should have exactly 3 input channels.
num_classes: optional number of classes to classify images
into, only to be specified if `include_top` is True, and
if no `weights` argument is specified.
use_bias: whether to use biases for convolutional layers or not
(True for ResNet and ResNetV2, False for ResNeXt).
include_top: whether to include the fully-connected layer at the top of the network.
model_name: string, model name.
    Returns
        A trident ImageClassificationModel instance.
    Raises
        ValueError: in case of invalid input shape.
"""
    def _make_layer(block, num_filters, blocklayers, strides=1, dilate=False,use_bias=use_bias,layer_name=''):
        stage_layers = OrderedDict()  # blocks for this stage (kept distinct from the outer `layers` argument)
        stage_layers['0']=block(num_filters=num_filters, strides=strides, expansion = 4, conv_shortcut=True,use_bias=use_bias, name=layer_name+'1')
        for k in range(1, blocklayers):
            stage_layers['{0}'.format(k)]=block(num_filters=num_filters, strides=1, expansion = 4, conv_shortcut=False, use_bias=use_bias,name=layer_name+'{0}'.format(k+1))
        layers_block=Sequential(stage_layers)
        layers_block._name=layer_name
        return layers_block
flow_list=[]
resnet = Sequential()
resnet.add_module('conv1',Conv2d_Block((7,7),64,strides=2,use_bias=use_bias,auto_pad=True,padding_mode='zero',normalization='batch',activation='relu',name='first_block'))
resnet.add_module('maxpool',(MaxPool2d((3,3),strides=2,auto_pad=True,padding_mode='zero')))
resnet.add_module('layer1',(_make_layer(block, 64, layers[0],strides=1, dilate=None,use_bias=use_bias,layer_name='layer1' )))
resnet.add_module('layer2',(_make_layer(block, 128, layers[1], strides=2, dilate=None,use_bias=use_bias,layer_name='layer2' )))
resnet.add_module('layer3',(_make_layer(block, 256, layers[2], strides=2, dilate=None,use_bias=use_bias,layer_name='layer3' )))
resnet.add_module('layer4' ,(_make_layer(block, 512, layers[3], strides=2, dilate=None,use_bias=use_bias,layer_name='layer4' )))
resnet.add_module('avg_pool',GlobalAvgPool2d(name='avg_pool'))
if include_top:
resnet.add_module('fc',Dense(num_classes,activation=None,name='fc'))
resnet.add_module('softmax', SoftMax(name='softmax'))
resnet._name=model_name
model=ImageClassificationModel(input_shape=input_shape,output=resnet)
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)) ,'imagenet_labels1.txt'), 'r', encoding='utf-8-sig') as f:
labels = [l.rstrip() for l in f]
model.class_names=labels
input_np_shape=to_numpy(input_shape)
model.preprocess_flow=[resize((input_np_shape[0],input_np_shape[1]),keep_aspect=True), to_bgr(), normalize([103.939, 116.779, 123.68], [1, 1, 1])]
#model.summary()
return model
#
# def ResNet18(include_top=True,
# weights='imagenet',
# input_shape=None,
# classes=1000,
# **kwargs):
# if input_shape is not None and len(input_shape)==3:
# input_shape=tuple(input_shape)
# else:
# input_shape=(3, 224, 224)
# resnet18 = ResNet(basic_block, [2, 2, 2, 2], input_shape, model_name='resnet18')
def ResNet50(include_top=True,
pretrained=True,
freeze_features=False,
input_shape=None,
classes=1000,
**kwargs):
if input_shape is not None and len(input_shape)==3:
input_shape=tuple(input_shape)
else:
input_shape=(224, 224,3)
input_shape=to_tensor(input_shape)
resnet50 =ResNet(bottleneck, [3, 4, 6, 3], input_shape,num_classes=classes,include_top=include_top, model_name='resnet50')
if pretrained==True:
download_model_from_google_drive('1vReSW_l8fldyYQ6ay5HCYFGoMaGbdW2T',dirname,'resnet50_tf.pth')
recovery_model=load(os.path.join(dirname,'resnet50_tf.pth'))
recovery_model = fix_layer(recovery_model)
if freeze_features:
recovery_model.trainable = False
recovery_model.fc.trainable = True
recovery_model.eval()
if include_top==False:
recovery_model.remove_at(-1)
recovery_model.remove_at(-1)
else:
if classes!=1000:
recovery_model.remove_at(-1)
recovery_model.remove_at(-1)
recovery_model.add_module('fc', Dense(classes, activation=None, name='fc'))
recovery_model.add_module('softmax', SoftMax(name='softmax'))
recovery_model.signature=None
if recovery_model.signature != recovery_model._signature:
recovery_model.signature = recovery_model._signature
resnet50.model=recovery_model
if resnet50.signature!=resnet50.model.signature:
resnet50.signature = resnet50.model.signature
return resnet50
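# Illustrative usage (assumed arguments):
#   resnet50 = ResNet50(pretrained=False, input_shape=(224, 224, 3), classes=10)
#   # resnet50.model is the underlying Sequential network; the resize/to_bgr/
#   # normalize steps configured in ResNet() live in resnet50.preprocess_flow.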
def ResNet101(include_top=True,
pretrained=True,
freeze_features=False,
input_shape=None,
classes=1000,
**kwargs):
if input_shape is not None and len(input_shape)==3:
input_shape=tuple(input_shape)
else:
input_shape=(224, 224,3)
input_shape = to_tensor(input_shape)
resnet101 =ResNet(bottleneck, [3, 4, 23, 3], input_shape,num_classes=classes,include_top=include_top, use_bias=True,model_name='resnet101')
if pretrained==True:
download_model_from_google_drive('13QYdFX3CvsNiegi-iUX1PUC0KKKgPNwr',dirname,'resnet101_tf.pth')
recovery_model=load(os.path.join(dirname,'resnet101_tf.pth'))
recovery_model = fix_layer(recovery_model)
if freeze_features:
recovery_model.trainable = False
recovery_model.fc.trainable = True
recovery_model.eval()
if include_top == False:
recovery_model.remove_at(-1)
recovery_model.remove_at(-1)
else:
if classes != 1000:
recovery_model.remove_at(-1)
recovery_model.remove_at(-1)
recovery_model.add_module('fc', Dense(classes, activation=None, name='fc'))
recovery_model.add_module('softmax', SoftMax(name='softmax'))
recovery_model.signature = None
if recovery_model.signature != recovery_model._signature:
recovery_model.signature = recovery_model._signature
resnet101.model = recovery_model
if resnet101.signature != resnet101.model.signature:
resnet101.signature = resnet101.model.signature
return resnet101
def ResNet152(include_top=True,
pretrained=True,
freeze_features=False,
input_shape=None,
classes=1000,
**kwargs):
if input_shape is not None and len(input_shape)==3:
input_shape=tuple(input_shape)
else:
input_shape=(224, 224,3)
input_shape = to_tensor(input_shape)
resnet152 =ResNet(bottleneck, [3, 8, 36, 3], input_shape,num_classes=classes,include_top=include_top, model_name='resnet152')
if pretrained==True:
download_model_from_google_drive('1TeVBB5ynW9E4_EgxIdjugLT8oaXnQH_c',dirname,'resnet152.pth')
recovery_model=load(os.path.join(dirname,'resnet152.pth'))
recovery_model = fix_layer(recovery_model)
if freeze_features:
recovery_model.trainable = False
recovery_model.fc.trainable = True
recovery_model.eval()
if include_top == False:
recovery_model.remove_at(-1)
recovery_model.remove_at(-1)
else:
if classes != 1000:
recovery_model.remove_at(-1)
recovery_model.remove_at(-1)
recovery_model.add_module('fc', Dense(classes, activation=None, name='fc'))
recovery_model.add_module('softmax', SoftMax(name='softmax'))
recovery_model.signature = None
if recovery_model.signature != recovery_model._signature:
recovery_model.signature = recovery_model._signature
resnet152.model=recovery_model
if resnet152.signature != resnet152.model.signature:
resnet152.signature = resnet152.model.signature
return resnet152
#
#
# resnet34=ResNet(basic_block, [3, 4, 6, 3], (3, 224, 224))
# resnet50=ResNet(bottleneck, [3, 4, 6, 3], (3, 224, 224))
# resnet101=ResNet(bottleneck, [3, 4, 23, 3], (3, 224, 224))
# resnet152=ResNet(bottleneck, [3, 8, 36, 3], (3, 224, 224))
| [
"[email protected]"
] | |
bdd295f75d50ad198736a7cf6f05a69a0ad5c018 | 8f994b92a9a4eafccb9767cbf93396b486817e8a | /datacardInputs/inputs_sig_ratio_z4l_mass4l.py | 2b219fe0fe47f5edb78fc0040bc40829a75dee2f | [] | no_license | HengneHiggs/HZZ4lFidXsec | c9dda7f1c11aeb8f2bebcee11915be7809b6bd99 | a3b3586d61d79298d953b87c31e567a98566418c | refs/heads/master | 2021-01-13T01:59:15.003951 | 2014-12-13T00:22:00 | 2014-12-13T00:22:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,216 | py | acc = {'SMZ4l_2e2mu_mass4l_genbin0_recobin0': 0.008919096693404195, 'qqZZst_4mu_mass4l_genbin0_recobin0': 0.02954182651629709, 'SMZ4l_4mu_mass4l_genbin0_recobin0': 0.02308311469386178, 'qqZZtchan_4l_mass4l_genbin0_recobin0': 0.000494641297076523, 'qqZZtchan_4mu_mass4l_genbin0_recobin0': 0.000815745098899432, 'SMZ4l_4l_mass4l_genbin0_recobin0': 0.013908572021055646, 'qqZZtchan_2e2mu_mass4l_genbin0_recobin0': 0.00037367183848153226, 'qqZZst_4l_mass4l_genbin0_recobin0': 0.018726546751245143, 'SMZ4l_4e_mass4l_genbin0_recobin0': 0.01619731103648311, 'qqZZtchan_4e_mass4l_genbin0_recobin0': 0.0005482476888718841, 'qqZZst_4e_mass4l_genbin0_recobin0': 0.02110748594695014, 'qqZZst_2e2mu_mass4l_genbin0_recobin0': 0.012982798009967975}
dacc = {'SMZ4l_2e2mu_mass4l_genbin0_recobin0': 0.0015930582650839603, 'qqZZst_4mu_mass4l_genbin0_recobin0': 0.004348613066068139, 'SMZ4l_4mu_mass4l_genbin0_recobin0': 0.0038567340884207396, 'qqZZtchan_4l_mass4l_genbin0_recobin0': 8.826465957932545e-05, 'qqZZtchan_4mu_mass4l_genbin0_recobin0': 0.0002605841486435906, 'SMZ4l_4l_mass4l_genbin0_recobin0': 0.0014508951779616743, 'qqZZtchan_2e2mu_mass4l_genbin0_recobin0': 9.93035997687213e-05, 'qqZZst_4l_mass4l_genbin0_recobin0': 0.0016794228128230752, 'SMZ4l_4e_mass4l_genbin0_recobin0': 0.003242046892494513, 'qqZZtchan_4e_mass4l_genbin0_recobin0': 0.0002008875737510541, 'qqZZst_4e_mass4l_genbin0_recobin0': 0.003691727074032068, 'qqZZst_2e2mu_mass4l_genbin0_recobin0': 0.001918064383634538}
eff = {'SMZ4l_2e2mu_mass4l_genbin0_recobin0': 0.5324374509227061, 'qqZZst_4mu_mass4l_genbin0_recobin0': 0.7647118253545526, 'SMZ4l_4mu_mass4l_genbin0_recobin0': 0.7637169141387499, 'qqZZtchan_4l_mass4l_genbin0_recobin0': 0.5785148382372584, 'qqZZtchan_4mu_mass4l_genbin0_recobin0': 0.7682675885370512, 'SMZ4l_4l_mass4l_genbin0_recobin0': 0.5707380627114262, 'qqZZtchan_2e2mu_mass4l_genbin0_recobin0': 0.5552834954547099, 'qqZZst_4l_mass4l_genbin0_recobin0': 0.5727388751657205, 'SMZ4l_4e_mass4l_genbin0_recobin0': 0.34417475614024295, 'qqZZtchan_4e_mass4l_genbin0_recobin0': 0.3730916641924613, 'qqZZst_4e_mass4l_genbin0_recobin0': 0.35090161488425176, 'qqZZst_2e2mu_mass4l_genbin0_recobin0': 0.5395884133863983}
deff = {'SMZ4l_2e2mu_mass4l_genbin0_recobin0': 0.08951800409536552, 'qqZZst_4mu_mass4l_genbin0_recobin0': 0.06338317136288672, 'SMZ4l_4mu_mass4l_genbin0_recobin0': 0.07180902011464198, 'qqZZtchan_4l_mass4l_genbin0_recobin0': 0.08813579192252341, 'qqZZtchan_4mu_mass4l_genbin0_recobin0': 0.13484050683810947, 'SMZ4l_4l_mass4l_genbin0_recobin0': 0.051996547838851825, 'qqZZtchan_2e2mu_mass4l_genbin0_recobin0': 0.13208539113649417, 'qqZZst_4l_mass4l_genbin0_recobin0': 0.04478497591739029, 'SMZ4l_4e_mass4l_genbin0_recobin0': 0.09587512405027539, 'qqZZtchan_4e_mass4l_genbin0_recobin0': 0.17725771325282302, 'qqZZst_4e_mass4l_genbin0_recobin0': 0.08436717785344361, 'qqZZst_2e2mu_mass4l_genbin0_recobin0': 0.07412026269393789}
outinratio = {'SMZ4l_2e2mu_mass4l_genbin0_recobin0': 0.07949113576480786, 'qqZZst_4mu_mass4l_genbin0_recobin0': 0.09236481944081944, 'SMZ4l_4mu_mass4l_genbin0_recobin0': 0.08334927391081061, 'qqZZtchan_4l_mass4l_genbin0_recobin0': 0.16824291610371592, 'qqZZtchan_4mu_mass4l_genbin0_recobin0': 0.12439507549020318, 'SMZ4l_4l_mass4l_genbin0_recobin0': 0.08835778500419493, 'qqZZtchan_2e2mu_mass4l_genbin0_recobin0': 0.20137293759918887, 'qqZZst_4l_mass4l_genbin0_recobin0': 0.10911794093859707, 'SMZ4l_4e_mass4l_genbin0_recobin0': 0.12154954853725738, 'qqZZtchan_4e_mass4l_genbin0_recobin0': 0.19325332383013585, 'qqZZst_4e_mass4l_genbin0_recobin0': 0.13928460826840344, 'qqZZst_2e2mu_mass4l_genbin0_recobin0': 0.11875061143924305}
doutinratio = {'SMZ4l_2e2mu_mass4l_genbin0_recobin0': 0.07202625387850395, 'qqZZst_4mu_mass4l_genbin0_recobin0': 0.054276677409211625, 'SMZ4l_4mu_mass4l_genbin0_recobin0': 0.05812533020451309, 'qqZZtchan_4l_mass4l_genbin0_recobin0': 0.1040355183264885, 'qqZZtchan_4mu_mass4l_genbin0_recobin0': 0.13635640275886945, 'SMZ4l_4l_mass4l_genbin0_recobin0': 0.04312059428665133, 'qqZZtchan_2e2mu_mass4l_genbin0_recobin0': 0.17544373835094842, 'qqZZst_4l_mass4l_genbin0_recobin0': 0.041616537051247626, 'SMZ4l_4e_mass4l_genbin0_recobin0': 0.127004400345593, 'qqZZtchan_4e_mass4l_genbin0_recobin0': 0.2881491790632618, 'qqZZst_4e_mass4l_genbin0_recobin0': 0.11887774798881327, 'qqZZst_2e2mu_mass4l_genbin0_recobin0': 0.07378800732966806}
nfid = {'SMZ4l_2e2mu_mass4l_genbin0_recobin0': 31.06615695842457, 'qqZZst_4mu_mass4l_genbin0_recobin0': 44.78679409764795, 'SMZ4l_4mu_mass4l_genbin0_recobin0': 34.99508415148472, 'qqZZtchan_4l_mass4l_genbin0_recobin0': 31.390061776594678, 'qqZZtchan_4mu_mass4l_genbin0_recobin0': 9.791709946163232, 'SMZ4l_4l_mass4l_genbin0_recobin0': 90.61710768831315, 'qqZZtchan_2e2mu_mass4l_genbin0_recobin0': 14.154301413268227, 'qqZZst_4l_mass4l_genbin0_recobin0': 122.00716946490783, 'SMZ4l_4e_mass4l_genbin0_recobin0': 24.55586657840387, 'qqZZtchan_4e_mass4l_genbin0_recobin0': 7.444050417163216, 'qqZZst_4e_mass4l_genbin0_recobin0': 31.999916995567084, 'qqZZst_2e2mu_mass4l_genbin0_recobin0': 45.2204583716928}
nfs = {'SMZ4l_2e2mu_mass4l_genbin0_recobin0': -34395.85636710699, 'qqZZst_4mu_mass4l_genbin0_recobin0': 1516.0468860292303, 'SMZ4l_4mu_mass4l_genbin0_recobin0': -10487.347261693856, 'qqZZtchan_4l_mass4l_genbin0_recobin0': 63460.25283800457, 'qqZZtchan_4mu_mass4l_genbin0_recobin0': 12003.394147723086, 'SMZ4l_4l_mass4l_genbin0_recobin0': -56945.05432940632, 'qqZZtchan_2e2mu_mass4l_genbin0_recobin0': 37878.96211495683, 'qqZZst_4l_mass4l_genbin0_recobin0': 6515.198508598254, 'SMZ4l_4e_mass4l_genbin0_recobin0': -12061.850700605468, 'qqZZtchan_4e_mass4l_genbin0_recobin0': 13577.896575324661, 'qqZZst_4e_mass4l_genbin0_recobin0': 1516.0458747191924, 'qqZZst_2e2mu_mass4l_genbin0_recobin0': 3483.105747849831}
nrecofid = {'SMZ4l_2e2mu_mass4l_genbin0_recobin0': 16.540785420908268, 'qqZZst_4mu_mass4l_genbin0_recobin0': 34.24899106619087, 'SMZ4l_4mu_mass4l_genbin0_recobin0': 26.72633767819778, 'qqZZtchan_4l_mass4l_genbin0_recobin0': 18.15961651094422, 'qqZZtchan_4mu_mass4l_genbin0_recobin0': 7.522653387993087, 'SMZ4l_4l_mass4l_genbin0_recobin0': 51.71863249054054, 'qqZZtchan_2e2mu_mass4l_genbin0_recobin0': 7.859649964479121, 'qqZZst_4l_mass4l_genbin0_recobin0': 69.87824900148476, 'SMZ4l_4e_mass4l_genbin0_recobin0': 8.451509391434493, 'qqZZtchan_4e_mass4l_genbin0_recobin0': 2.77731315847201, 'qqZZst_4e_mass4l_genbin0_recobin0': 11.228822549906504, 'qqZZst_2e2mu_mass4l_genbin0_recobin0': 24.40043538538739}
nreconotfid = {'SMZ4l_2e2mu_mass4l_genbin0_recobin0': 1.3148458195499737, 'qqZZst_4mu_mass4l_genbin0_recobin0': 3.1634018758589577, 'SMZ4l_4mu_mass4l_genbin0_recobin0': 2.227620839772925, 'qqZZtchan_4l_mass4l_genbin0_recobin0': 3.0552268371264426, 'qqZZtchan_4mu_mass4l_genbin0_recobin0': 0.9357810360860327, 'SMZ4l_4l_mass4l_genbin0_recobin0': 4.569743810310152, 'qqZZtchan_2e2mu_mass4l_genbin0_recobin0': 1.582720801848521, 'qqZZst_4l_mass4l_genbin0_recobin0': 7.624970647436594, 'SMZ4l_4e_mass4l_genbin0_recobin0': 1.0272771509872536, 'qqZZtchan_4e_mass4l_genbin0_recobin0': 0.5367249991918888, 'qqZZst_4e_mass4l_genbin0_recobin0': 1.5640021501791423, 'qqZZst_2e2mu_mass4l_genbin0_recobin0': 2.8975666213984947}
| [
"[email protected]"
] | |
6c702bd35453fddd8cd392dcb6b4206937b473df | 17b70c6444ab0f2ca1bfd7315f70d9ce39d7064f | /blog/migrations/0005_replycomment.py | 6c8fbc2cc4ec2c4091b343bcc229bfe40ebb3394 | [] | no_license | GannTrader/django_blog | 7de873cdae9d561c8afdb03a80d658ebb625f189 | 53157c049e31b96cbd5f9bfc1b909062b6382313 | refs/heads/master | 2022-12-10T12:04:47.291119 | 2020-09-13T07:55:04 | 2020-09-13T07:55:04 | 295,065,089 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 986 | py | # Generated by Django 2.2 on 2020-09-13 07:50
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('blog', '0004_auto_20200913_1436'),
]
operations = [
migrations.CreateModel(
name='ReplyComment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=255)),
('email', models.EmailField(max_length=254)),
('reply', models.TextField()),
('created_at', models.DateTimeField(auto_now_add=True)),
('status', models.CharField(choices=[('active', 'active'), ('inactive', 'inactive')], default='inactive', max_length=255)),
('comment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Comment')),
],
),
]
| [
"="
] | = |
a4a78d5183b570dffa756439c5f4df49bbf81d7d | d11a5d080ff0f31a56fdd8edbd0e40d6b45bb546 | /cerberus/tests/test_legacy.py | 59bd7b845e4de4599100c919860269030c6331c1 | [
"ISC",
"Python-2.0"
] | permissive | pyeve/cerberus | f9458f9e7c9a851f2c7eac248ce9293526d42ea1 | 8e1adba83749e2f1f84f9e2f99d56c2710a36293 | refs/heads/1.3.x | 2023-08-19T08:30:32.140320 | 2023-08-09T19:02:54 | 2023-08-09T19:02:54 | 6,155,413 | 2,364 | 202 | ISC | 2023-08-09T11:18:33 | 2012-10-10T10:03:49 | Python | UTF-8 | Python | false | false | 30 | py | # -*- coding: utf-8 -*-
pass
| [
"[email protected]"
] | |
bbde6b3bf2b4db5905785b96fe1034677c867327 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/AlipayTradeRepaybillCreateModel.py | 96df27422c98e20818899a35633bf9ef183f4a11 | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 4,348 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.RepayTradeDetail import RepayTradeDetail
class AlipayTradeRepaybillCreateModel(object):
def __init__(self):
self._bill_amount = None
self._bill_product = None
self._out_bill_no = None
self._repay_expire_date = None
self._repay_trade_details = None
self._user_id = None
@property
def bill_amount(self):
return self._bill_amount
@bill_amount.setter
def bill_amount(self, value):
self._bill_amount = value
@property
def bill_product(self):
return self._bill_product
@bill_product.setter
def bill_product(self, value):
self._bill_product = value
@property
def out_bill_no(self):
return self._out_bill_no
@out_bill_no.setter
def out_bill_no(self, value):
self._out_bill_no = value
@property
def repay_expire_date(self):
return self._repay_expire_date
@repay_expire_date.setter
def repay_expire_date(self, value):
self._repay_expire_date = value
@property
def repay_trade_details(self):
return self._repay_trade_details
@repay_trade_details.setter
def repay_trade_details(self, value):
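        # accept either RepayTradeDetail instances or plain dicts; dicts are
        # converted via RepayTradeDetail.from_alipay_dict so raw JSON payloads work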
if isinstance(value, list):
self._repay_trade_details = list()
for i in value:
if isinstance(i, RepayTradeDetail):
self._repay_trade_details.append(i)
else:
self._repay_trade_details.append(RepayTradeDetail.from_alipay_dict(i))
@property
def user_id(self):
return self._user_id
@user_id.setter
def user_id(self, value):
self._user_id = value
def to_alipay_dict(self):
params = dict()
if self.bill_amount:
if hasattr(self.bill_amount, 'to_alipay_dict'):
params['bill_amount'] = self.bill_amount.to_alipay_dict()
else:
params['bill_amount'] = self.bill_amount
if self.bill_product:
if hasattr(self.bill_product, 'to_alipay_dict'):
params['bill_product'] = self.bill_product.to_alipay_dict()
else:
params['bill_product'] = self.bill_product
if self.out_bill_no:
if hasattr(self.out_bill_no, 'to_alipay_dict'):
params['out_bill_no'] = self.out_bill_no.to_alipay_dict()
else:
params['out_bill_no'] = self.out_bill_no
if self.repay_expire_date:
if hasattr(self.repay_expire_date, 'to_alipay_dict'):
params['repay_expire_date'] = self.repay_expire_date.to_alipay_dict()
else:
params['repay_expire_date'] = self.repay_expire_date
if self.repay_trade_details:
if isinstance(self.repay_trade_details, list):
for i in range(0, len(self.repay_trade_details)):
element = self.repay_trade_details[i]
if hasattr(element, 'to_alipay_dict'):
self.repay_trade_details[i] = element.to_alipay_dict()
if hasattr(self.repay_trade_details, 'to_alipay_dict'):
params['repay_trade_details'] = self.repay_trade_details.to_alipay_dict()
else:
params['repay_trade_details'] = self.repay_trade_details
if self.user_id:
if hasattr(self.user_id, 'to_alipay_dict'):
params['user_id'] = self.user_id.to_alipay_dict()
else:
params['user_id'] = self.user_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayTradeRepaybillCreateModel()
if 'bill_amount' in d:
o.bill_amount = d['bill_amount']
if 'bill_product' in d:
o.bill_product = d['bill_product']
if 'out_bill_no' in d:
o.out_bill_no = d['out_bill_no']
if 'repay_expire_date' in d:
o.repay_expire_date = d['repay_expire_date']
if 'repay_trade_details' in d:
o.repay_trade_details = d['repay_trade_details']
if 'user_id' in d:
o.user_id = d['user_id']
return o
| [
"[email protected]"
] | |
247193480074898ebc24c9ec752b45c047cab22c | 4711f3c69992f6bd19eba1d5d201189539a8bcec | /market/urls.py | 03608bd572e5e37968b2c2680beda9fc589a9969 | [] | no_license | ajithkjames/OpenMart | 3d98ec380a8f2fbc24670fba29a5c83c331fd728 | 64bfe714e5a435f8a06a3509ef2213fda227924c | refs/heads/master | 2021-07-05T07:50:30.443277 | 2017-09-28T12:19:56 | 2017-09-28T12:19:56 | 104,214,654 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | from django.conf.urls import url, include
from rest_framework import routers
from rest_framework.routers import DefaultRouter
from market.views import CategoryViewSet, AdvertisementViewSet
router = routers.DefaultRouter()
router.register(r'category', CategoryViewSet, 'categories')
router.register(r'advertisement', AdvertisementViewSet, 'advertisements')
urlpatterns = [
]
urlpatterns += router.urls | [
"[email protected]"
] | |
a2d82a587ab3df76aa9288380a7ef8423865f5da | c83d0f00a67c4b418f1b4868ab18493fda109e8e | /tests/conftest.py | 745be785e0cc4e64ce048d09a8c3446ae7d83f15 | [] | no_license | rshk-archive/datacat-poc-141007 | b8d09f4fb88dec5bf7837244d7d7904a67e31030 | eb91b0d039dc36ea264d75850cab4834831f042c | refs/heads/master | 2020-06-04T19:54:36.398365 | 2014-10-08T15:28:16 | 2014-10-08T15:28:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,011 | py | import os
import random
import time
from urlparse import urlparse
import shutil
import pytest
import py.path
TESTS_ROOT_DIR = py.path.local(__file__).dirpath()
TESTS_DATA_DIR = TESTS_ROOT_DIR.join('data')
POSTGRES_ENV_NAME = 'POSTGRES_URL'
def _celery_testing_conf():
return dict(
CELERY_BROKER_URL='redis://localhost:6399/0',
CELERY_RESULT_BACKEND='redis://localhost:6399/0',
CELERY_ACCEPT_CONTENT=['json', 'msgpack', 'yaml'],
CELERY_ALWAYS_EAGER=True,
)
def _celery_testing_conf_py():
return "\n".join("{0} = {1!r}".format(key, val)
for key, val in _celery_testing_conf().iteritems()) + "\n"
@pytest.fixture(scope='module')
def postgres_conf():
if POSTGRES_ENV_NAME not in os.environ:
raise RuntimeError(
"Missing configuration: the {0} environment variable is required"
" in order to be able to create a PostgreSQL database for running"
" tests. Please set it to something like: ``postgresql://"
"user:password@host:port/database``."
.format(POSTGRES_ENV_NAME))
url = urlparse(os.environ[POSTGRES_ENV_NAME])
return {
'database': url.path.split('/')[1],
'user': url.username,
'password': url.password,
'host': url.hostname,
'port': url.port or 5432,
}
@pytest.fixture(scope='module')
def postgres_admin_db(request, postgres_conf):
from datacat.db import connect
conn = connect(**postgres_conf)
request.addfinalizer(lambda: conn.close())
return conn
@pytest.fixture(scope='module')
def postgres_user_conf(request, postgres_conf):
from datacat.db import connect
conn = connect(**postgres_conf)
conn.autocommit = True
randomcode = random.randint(0, 999999)
name = 'dtctest_{0:06d}'.format(randomcode)
# Note: we need to use separate transactions to perform
# administrative activities such as creating/dropping databases
# and roles.
# For this reason, we need to set the connection isolation level
# to "autocommit"
with conn.cursor() as cur:
cur.execute("""
CREATE ROLE "{name}" LOGIN
PASSWORD %(password)s;
""".format(name=name), dict(password=name))
cur.execute("""
CREATE DATABASE "{name}"
WITH OWNER "{name}"
ENCODING = 'UTF-8';
""".format(name=name))
def cleanup():
conn.autocommit = True
with conn.cursor() as cur:
# Kill all connections to database first
cur.execute("""
SELECT pg_terminate_backend(pg_stat_activity.pid)
FROM pg_stat_activity
WHERE pg_stat_activity.datname = '{name}'
AND pid <> pg_backend_pid();
""".format(name=name))
cur.execute('DROP DATABASE "{name}";'.format(name=name))
cur.execute('DROP ROLE "{name}";'.format(name=name))
request.addfinalizer(cleanup)
conf = postgres_conf.copy()
conf['user'] = name
conf['password'] = name
conf['database'] = name
# HACK to create PostGIS extension, used by some plugins
_conf = postgres_conf.copy()
_conf['database'] = name
_conn = connect(**_conf)
_conn.autocommit = True
with _conn.cursor() as cur:
cur.execute("CREATE EXTENSION postgis;")
return conf
@pytest.fixture
def postgres_user_db(request, postgres_user_conf):
from datacat.db import connect
conn = connect(**postgres_user_conf)
conn.autocommit = False
request.addfinalizer(lambda: conn.close())
return conn
@pytest.fixture
def postgres_user_db_ac(request, postgres_user_conf):
"""User database with autocommit on"""
from datacat.db import connect
conn = connect(**postgres_user_conf)
conn.autocommit = True
request.addfinalizer(lambda: conn.close())
return conn
@pytest.fixture(scope='module')
def app_config(postgres_user_conf):
from flask.config import Config
from datacat.settings import testing
conf = Config('')
conf.from_object(testing)
conf['DATABASE'] = postgres_user_conf
return conf
@pytest.fixture(scope='module')
def configured_app(request, app_config):
from datacat.web.core import make_app
app_config.update(_celery_testing_conf())
app = make_app(app_config)
app.debug = True
return app
@pytest.yield_fixture(scope='module')
def configured_app_ctx(configured_app):
ctx = configured_app.app_context()
ctx.push()
yield configured_app
ctx.pop()
@pytest.fixture(scope='module')
def redis_instance(request):
import subprocess
import tempfile
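    # spawn a throwaway redis-server on port 6399 inside a temp dir; the
    # finalizer below terminates it and removes the directory after the module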
tempdir = tempfile.mkdtemp()
command = ['redis-server', '--port', '6399']
proc = subprocess.Popen(command, cwd=tempdir)
def cleanup():
proc.terminate()
proc.wait()
shutil.rmtree(tempdir)
request.addfinalizer(cleanup)
time.sleep(1)
return ('localhost', 6399)
@pytest.fixture
def data_dir():
return TESTS_DATA_DIR
| [
"[email protected]"
] | |
2417d121fd937b274579e82c572a60f4e7013fbf | 58f6127876282bc27d28bb8f7a21a1148a4ed474 | /tests/test_main.py | 7a16bcbbb78d86d3ee861e38a9dc9fc38dc84454 | [
"MIT"
] | permissive | yoursbest/simple_calculator | cb8374e3cb0216bcc39b988f62492586b43fcfad | da3b88a743789ed4668981f8533094a11a7dd808 | refs/heads/master | 2022-12-14T03:39:37.471959 | 2020-03-15T12:43:51 | 2020-09-09T09:43:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,965 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
from simple_calculator.main import SimpleCalculator
def test_add_two_numbers():
calculator = SimpleCalculator()
result = calculator.add(4, 5)
assert result == 9
def test_add_three_numbers():
calculator = SimpleCalculator()
result = calculator.add(4, 5, 6)
assert result == 15
def test_add_many_numbers():
numbers = range(100)
calculator = SimpleCalculator()
result = calculator.add(*numbers)
assert result == 4950
def test_subtract_two_numbers():
calculator = SimpleCalculator()
result = calculator.sub(10, 3)
assert result == 7
def test_mul_two_numbers():
calculator = SimpleCalculator()
result = calculator.mul(6, 4)
assert result == 24
def test_mul_many_numbers():
numbers = range(1, 10)
calculator = SimpleCalculator()
result = calculator.mul(*numbers)
assert result == 362880
def test_div_two_numbers_float():
calculator = SimpleCalculator()
result = calculator.div(13, 2)
assert result == 6.5
def test_div_by_zero_returns_inf():
calculator = SimpleCalculator()
result = calculator.div(5, 0)
assert result == float('inf')
def test_mul_by_zero_raises_exception():
calculator = SimpleCalculator()
with pytest.raises(ValueError):
calculator.mul(3, 0)
def test_avg_correct_average():
calculator = SimpleCalculator()
result = calculator.avg([2, 5, 12, 98])
assert result == 29.25
def test_avg_removes_upper_outliers():
calculator = SimpleCalculator()
result = calculator.avg([2, 5, 12, 98], ut=90)
assert result == pytest.approx(6.333333)
def test_avg_removes_lower_outliers():
calculator = SimpleCalculator()
result = calculator.avg([2, 5, 12, 98], lt=10)
assert result == pytest.approx(55)
def test_avg_upper_threshold_is_included():
calculator = SimpleCalculator()
result = calculator.avg([2, 5, 12, 98], ut=98)
assert result == 29.25
def test_avg_lower_threshold_is_included():
calculator = SimpleCalculator()
result = calculator.avg([2, 5, 12, 98], lt=2)
assert result == 29.25
def test_avg_empty_list():
calculator = SimpleCalculator()
result = calculator.avg([])
assert result == 0
def test_avg_manages_empty_list_after_outlier_removal():
calculator = SimpleCalculator()
result = calculator.avg([12, 98], lt=15, ut=90)
assert result == 0
def test_avg_manages_empty_list_before_outlier_removal():
calculator = SimpleCalculator()
result = calculator.avg([], lt=15, ut=90)
assert result == 0
def test_avg_manages_zero_value_lower_outlier():
calculator = SimpleCalculator()
result = calculator.avg([-1, 0, 1], lt=0)
assert result == 0.5
def test_avg_manages_zero_value_upper_outlier():
calculator = SimpleCalculator()
result = calculator.avg([-1, 0, 1], ut=0)
assert result == -0.5
| [
"[email protected]"
] | |
db10f1354db46d48e201db222bb29f36ff4b5d31 | 967c707b0e675968052006346b67147d59078e6b | /191.number-of-1-bits.py | 29eb5d2ada559090b418b43794b80a53f749fd54 | [] | no_license | hahaliu/LeetCode-Python3 | 40330f8ee2c9613d75bd70eb77b41767893c2fd1 | 1caeab8dc1c2a7c1c2beee3f09ef4b953d276033 | refs/heads/master | 2020-04-23T19:09:49.958096 | 2018-10-16T07:49:49 | 2018-10-16T07:49:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 430 | py | # ex2tron's blog:
# http://ex2tron.wang
# class Solution(object):
# def hammingWeight(self, n):
# """
# :type n: int
# :rtype: int
# """
# return (bin(n)[2:]).count('1')
# 别人的代码:
class Solution:
def hammingWeight(self, n):
result = 0
while n:
n &= n - 1
result += 1
return result
print(Solution().hammingWeight(11))
| [
"[email protected]"
] | |
d4f8c80b80c63ec2b8463a94cbfe57c292b2ddfa | 3cd75f3d62911ba3d2114f95203e81d91be32877 | /4day/Book07.py | 1ef18971c44708423b3b1e89ca7e86ad3f998849 | [] | no_license | kukukuni/Python_ex | 3667e2fe1db3a161d9e6acd5d8145a3e692f8e89 | e56d10bbcf3dc33b4422462a5b3c2dedb082b8c3 | refs/heads/master | 2022-11-05T13:58:55.770984 | 2019-04-14T00:57:18 | 2019-04-14T00:57:18 | 181,244,073 | 0 | 1 | null | 2022-10-23T06:38:06 | 2019-04-14T00:50:00 | Jupyter Notebook | UTF-8 | Python | false | false | 879 | py | # Book07.py
class Book :
def __init__(self,t,a,p):
        self.__title__ = t # public: double underscores on both ends (dunder) skip name mangling, so it stays public
        self.__author_ = a # private --> reach it via methods; direct external access is blocked
        self.__price = p # private --> reach it indirectly through methods
        self.category = '' # left exposed (public)
def pBook(self):
print(self.__title__+','+self.__author_+','+str(self.__price))
def setTitle(self,t): self.__title__ = t
    def setAuthor(self,a): self.__author_ = a # a method that can reach __author_,
    # protecting author while still allowing indirect access
def getAuthor(self): print(self.__author_)
    def setPrice(self,p): self.__price = p
b1 = Book("파이썬","홍길동",30000)
b1.pBook()
b1.setAuthor("김연아"); b1.getAuthor()
| [
"[email protected]"
] | |
9dd418e1cae52b2087ffbdb0d2e3209d14152d7c | c2e15137cd3ddfe574d06ed313f4c4039835a48b | /ACIS_sci_run/plot_sci_run_trends.py | ccedbba09d9fd445230ca782b1f973ae936a3b9b | [] | no_license | chandra-mta/MTA | df57845577ac50f21c4b1775a441804d78060e97 | 60015d4fbbcc7e00595152fb21a8c55e2366a982 | refs/heads/master | 2023-08-18T13:05:33.397195 | 2023-08-11T14:28:40 | 2023-08-11T14:28:40 | 4,586,218 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,950 | py | #!/usr/bin/env /data/mta/Script/Python3.8/envs/ska3-shiny/bin/python
#################################################################################
# #
# plot_sci_run_trends.py: pdate science run trend plots #
# #
# author: t. isobe ([email protected]) #
# #
# last update: Feb 26, 2021 #
# #
#################################################################################
import os
import sys
import re
import string
import random
import operator
import time
import matplotlib as mpl
mpl.use('Agg')
from pylab import *
import matplotlib.pyplot as plt
import matplotlib.font_manager as font_manager
import matplotlib.lines as lines
path = '/data/mta/Script/ACIS/Acis_sci_run/house_keeping/dir_list_py_t'
with open(path, 'r') as f:
data = [line.strip() for line in f.readlines()]
for ent in data:
atemp = re.split(':', ent)
var = atemp[1].strip()
line = atemp[0].strip()
exec("%s = %s" %(var, line))
#
#--- append paths to private folders to the python module search path
#
sys.path.append(bin_dir)
sys.path.append(mta_dir)
#
#--- mta_common_functions contains MTA time conversion routines
#
import mta_common_functions as mcf
import acis_sci_run_functions as asrf
#-----------------------------------------------------------------------------------------------
#-- plot_sci_run_trends: update science run trend plots                                        --
#-----------------------------------------------------------------------------------------------
def plot_sci_run_trends(tyear=''):
"""
update science run trend plots
input: tyear --- the year of the data
output: <web_dir>Year<year>/<type>_out.png
"""
if tyear == '':
tyear = int(float((time.strftime('%Y', time.gmtime()))))
cout_dir = 'Year' + str(tyear)
#
#--- plot trends for the year
#
plot_events(cout_dir)
#
#--- plot long term trends
#
plot_events('Long_term')
#
#--- update html pages
#
today = time.strftime("%Y:%m:%d", time.gmtime())
atemp = re.split(':', today)
year = int(float(atemp[0]))
month = int(float(atemp[1]))
mday = int(float(atemp[2]))
if year != tyear:
month = 12
mday = 31
asrf.acis_sci_run_print_html(web_dir, tyear, month, mday)
#-----------------------------------------------------------------------------------------------
#--- plot_events: control sub for plotting each data group ---
#-----------------------------------------------------------------------------------------------
def plot_events(data_dir):
"""
control function to create plots for each sub data set
input: data_dir --- the directory name where the data located (e.g. Year2013/)
output: png plot file such as te3_3_out.png
"""
ifile = web_dir + data_dir + '/cc3_3_out'
outname = ifile + '.png'
acis_sci_run_plot(ifile, outname)
ifile = web_dir + data_dir + '/te3_3_out'
outname = ifile + '.png'
acis_sci_run_plot(ifile, outname)
ifile = web_dir + data_dir + '/te5_5_out'
outname = ifile + '.png'
acis_sci_run_plot(ifile, outname)
ifile = web_dir + data_dir + '/te_raw_out'
outname = ifile + '.png'
acis_sci_run_plot(ifile, outname)
#-----------------------------------------------------------------------------------------------
#-- acis_sci_run_plot: sets up the parameters for the given file and create plots ---
#-----------------------------------------------------------------------------------------------
def acis_sci_run_plot(ifile, outname):
"""
this function sets up the parameters for the given file and create plots
input: ifile --- data file name
outname --- plot output file name
output: <outname>.png
"""
#
#--- read input data
#
data = mcf.read_data_file(ifile)
#
#--- if there is no data a copy an "no data" plot
#
if len(data) == 0:
cmd = 'cp ' + house_keeping + 'no_data.png ' + outname
os.system(cmd)
return False
col = []
date_list = []
count_list = []
err_list = []
drop_list = []
xmakerInd = 0 #--- used to mark whether this is a plot for a long term (if so, 1)
for ent in data:
col = re.split('\t+|\s+', ent)
try:
val = float(col[6])
if val > 0:
m = re.search(':', col[1])
#
#--- for each year, change date format to ydate (date format in the data file is: 112:00975.727)
#
if m is not None:
atemp = re.split(':', col[1])
date = float(atemp[0]) + float(atemp[1])/86400.0
#
#---- for the case of long term: the date format is already in a fractional year date
#
else:
date = float(col[1])
xmakerInd = 1
#
#--- convert event rate and error rate into appropriate units
#
evt = float(col[7])/float(val)/1000.0
err = float(col[8])/float(val)
#
#--- save needed data
#
date_list.append(date)
count_list.append(evt)
err_list.append(err)
drop_list.append(float(col[9]))
except:
pass
if len(date_list) > 0:
#
#--- set plotting range
#
(xmin, xmax) = set_min_max(date_list)
if xmakerInd == 1: #--- if it is a long term, x axis in year (in interger)
xmin = int(xmin)
xmax = int(xmax) + 1
(ymin1, ymax1) = set_min_max(count_list)
#
#--- if the data set is te_raw_out, set the y plotting range to fixed size: 0 - 10
#
        m1 = re.search('te_raw_out', ifile)
if m1 is not None:
ymin1 = 0
ymax1 = 10
(ymin2, ymax2) = set_min_max(err_list)
(ymin3, ymax3) = set_min_max(drop_list)
yminSet = [ymin1, ymin2, ymin3]
ymaxSet = [ymax1, ymax2, ymax3]
xSets = [date_list, date_list, date_list]
ySets = [count_list, err_list, drop_list]
if xmakerInd == 0:
xname = 'Time (Day of Year)'
else:
xname = 'Time (Year)'
yLabel = ['Events/sec', 'Events/sec', 'Percent']
entLabels= ['Events per Second (Science Run)','Errors (Science Run)','Percentage of Exposures Dropped (Science Run)']
#
#--- calling actual plotting routine
#
plotPanel(xmin, xmax, yminSet, ymaxSet, xSets, ySets, xname, yLabel, entLabels, outname)
#-----------------------------------------------------------------------------------------------
#--- set_min_max: set min and max of plotting range ---
#-----------------------------------------------------------------------------------------------
def set_min_max(data):
"""
set min and max of the plotting range; 10% larger than actual min and max of the data set
    Input: data --- one dimensional data set
    Output (pmin, pmax): min and max of plotting range
"""
try:
pmin = min(data)
pmax = max(data)
diff = pmax - pmin
pmin = pmin - 0.1 * diff
if pmin < 0:
pmin = 0
pmax = pmax + 0.1 * diff
if pmin == pmax:
pmax = pmin + 1
except:
pmin = 0
pmax = 1
return (pmin, pmax)
#-----------------------------------------------------------------------------------------------
#--- plotPanel: plots multiple data in separate panels ---
#-----------------------------------------------------------------------------------------------
def plotPanel(xmin, xmax, yminSet, ymaxSet, xSets, ySets, xname, yLabel, entLabels, ofile):
"""
This function plots multiple data in separate panels.
    Input: xmin, xmax, yminSet, ymaxSet: plotting ranges
           xSets: a list of lists containing x-axis data
           ySets: a list of lists containing y-axis data
           xname: a name of x-axis
           yLabel: a list of y-axis names
           entLabels: a list of the names of each data
    Output: a png plot written to <ofile>
"""
#
#--- set line color list
#
colorList = ('blue', 'green', 'red', 'aqua', 'lime', 'fuchsia', 'maroon', 'black', 'yellow', 'olive')
#
#--- clean up the plotting device
#
plt.close('all')
#
#---- set a few parameters
#
mpl.rcParams['font.size'] = 9
props = font_manager.FontProperties(size=9)
plt.subplots_adjust(hspace=0.08)
tot = len(entLabels)
#
#--- start plotting each data
#
for i in range(0, tot):
axNam = 'ax' + str(i)
#
#--- setting the panel position
#
j = i + 1
        line = str(tot) + '1' + str(j)
exec("%s = plt.subplot(%s)" % (axNam, line))
exec("%s.set_autoscale_on(False)" % (axNam)) #---- these three may not be needed for the new pylab, but
exec("%s.set_xbound(xmin,xmax)" % (axNam)) #---- they are necessary for the older version to set
exec("%s.set_xlim(left=xmin, right=xmax, auto=False)" % (axNam))
exec("%s.set_ylim(bottom=yminSet[i], top=ymaxSet[i], auto=False)" % (axNam))
xdata = xSets[i]
ydata = ySets[i]
#
#---- actual data plotting
#
p, = plt.plot(xdata, ydata, color=colorList[i], lw =0, markersize=4.0, marker='o')
#
#--- add legend
#
leg = legend([p], [entLabels[i]], prop=props, loc=2)
leg.get_frame().set_alpha(0.5)
exec("%s.set_ylabel(yLabel[i], size=8)" % (axNam))
#
#--- add x ticks label only on the last panel
#
for i in range(0, tot):
ax = 'ax' + str(i)
if i != tot-1:
line = eval("%s.get_xticklabels()" % (ax))
for label in line:
label.set_visible(False)
else:
pass
xlabel(xname)
#
#--- set the size of the plotting area in inch (width: 10.0in, height 2.08in x number of panels)
#
fig = matplotlib.pyplot.gcf()
height = (2.00 + 0.08) * tot
fig.set_size_inches(10.0, height)
plt.subplots_adjust(hspace=0.08)
#
#--- save the plot in png format
#
plt.savefig(ofile, format='png', dpi=200)
#--------------------------------------------------------------------
if __name__ == '__main__':
if len(sys.argv) > 1:
tyear = int(float(sys.argv[1]))
else:
tyear = ''
plot_sci_run_trends(tyear)
| [
"[email protected]"
] | |
9abfe1720861b0f602574f7e87a97ac4aca860f8 | 206e2b8a6df8a2a9375fe188d74ffaa287484029 | /poc/simple/publish.py | bc24442105aee93ed71a8f8ffe6afdbe5ea865ab | [] | no_license | chaeplin/watchman | 2af63eaf78f333b0e69e51063cc0cda8131e0e89 | 68e0893883a57bf8d703d7191e66f45407eccb75 | refs/heads/master | 2021-01-22T08:02:50.814892 | 2017-09-07T13:23:50 | 2017-09-07T13:23:50 | 81,871,983 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,914 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import io, os, sys
import time
import socket
import simplejson as json
import psutil
import paho.mqtt.client as mqtt
def get_load_average():
try:
raw_average = os.getloadavg()
load_average = { '1min': raw_average[0], '5min': raw_average[1], '15min': raw_average[2] }
return load_average
except:
return None
def get_cpu_percent():
try:
raw_percent = psutil.cpu_times_percent(interval=1, percpu=False)
cpu_percent = round(100 - raw_percent.idle, 1)
return cpu_percent
except:
return None
def get_virtual_memory():
try:
raw_vmem = psutil.virtual_memory()
vmem_usage = raw_vmem.percent
return vmem_usage
except:
return None
def get_disk_usage():
try:
raw_disk = psutil.disk_usage('/')
disk_usage = raw_disk.percent
return disk_usage
except:
return None
def get_process_list():
try:
process = []
for p in psutil.process_iter():
info = p.as_dict(attrs=["pid", "cmdline", "username", "memory_percent", "cpu_percent"])
info["cmdline"] = " ".join(info["cmdline"]).strip()
if len(info.get('cmdline', None)) > 0:
process.append(info)
return process
except:
return None
def on_connect(client, userdata, flags, rc):
print ("Connected with result code "+str(rc))
if __name__ == "__main__":
ipaddress = '10.10.10.10'
assert (len(ipaddress)) > 0, 'configure private address'
client = mqtt.Client()
client.on_connect = on_connect
client.connect('127.0.0.1', 1883, 10)
client.loop_start()
try:
while True:
epoch = int(time.time())
if epoch % 5 == 0:
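                # publish a snapshot only on epochs divisible by 5 (~every 5 s);
                # otherwise sleep briefly and re-check the clock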
hostname = socket.gethostname()
loadavg = get_load_average()
cpu = get_cpu_percent()
vmem = get_virtual_memory()
disk = get_disk_usage()
plist = get_process_list()
report = {
'hostname': hostname,
'ip': ipaddress,
'timestamp': epoch,
'loadavg': loadavg,
'cpu': cpu,
'vmem': vmem,
'disk': disk,
'plist': plist
}
print(json.dumps(report, sort_keys=True, indent=4, separators=(',', ': ')))
client.publish("host/" + ipaddress, json.dumps(report), 0, True)
time.sleep(1)
else:
time.sleep(0.8)
except Exception as e:
print(e.args[0])
sys.exit()
except KeyboardInterrupt:
sys.exit(1)
| [
"[email protected]"
] | |
89f282413cdf61bce21e4c73aa34472fade90f44 | 04c29ca0255220f76d22a66f6075fa0a6813d3ad | /tests/__init__.py | a75c1322763f8ce3daabc30788f71cb0e4973919 | [
"MIT"
] | permissive | williamcanin/recover-grub | 584d7e6dc36e81cf164ca8e75e2452ffc2eeacbf | b22724cdd1b62c815431e6502bd1abc144bfcc69 | refs/heads/master | 2021-01-19T02:25:35.481650 | 2020-02-21T21:44:35 | 2020-02-21T21:44:35 | 47,598,478 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45 | py | #!/usr/bin/env python
__tempdir__ = '/tmp'
| [
"[email protected]"
] | |
5bb9f92064b542e3bf928e60a8cbbd04e449d861 | 66d7292253d2815ce80fa2abd09b898117426b81 | /tests/conftest.py | 47b87ddc5599cc0828724b9be3f28ddb4959da63 | [
"MIT"
] | permissive | ubergesundheit/brightsky | 5801b13aa61e1f8cf2b3b1708858f932f353b3d2 | e8aec199dd2ade8ed520de8d0602db604cf0647e | refs/heads/master | 2021-05-19T10:13:03.706968 | 2020-03-31T15:01:53 | 2020-03-31T15:01:53 | 251,646,442 | 0 | 0 | MIT | 2020-03-31T15:28:55 | 2020-03-31T15:28:55 | null | UTF-8 | Python | false | false | 136 | py | import os
from pathlib import Path
import pytest
@pytest.fixture
def data_dir():
return Path(os.path.dirname(__file__)) / 'data'
| [
"[email protected]"
] | |
659ca6564bbc1040e080989d9f0b099b3a9fa9e2 | 30278f51d61cda6cb2e7dc0d5e8ba71f63092285 | /HELLOPYTHON/day02/myclass_01.py | 7b57b4fdcd3cd92ee5e11193f409aae3733bb347 | [] | no_license | shywj05/HelloPython | a99097ffc177e40ea7469bff5009bf06fe566a35 | 82ec0cf9fd00545ddb3a9c81d4474132d3c24767 | refs/heads/master | 2023-06-27T19:40:22.259916 | 2021-08-03T00:55:29 | 2021-08-03T00:55:29 | 392,139,533 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 626 | py | class Animal:
def __init__(self):
self.age = 1
def getOld(self):
self.age+=1
class Human(Animal):
def __init__(self):
super().__init__()
self.power_lang = 1
def learn_lang(self):
self.power_lang += 1
def pt(self,power):
self.power_lang += power
if __name__ == '__main__':
ani = Animal()
print(ani.age)
ani.getOld()
print(ani.age)
hum = Human()
print(hum.age)
hum.getOld()
print(hum.age)
hum.learn_lang()
print(hum.power_lang)
hum.pt(5)
print(hum.power_lang)
| [
"[email protected]"
] | |
71fd0e69a186260a53278340be66f03f80866095 | 7bdb0e12359162c5dd2bddc58d2ca1d234fb29d2 | /trunk/playground/intern/2009/Pakito/.svn/text-base/pakito.py.svn-base | 2973af6b4b23495fa94e8e661c2ea85598d13029 | [] | no_license | hitaf/Pardus-2011-Svn- | f40776b0bba87d473aac45001c4b946211cbc7bc | 16df30ab9c6ce6c4896826814e34cfeadad1be09 | refs/heads/master | 2021-01-10T19:48:33.836038 | 2012-08-13T22:57:37 | 2012-08-13T22:57:37 | 5,401,998 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 974 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
from kdecore import KApplication, KAboutData, KCmdLineArgs, KGlobal, KIcon
from qt import QObject, SIGNAL, SLOT
from pakito.gui.mainwindow import MainWindow
def I18N_NOOP(x):
return x
name = "Pakito"
version = "0.3"
mail = "[email protected]"
description = I18N_NOOP("A tool for accelerating package making process")
if __name__ == "__main__":
about = KAboutData(name.lower(), name, version, description, KAboutData.License_GPL_V2, "(C) Gökçen Eraslan 2007", None, None, mail)
about.addAuthor("Gökçen Eraslan", None, mail)
KCmdLineArgs.init(sys.argv, about)
app = KApplication()
programLogo = KGlobal.iconLoader().loadIcon("pisikga", KIcon.Desktop)
about.setProgramLogo(programLogo.convertToImage())
QObject.connect(app, SIGNAL("lastWindowClosed()"), app, SLOT("quit()"))
pac = MainWindow(None, name)
app.setMainWidget(pac)
pac.show()
app.exec_loop()
| [
"fatih@dhcppc1.(none)"
] | fatih@dhcppc1.(none) |
|
68e70fbbe907d4bf87455154db59c78968f64354 | bad62c2b0dfad33197db55b44efeec0bab405634 | /sdk/iothub/azure-mgmt-iothub/azure/mgmt/iothub/v2021_07_01/_configuration.py | 16e7beaf5b875bc8a0fafc102999e9b2661a6327 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | test-repo-billy/azure-sdk-for-python | 20c5a2486456e02456de17515704cb064ff19833 | cece86a8548cb5f575e5419864d631673be0a244 | refs/heads/master | 2022-10-25T02:28:39.022559 | 2022-10-18T06:05:46 | 2022-10-18T06:05:46 | 182,325,031 | 0 | 0 | MIT | 2019-07-25T22:28:52 | 2019-04-19T20:59:15 | Python | UTF-8 | Python | false | false | 3,501 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy
from ._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class IotHubClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
"""Configuration for IotHubClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure. Required.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The subscription identifier. Required.
:type subscription_id: str
:keyword api_version: Api Version. Default value is "2021-07-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None:
super(IotHubClientConfiguration, self).__init__(**kwargs)
api_version = kwargs.pop("api_version", "2021-07-01") # type: str
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
self.credential = credential
self.subscription_id = subscription_id
self.api_version = api_version
self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
kwargs.setdefault("sdk_moniker", "mgmt-iothub/{}".format(VERSION))
self._configure(**kwargs)
def _configure(
self, **kwargs # type: Any
):
# type: (...) -> None
self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get("authentication_policy")
if self.credential and not self.authentication_policy:
self.authentication_policy = ARMChallengeAuthenticationPolicy(
self.credential, *self.credential_scopes, **kwargs
)
| [
"[email protected]"
] |