Dataset schema (one row per source file; ⌀ marks columns containing nulls):

| Column | Type | Length / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 |
| detected_licenses | sequence | length 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 ⌀ | 4.92k to 681M |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] ⌀ | 2012-06-04 01:52:49 to 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] ⌀ | 2008-05-22 07:58:19 to 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | sequence | length 1 |
| author_id | string | length 1 to 132 |
312aa26a98938f328a2eba660cd18cb8d9305fac | 57094f0d09fd3e74eeb511e94400c3ec97051ad3 | /Quax_dev_archive/quax_research/research/findif.py | 837a3116e26a36c17b410aed4ddeca71656b4abe | [] | no_license | adabbott/Research_Notes | cccba246e81065dc4a663703fe225fc1ebbf806b | 644394edff99dc6542e8ae6bd0ce8bcf158cff69 | refs/heads/master | 2023-05-12T20:26:58.938617 | 2021-06-02T17:15:35 | 2021-06-02T17:15:35 | 119,863,228 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 687 | py | # pip install fdm
import numpy as np
import jax.numpy as jnp
import fdm
import jax
# Test function: f(x, y, z) = exp(1*x + 2*y + 3*z)
def func(vec):
coef = jnp.array([1.,2.,3.])
return jnp.exp(jnp.sum(coef * vec))
# Gradient: central finite differences (fdm) vs JAX forward-mode autodiff
findif_gradient = fdm.gradient(func)
jax_gradient = jax.jacfwd(func, 0)
inp1 = np.array([0.1,0.2,0.3])
inp2 = jnp.array([0.1,0.2,0.3])
print(findif_gradient(inp1))
print(jax_gradient(inp2))
# Hessian: finite differences of the JAX gradient vs second-order JAX
findif_hessian = fdm.jacobian(jax_gradient)
jax_hessian = jax.jacfwd(jax_gradient)
print(findif_hessian(inp1))
print(jax_hessian(inp2))
# Third-derivative (cubic) tensor: the same comparison one order higher
findif_cubic = fdm.jacobian(jax_hessian)
jax_cubic = jax.jacfwd(jax_hessian)
print(findif_cubic(inp1))
print(jax_cubic(inp2))
| [
"[email protected]"
] | |
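The script above relies on eyeballing the printed arrays; a minimal sketch of an automated agreement check (same `func` and the same `fdm`/`jax` packages; the tolerance is an assumption):

```python
import numpy as np
import jax
import jax.numpy as jnp
import fdm

def func(vec):
    coef = jnp.array([1., 2., 3.])
    return jnp.exp(jnp.sum(coef * vec))

inp = np.array([0.1, 0.2, 0.3])
jax_gradient = jax.jacfwd(func, 0)
# Finite differences and forward-mode autodiff should agree closely here.
assert np.allclose(fdm.gradient(func)(inp),
                   jax_gradient(jnp.array(inp)), atol=1e-6)
```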
ba661605c33bc97a360545d8ba5b4e9de8ab3769 | 4ca44b7bdb470fcbbd60c2868706dbd42b1984c9 | /20.02.23/백준 2589.py | e74d174f72d9f3fb56f4e43329178cf1902b773a | [] | no_license | titiman1013/Algorithm | 3b3d14b3e2f0cbc4859029eb73ad959ec8778629 | 8a67e36931c42422779a4c90859b665ee468255b | refs/heads/master | 2023-06-29T17:04:40.015311 | 2021-07-06T01:37:29 | 2021-07-06T01:37:29 | 242,510,483 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,144 | py | # BFS
# N, M = list(map(int, input().split()))
# arr = [list(map(str, list(input()))) for i in range(N)]
# dx = [-1, 1, 0, 0]
# dy = [0, 0, -1, 1]
# for i in range(N):
# for j in range(M):
# clone = []
# for i in range(N):
# clone.append([])
# clone[i] = arr[i][:]
# if clone[i][j] == 'L':
# que = []
# x = i
# y = j
# cnt = 0
# result = 0
# clone[x][y] = 'W'
# que.append((x, y))
# while que:
# x, y = que.pop(0)
# for k in range(4):
# nx = x + dx[k]
# ny = y + dy[k]
# if 0 <= nx < N and 0 <= ny < M:
# if clone[nx][ny] == 'L':
# que.append((nx, ny))
# clone[nx][ny] = 'W'
# cnt += 1
# if cnt > result:
# result = cnt
# print(result)
# N, M = list(map(int, input().split()))
# arr = [list(map(str, list(input()))) for i in range(N)]
# dx = [-1, 1, 0, 0]
# dy = [0, 0, -1, 1]
# final = 0
# for i in range(N):
# for j in range(M):
# if arr[i][j] == 'L':
# que = []
# visit = [[0] * M for i in range(N)]
# x = i
# y = j
# result = 0
# visit[x][y] = 1
# que.append((x, y))
# while que:
# x, y = que.pop(0)
# for k in range(4):
# nx = x + dx[k]
# ny = y + dy[k]
# if 0 <= nx < N and 0 <= ny < M:
# if arr[nx][ny] == 'L' and visit[nx][ny] == 0:
# que.append((nx, ny))
# visit[nx][ny] = visit[x][y] + 1
# if visit[nx][ny] > result:
# result = visit[nx][ny]
# if result > final:
# final = result
# print(final-1)
# Using the collections module: deque.popleft() is O(1), unlike list.pop(0)
import collections
N, M = list(map(int, input().split()))
arr = [list(map(str, list(input()))) for i in range(N)]
dx = [-1, 1, 0, 0]
dy = [0, 0, -1, 1]
final = 0
for i in range(N):
for j in range(M):
if arr[i][j] == 'L':
que = collections.deque()
visit = [[0] * M for i in range(N)]
x = i
y = j
result = 0
visit[x][y] = 1
que.append((x, y))
while que:
x, y = que.popleft()
for k in range(4):
nx = x + dx[k]
ny = y + dy[k]
if 0 <= nx < N and 0 <= ny < M:
if arr[nx][ny] == 'L' and visit[nx][ny] == 0:
que.append((nx, ny))
visit[nx][ny] = visit[x][y] + 1
if visit[nx][ny] > result:
result = visit[nx][ny]
if result > final:
final = result
print(final-1) | [
"[email protected]"
] | |
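A minimal sketch of the same double-BFS idea on a hard-coded grid (a hypothetical 3x4 map instead of stdin), which makes the `final - 1` adjustment easy to verify by hand; the distance grid stores path length + 1 because the start cell is seeded with 1:

```python
from collections import deque

# Hypothetical 3x4 map: 'L' = land, 'W' = water (the problem reads this from stdin).
grid = ["WLLW",
        "LLLW",
        "WLWW"]
N, M = len(grid), len(grid[0])
best = 0
for si in range(N):
    for sj in range(M):
        if grid[si][sj] != 'L':
            continue
        # BFS from (si, sj) over land cells only.
        dist = [[0] * M for _ in range(N)]
        dist[si][sj] = 1
        q = deque([(si, sj)])
        while q:
            x, y = q.popleft()
            for dx, dy in ((-1, 0), (1, 0), (0, -1), (0, 1)):
                nx, ny = x + dx, y + dy
                if 0 <= nx < N and 0 <= ny < M and grid[nx][ny] == 'L' and not dist[nx][ny]:
                    dist[nx][ny] = dist[x][y] + 1
                    q.append((nx, ny))
        best = max(best, max(map(max, dist)))
print(best - 1)  # subtract the seed value of 1, exactly like the final print above
```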
d93d0dd79f44036af57b9124fc0a49af6d8f58dd | 8553e0b06161d288100750cda973924ee4030e17 | /clustack/blueprint.py | 4dfa4e64602ade14f0636b61a7ba98577f3a14d4 | [] | no_license | mrmh2/clustack | 0b635fec2c459f7a6db724aead84a36fb6bd7254 | 4b1e820e82ef8cbae400b69f156574050e72698a | refs/heads/master | 2021-01-19T03:23:20.122454 | 2015-04-13T12:47:07 | 2015-04-13T12:47:07 | 28,097,203 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,276 | py | import os
import yaml
import settings
def load_blueprint_yaml(filename):
"""Load a blueprint from the given YAML file."""
with open(filename) as f:
yaml_rep = yaml.safe_load(f)  # safe_load never executes arbitrary YAML tags
name = yaml_rep['name']
bp = BluePrint(name)
bp.version = yaml_rep['version']
if 'dependencies' in yaml_rep:
bp.direct_dependencies = yaml_rep['dependencies']
return bp
def load_blueprint(filename):
"""Load a blueprint from the given filename. Try to determine the type of
blueprint from the file extension and dispatch to the relevant loader."""
path, ext = os.path.splitext(filename)
try:
loader = LOADERS[ext]
except KeyError:
raise Exception("Don't know how to load blueprint: {}".format(filename))
return loader(filename)
def ext_matches(name, ext_list):
"""Return True if file extension is in the supplied list."""
base, ext = os.path.splitext(name)
return ext in ext_list
def get_basename_and_full_path(name, path):
"""Given name and path, return name with no extension and full path."""
basename = os.path.splitext(name)[0]
full_path = os.path.join(path, name)
return basename, full_path
def get_available_blueprints():
"""Generate list of all available blueprints, in the form of a dictionary of
name : path pairs."""
available = {}
bp_exts = settings.blueprint_exts
bp_path = settings.blueprint_path
for path in bp_path:
matching_files = [f for f in os.listdir(path)
if ext_matches(f, bp_exts)]
md = dict(get_basename_and_full_path(fn, path) for fn in matching_files)
available.update(md)
return available
def load_blueprint_by_name(name):
"""Load a blueprint from a (string) name. Use settings to determine search
path, then choose a loader."""
name = name.lower()
available_blueprints = get_available_blueprints()
if name in available_blueprints:
return load_blueprint(available_blueprints[name])
raise Exception("Can't load blueprint {} by name".format(name))
class BluePrint(object):
"""Class representing instructions for how to build a package. Expected to
be constructed from a file, and will be used to create a Builder to build
the package."""
def __init__(self, name):
self.name = name
self.direct_dependencies = []
self._full_dependencies = None
@property
def full_dependencies(self):
"""All dependencies, both direct and indirect. To avoid calculating
every time, cache the result of the initial tree walk."""
if not self._full_dependencies:
self._full_dependencies = self.find_full_dependencies()
return self._full_dependencies
def find_full_dependencies(self):
"""Find all indirect dependencies, by finding dependencies of
dependencies."""
full_dependencies = []
modules = list(self.direct_dependencies)  # copy so we never mutate the blueprint's own list
while modules:
module = modules.pop()
if module in full_dependencies:
continue  # already resolved; also guards against dependency cycles
full_dependencies.append(module)
bp_dep = load_blueprint_by_name(module)
modules += bp_dep.direct_dependencies
return full_dependencies
LOADERS = { '.yaml' : load_blueprint_yaml }
| [
"[email protected]"
] | |
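A minimal usage sketch; the YAML layout is inferred from `load_blueprint_yaml`, and the file name is hypothetical:

```python
# Hypothetical blueprint file, following the keys read by load_blueprint_yaml:
#
#   # example.yaml
#   name: mytool
#   version: "1.0"
#   dependencies: [libfoo]

bp = load_blueprint("example.yaml")
print(bp.name, bp.version)   # mytool 1.0
# Walks dependencies-of-dependencies; each name must resolve to a blueprint
# discoverable on settings.blueprint_path.
print(bp.full_dependencies)
```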
6fdfbeb35e62d8c18a865ee9eff34cc303f77ce4 | e35ffef188a8b72000ae9402b9143b9f35cf95ca | /web3/utils/filters.py | ccdec69050ed01b26cf2d405c2b6f66fe6989572 | [
"MIT"
] | permissive | riccitensor/web3.py | 7dd9215b6d6755340cfc85c37d06cb076b3518de | 73bfb0f7bab1f035cb9c4ce9a8660cb664e61c58 | refs/heads/master | 2020-12-11T09:16:53.374513 | 2016-08-07T01:11:25 | 2016-08-07T01:11:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,915 | py | import re
import random
import gevent
from .types import (
is_string,
is_array,
)
from .abi import (
construct_event_topic_set,
construct_event_data_set,
)
def construct_event_filter_params(event_abi,
contract_address=None,
argument_filters=None,
topics=None,
fromBlock=None,
toBlock=None,
address=None):
filter_params = {}
if topics is None:
topic_set = construct_event_topic_set(event_abi, argument_filters)
else:
topic_set = [topics] + construct_event_topic_set(event_abi, argument_filters)
if len(topic_set) == 1 and is_array(topic_set[0]):
filter_params['topics'] = topic_set[0]
else:
filter_params['topics'] = topic_set
if address and contract_address:
if is_array(address):
filter_params['address'] = address + [contract_address]
elif is_string(address):
filter_params['address'] = [address, contract_address]
else:
raise ValueError(
"Unsupported type for `address` parameter: {0}".format(type(address))
)
elif address:
filter_params['address'] = address
elif contract_address:
filter_params['address'] = contract_address
if fromBlock is not None:
filter_params['fromBlock'] = fromBlock
if toBlock is not None:
filter_params['toBlock'] = toBlock
data_filters_set = construct_event_data_set(event_abi, argument_filters)
return data_filters_set, filter_params
class BaseFilter(gevent.Greenlet):
callbacks = None
running = None
stopped = False
def __init__(self, web3, filter_id):
self.web3 = web3
self.filter_id = filter_id
self.callbacks = []
gevent.Greenlet.__init__(self)
def __str__(self):
return "Filter for {0}".format(self.filter_id)
def _run(self):
if self.stopped:
raise ValueError("Cannot restart a Filter")
self.running = True
self.rejected_logs = []
previous_logs = self.web3.eth.getFilterLogs(self.filter_id)
if previous_logs:
for entry in previous_logs:
for callback_fn in self.callbacks:
if self.is_valid_entry(entry):
callback_fn(entry)
else:
self.rejected_logs.append(entry)
while self.running:
changes = self.web3.eth.getFilterChanges(self.filter_id)
if changes:
for entry in changes:
for callback_fn in self.callbacks:
if self.is_valid_entry(entry):
callback_fn(entry)
else:
self.rejected_logs.append(entry)
gevent.sleep(random.random())
def is_valid_entry(self, entry):
"""
Hook for subclasses to implement additional filtering layers.
"""
return True
def watch(self, *callbacks):
if self.stopped:
raise ValueError("Cannot watch on a filter that has been stopped")
self.callbacks.extend(callbacks)
if not self.running:
self.start()
def stop_watching(self, timeout=0):
self.running = False
self.stopped = True
self.web3.eth.uninstallFilter(self.filter_id)
self.join(timeout)
stopWatching = stop_watching
class BlockFilter(BaseFilter):
pass
class TransactionFilter(BaseFilter):
pass
ZERO_32BYTES = '[a-f0-9]{64}'
def construct_data_filter_regex(data_filter_set):
return re.compile((
'^' +
'|'.join((
'0x' + ''.join(
(ZERO_32BYTES if v is None else v[2:] for v in data_filter)
)
for data_filter in data_filter_set
)) +
'$'
))
class LogFilter(BaseFilter):
data_filter_set = None
data_filter_set_regex = None
def get(self, only_changes=True):
if self.running:
raise ValueError(
"Cannot call `get` on a filter object which is actively watching"
)
if only_changes:
return self.web3.eth.getFilterChanges(self.filter_id)
else:
return self.web3.eth.getFilterLogs(self.filter_id)
def set_data_filters(self, data_filter_set):
self.data_filter_set = data_filter_set
if any(data_filter_set):
self.data_filter_set_regex = construct_data_filter_regex(
data_filter_set,
)
def is_valid_entry(self, entry):
if not self.data_filter_set_regex:
return True
return bool(self.data_filter_set_regex.match(entry['data']))
| [
"[email protected]"
] | |
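A minimal sketch of `construct_event_filter_params` in use; the Transfer ABI and contract address below are made up, and the actual topic hashes are produced by `construct_event_topic_set`:

```python
# Hypothetical Transfer event ABI; the address is a placeholder.
transfer_abi = {
    "type": "event", "name": "Transfer", "anonymous": False,
    "inputs": [
        {"name": "from", "type": "address", "indexed": True},
        {"name": "to", "type": "address", "indexed": True},
        {"name": "value", "type": "uint256", "indexed": False},
    ],
}
data_filters, params = construct_event_filter_params(
    transfer_abi,
    contract_address="0xContractAddressPlaceholder",
    argument_filters={},
    fromBlock=0,
    toBlock="latest",
)
# params now carries 'topics', 'address', 'fromBlock' and 'toBlock',
# ready to be passed to an eth filter installation call.
```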
f054090518610eef7e91a188cf6eddfc36451fae | 92782460989bc10540c85804215832b465a6474a | /spell/__main__.py | 45b2f2907893f3d1ded09b0872d5dd381c92b1d3 | [
"GPL-1.0-or-later",
"GPL-3.0-only",
"MIT"
] | permissive | malaikannan/open-tamil | 805ccd2b9bd88df23127425c7bcebaf97f337352 | 99f0c4484cd07a98cf77146c754592e5c26f6965 | refs/heads/master | 2022-07-19T12:35:36.956062 | 2020-05-21T07:41:06 | 2020-05-21T07:41:06 | 266,903,297 | 2 | 0 | MIT | 2020-05-25T23:54:56 | 2020-05-25T23:54:56 | null | UTF-8 | Python | false | false | 158 | py | #!/usr/bin/python
#(C) 2016 Muthiah Annamalai
#This file is part of open-tamil package
from .spell import main
if __name__ == u"__main__":
main()
| [
"[email protected]"
] | |
78ec746175809e03465ab3c1d327ae68cb431ec3 | ed17ca55fedd70cc336271e40634b09791437db5 | /products/migrations/0002_auto_20201204_0247.py | 2b725f84fddbe8f837b129604041dbd116466796 | [] | no_license | wecode-bootcamp-korea/14-2nd-MyLittleTrip-backend | a5c8beb0a30ed7ef92fba04499036d4ccfbe2add | d9720f51a30028a866fd62d6c66a1a7e4525ce5f | refs/heads/master | 2023-01-24T13:40:18.601842 | 2020-12-09T19:03:35 | 2020-12-11T03:49:04 | 317,109,595 | 0 | 3 | null | 2020-12-11T03:49:05 | 2020-11-30T04:21:54 | Python | UTF-8 | Python | false | false | 381 | py | # Generated by Django 3.1.4 on 2020-12-04 02:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='region',
name='region_code',
field=models.CharField(max_length=30),
),
]
| [
"[email protected]"
] | |
fee4e1d37330f42b34263f994d91a021098befbe | 81537f77ddfa3820e7a224173b8249af2d665987 | /microcosm_flask/paging.py | dd014de3020195af3815195585789346b2ef9e63 | [
"Apache-2.0"
] | permissive | pokey/microcosm-flask | efa0ae6c39f0ecf85c79f3d2ef607af8bad80635 | 77dd32f4c670c1a6f2ac70dd37d2a3f05d118707 | refs/heads/develop | 2021-01-13T14:58:48.889868 | 2017-01-10T22:55:17 | 2017-01-10T22:55:17 | 79,353,026 | 0 | 0 | null | 2017-01-18T15:23:13 | 2017-01-18T15:23:13 | null | UTF-8 | Python | false | false | 3,576 | py | """
Pagination support.
"""
from marshmallow import fields, Schema
from microcosm_flask.linking import Link, Links
from microcosm_flask.operations import Operation
class PageSchema(Schema):
offset = fields.Integer(missing=0, default=0)
limit = fields.Integer(missing=20, default=20)
def make_paginated_list_schema(ns, item_schema):
"""
Generate a paginated list schema.
:param ns: a `Namespace` for the list's item type
:param item_schema: a `Schema` for the list's item type
"""
class PaginatedListSchema(Schema):
__alias__ = "{}_list".format(ns.subject_name)
offset = fields.Integer(required=True)
limit = fields.Integer(required=True)
count = fields.Integer(required=True)
items = fields.List(fields.Nested(item_schema), required=True)
_links = fields.Raw()
return PaginatedListSchema
class Page(object):
def __init__(self, offset, limit, **rest):
self.offset = offset
self.limit = limit
self.rest = rest
@classmethod
def from_query_string(cls, qs):
"""
Create a page from a query string dictionary.
This dictionary should probably come from `PageSchema.from_request()`.
"""
dct = qs.copy()
offset = dct.pop("offset", None)
limit = dct.pop("limit", None)
return cls(
offset=offset,
limit=limit,
**dct
)
def next(self):
return Page(
offset=self.offset + self.limit,
limit=self.limit,
**self.rest
)
def prev(self):
return Page(
offset=self.offset - self.limit,
limit=self.limit,
**self.rest
)
def to_dict(self):
return dict(self.to_tuples())
def to_tuples(self):
"""
Convert to tuples for deterministic order when passed to urlencode.
"""
return [
("offset", self.offset),
("limit", self.limit),
] + [
(key, str(self.rest[key]))
for key in sorted(self.rest.keys())
]
class PaginatedList(object):
def __init__(self,
ns,
page,
items,
count,
schema=None,
operation=Operation.Search,
**extra):
self.ns = ns
self.page = page
self.items = items
self.count = count
self.schema = schema
self.operation = operation
self.extra = extra
def to_dict(self):
return dict(
count=self.count,
items=[
self.schema.dump(item).data if self.schema else item
for item in self.items
],
_links=self._links,
**self.page.to_dict()
)
@property
def offset(self):
return self.page.offset
@property
def limit(self):
return self.page.limit
@property
def _links(self):
return self.links.to_dict()
@property
def links(self):
links = Links()
links["self"] = Link.for_(self.operation, self.ns, qs=self.page.to_tuples(), **self.extra)
if self.page.offset + self.page.limit < self.count:
links["next"] = Link.for_(self.operation, self.ns, qs=self.page.next().to_tuples(), **self.extra)
if self.page.offset > 0:
links["prev"] = Link.for_(self.operation, self.ns, qs=self.page.prev().to_tuples(), **self.extra)
return links
| [
"[email protected]"
] | |
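A minimal sketch of the `Page` round-trip (standalone; no Flask request or namespace involved):

```python
page = Page.from_query_string({"offset": 0, "limit": 20, "q": "foo"})
print(page.to_dict())           # {'offset': 0, 'limit': 20, 'q': 'foo'}
print(page.next().to_tuples())  # [('offset', 20), ('limit', 20), ('q', 'foo')]
```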
a58533d3e0ece739374952851246a130ea035d13 | 8f6aa9ac9c8c2e409875bbf36fbc49b3eb37d88b | /enthought/traits/ui/qt4/toolkit.py | 95a3bc996def22c143270a1727135aa6b5b892ba | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | enthought/etsproxy | 5660cf562c810db2ceb6b592b6c12274bce96d73 | 4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347 | refs/heads/master | 2023-03-27T04:51:29.297305 | 2020-12-02T09:05:18 | 2020-12-02T09:05:18 | 1,632,969 | 3 | 1 | NOASSERTION | 2020-12-02T09:05:20 | 2011-04-18T22:29:56 | Python | UTF-8 | Python | false | false | 50 | py | # proxy module
from traitsui.qt4.toolkit import *
| [
"[email protected]"
] | |
bdc4b50832daa23779baf1ddce94fb0fda27d228 | 8c3755e907a8f7fbae4e5e3334aa9332f8f705bb | /python_example_scripts/stack_lifo.py | d367791a5ad2ff1345144dca57c6111f3d7a842d | [] | no_license | xaneon/PythonProgrammingBasics | 20c9db82f621a41735856a0b008bf2c328d8e4b5 | accf4d16034d33e616b5ebe46f69c1130b09f85e | refs/heads/master | 2020-06-13T13:47:02.995326 | 2019-07-01T13:45:29 | 2019-07-01T13:45:29 | 194,235,103 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 800 | py | import random
import os
import time
def print_gegenstande(gegenstande):
os.system("clear")
for i, gegenstand in enumerate(gegenstande):
print(str(len(gegenstande) - i) + ". " + gegenstand)
def eliminate_fifo(gegenstande):
if len(gegenstande) == 0:
print("Kein Gegenstand mehr uebrig.")
else:
out = gegenstande.pop(0)
print_gegenstande(gegenstande)
print("%s herausgenommen." %out)
time.sleep(2)
eliminate_fifo(gegenstande)
print("Simulation einer Schlange, FIFO")
anzahl_zufallszahlen = 8
groesste_zahl = 10
zufallszahlen = random.sample(range(1, groesste_zahl), anzahl_zufallszahlen)
gegenstande = ['Item' + str(int(x)) for x in zufallszahlen]
print_gegenstande(gegenstande)
time.sleep(5)
eliminate_fifo(gegenstande)
| [
"[email protected]"
] | |
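Despite the file name, `pop(0)` above removes from the front of the list, which is FIFO (a queue). A minimal sketch of the LIFO (stack) counterpart only changes the pop index:

```python
def eliminate_lifo(items):
    # pop() without an index removes from the end of the list: last in, first out.
    while items:
        print("Removed %s." % items.pop())

eliminate_lifo(["Item1", "Item2", "Item3"])  # prints Item3, Item2, Item1
```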
b6fa1efaa794a7a60877ebf4753cc19a49117b67 | a859aadea24af173a175c2d01910314487ec6fbf | /common/coco_caption/pycocoevalcap/eval.py | 1947f13ea55f7a0df5153ed2c5e8c352ae50a0fe | [
"BSD-3-Clause",
"BSD-2-Clause-Views"
] | permissive | jiahuei/tf-sparse-captioning | cc52cbef5590b47727ea89f265011c9ab58aebad | 9d7b8ecdd44fb1541500ca4f920d6c94fd15bad1 | refs/heads/main | 2023-04-07T05:27:28.395758 | 2021-04-19T11:27:28 | 2021-04-19T11:27:28 | 359,341,665 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,928 | py | __author__ = 'tylin'
from common.coco_caption.pycocoevalcap.tokenizer.ptbtokenizer import PTBTokenizer
from common.coco_caption.pycocoevalcap.bleu.bleu import Bleu
from common.coco_caption.pycocoevalcap.meteor.meteor import Meteor
from common.coco_caption.pycocoevalcap.rouge.rouge import Rouge
from common.coco_caption.pycocoevalcap.cider.cider import Cider
from common.coco_caption.pycocoevalcap.spice.spice import Spice
class COCOEvalCap:
def __init__(self, coco, cocoRes):
self.evalImgs = []
self.eval = {}
self.imgToEval = {}
self.coco = coco
self.cocoRes = cocoRes
self.params = {'image_id': coco.getImgIds()}
def evaluate(self):
imgIds = self.params['image_id']
# imgIds = self.coco.getImgIds()
gts = {}
res = {}
for imgId in imgIds:
gts[imgId] = self.coco.imgToAnns[imgId]
res[imgId] = self.cocoRes.imgToAnns[imgId]
# =================================================
# Set up scorers
# =================================================
print('tokenization...')
tokenizer = PTBTokenizer()
gts = tokenizer.tokenize(gts)
res = tokenizer.tokenize(res)
# =================================================
# Set up scorers
# =================================================
print('setting up scorers...')
scorers = [
(Bleu(4), ["Bleu_1", "Bleu_2", "Bleu_3", "Bleu_4"]),
(Meteor(), "METEOR"),
(Rouge(), "ROUGE_L"),
(Cider(), "CIDEr"),
(Spice(), "SPICE")
]
# =================================================
# Compute scores
# =================================================
for scorer, method in scorers:
print('computing %s score...' % (scorer.method()))
score, scores = scorer.compute_score(gts, res)
if type(method) == list:
for sc, scs, m in zip(score, scores, method):
self.setEval(sc, m)
self.setImgToEvalImgs(scs, gts.keys(), m)
print("%s: %0.3f" % (m, sc))
else:
self.setEval(score, method)
self.setImgToEvalImgs(scores, gts.keys(), method)
print("%s: %0.3f" % (method, score))
self.setEvalImgs()
def setEval(self, score, method):
self.eval[method] = score
def setImgToEvalImgs(self, scores, imgIds, method):
for imgId, score in zip(imgIds, scores):
if imgId not in self.imgToEval:
self.imgToEval[imgId] = {}
self.imgToEval[imgId]["image_id"] = imgId
self.imgToEval[imgId][method] = score
def setEvalImgs(self):
self.evalImgs = [img_eval for imgId, img_eval in self.imgToEval.items()]
| [
"[email protected]"
] | |
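A minimal driver sketch for `COCOEvalCap` (assumes pycocotools plus the Java dependencies of the METEOR/SPICE scorers; both file paths are placeholders):

```python
from pycocotools.coco import COCO

# Placeholder paths; any COCO-format caption annotation/result pair works.
coco = COCO("annotations/captions_val2014.json")
cocoRes = coco.loadRes("results/captions_val2014_results.json")
cocoEval = COCOEvalCap(coco, cocoRes)
cocoEval.params["image_id"] = cocoRes.getImgIds()  # score only captioned images
cocoEval.evaluate()
print(cocoEval.eval)  # e.g. {'Bleu_1': ..., 'METEOR': ..., 'CIDEr': ..., 'SPICE': ...}
```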
9391b31cd13597c6fe1434a51e63e7aec430beb3 | 3647a797d42fd51b4290f9bb3c56b80cebd1c17f | /lesson215.py | 83d846bf66e88a1f14a6e902be2e796be4870611 | [] | no_license | ipcoo43/algorithm | 1ff8193b0adc92056fbbb7cafcbf76afa6dc7543 | 7088e6a6f71f6c32feab0806cb9337a8c45755c3 | refs/heads/master | 2020-05-17T21:15:37.671199 | 2019-05-03T03:04:48 | 2019-05-03T03:04:48 | 183,966,114 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 110 | py | import glob2
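# Print every ./data/t*.txt file whose contents contain the phrase 'LIFE IS TOO SHORT'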
print(list(filter(lambda x:'LIFE IS TOO SHORT' in open(x).read(), glob2.glob('./data/t*.txt') ))) | [
"[email protected]"
] | |
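An equivalent sketch using only the standard library (no `glob2` dependency), with `read_text` so file handles are closed promptly:

```python
from pathlib import Path

matches = [str(p) for p in Path("./data").glob("t*.txt")
           if "LIFE IS TOO SHORT" in p.read_text()]
print(matches)
```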
224e7ac3dff3f24f45821a832a7a16546a31d23f | fd60b97a4a506bbff92709c38136fda8e4a03274 | /Ben's Projects/Archived Projects/Self-Driving Buggy Rev. 2/Camera.py | af6d4f57c28feb56bfca0a855e95252d97f4b525 | [] | no_license | BPPJH/Self-Driving-Buggy | 3727795384f3a5d530c9bc0ab281a09dafa6748f | 7268b9e6bd6274f0520c7d57cda3af250316d913 | refs/heads/master | 2020-12-29T03:34:24.048157 | 2015-08-12T01:48:03 | 2015-08-12T01:48:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,905 | py | import cv2
import numpy
import copy
import time
import os
projectDir = os.path.dirname(os.path.realpath(__file__))
class Camera(object):
excludedSources = set()
increaseFPSActions = []
resolutions = {
"logitech": {
8: (1920, 1080),
12: (1440, 810),
23: (960, 540),
30: (480, 270),
31: (240, 135),
"default": 30
},
"ELP": {
10: (2048, 1536),
12: (1600, 1200),
30: (1280, 720),
31: (640, 480),
32: (320, 240),
"default": 30
},
"ELP180": {
6: (1920, 1080),
9: (1280, 720),
21: (800, 600),
30: (640, 480),
31: (352, 288),
32: (320, 240),
"default": 30
}
}
macKeyCodes = {
63232: "up",
63233: "down",
63234: "left",
63235: "right",
27: "esc",
13: "enter"
}
def __init__(self, windowName=None, camSource=None, width=None, height=None, sizeByFPS=None, cameraType=None,
crop=(None,) * 4, autoSelect=False):
time1 = time.time()
self.windowName = windowName
self.camSource = camSource
if self.windowName is None:
self.enableDraw = False
else:
self.enableDraw = True
self.sizeByFPS = sizeByFPS
self.isRunning = True
self.analysisApplied = False
self.cameraType = cameraType
self.cameraFPS = None
self.trackbarName = "Frame"
self.crop = list(crop)
if cameraType is not None:
self.resolutions = Camera.resolutions[cameraType]
else:
self.resolutions = Camera.resolutions["logitech"]
if windowName is not None:
cv2.namedWindow(windowName)
if camSource is not None:
if type(camSource) == str:
self.loadVideo(camSource)
else:
self.loadCamera(camSource)
else:
if autoSelect is True:
capture = self.searchForCamera()
else:
capture = self.cameraSelector()
self.loadCamera(capture)
if width is not None and height is not None:
self.width, self.height = width, height
elif self.sizeByFPS is not None:
if (self.sizeByFPS in self.resolutions.keys()) is False:
self.sizeByFPS = self.findClosestRes(sizeByFPS)
self.width, self.height = self.resolutions[self.sizeByFPS]
else:
if type(self.camSource) == int:
self.sizeByFPS = self.resolutions["default"]
self.width, self.height = self.resolutions[self.sizeByFPS]
else:
self.width, self.height = self.camera.get(cv2.CAP_PROP_FRAME_WIDTH), self.camera.get(
cv2.CAP_PROP_FRAME_HEIGHT)
if type(self.camSource) == int:
Camera.increaseFPSActions.append(self.increaseFPS)
if len(Camera.increaseFPSActions) >= 2:
for increaseFPS in Camera.increaseFPSActions:
increaseFPS()
Camera.increaseFPSActions = []
self.camera.set(cv2.CAP_PROP_FRAME_WIDTH, self.width)
self.camera.set(cv2.CAP_PROP_FRAME_HEIGHT, self.height)
if self.crop[0] is None:
self.crop[0] = 0
if self.crop[1] is None:
self.crop[1] = 0
if self.crop[2] is None:
self.crop[2] = self.width
if self.crop[3] is None:
self.crop[3] = self.height
time2 = time.time()
print str(self.camSource) + " loaded in " + str(time2 - time1) + " seconds. Capture size is " + \
str(int(self.width)) + "x" + str(int(self.height))
def cameraSelector(self):
shape = None
windowName = "camera #"
capture = 0
def updateCapture(windowName, video, capture, delta=None, newCapture=None):
print str(capture) + " ---> ",
cv2.destroyWindow(windowName + str(capture))
if delta is not None:
capture += delta
elif newCapture is not None:
capture = newCapture
print capture
video = cv2.VideoCapture(capture)
video.set(cv2.CAP_PROP_FRAME_WIDTH, 720)
video.set(cv2.CAP_PROP_FRAME_HEIGHT, 450)
return video, capture
video = cv2.VideoCapture(capture)
video.set(cv2.CAP_PROP_FRAME_WIDTH, 720)
video.set(cv2.CAP_PROP_FRAME_HEIGHT, 450)
while True:
key = self.getPressedKey(5)
if key == "left":
video, capture = updateCapture(windowName, video, capture, delta=-1)
elif key == "right":
video, capture = updateCapture(windowName, video, capture, delta=+1)
elif type(key) == str and key.isdigit():
video, capture = updateCapture(windowName, video, capture, newCapture=int(key))
elif key == "enter":
cv2.destroyWindow(windowName + str(capture))
return capture
elif key == 'q' or key == "esc":
quit()
success, frame = video.read()
if success is True and frame is not None:
cv2.imshow(windowName + str(capture), frame)
shape = frame.shape
else:
cv2.imshow(windowName + str(capture), numpy.zeros(shape))
def searchForCamera(self):
success, frame = False, None
capture = 0
while True:
temp = cv2.VideoCapture(capture)
success, frame = temp.read()
if (success is True or capture > 10) and (capture in Camera.excludedSources) == False:
break
capture += 1
if success is False:
capture = -1
while True:
temp = cv2.VideoCapture(capture)
success, frame = temp.read()
if (success is True or capture < -10) and (capture in Camera.excludedSources) == False:
break
capture -= 1
if success is False:
raise Exception("Camera could not be found")
return capture
def loadCamera(self, capture):
print "loading camera " + str(capture) + " into window named '" + str(self.windowName) + "'..."
self.camera = cv2.VideoCapture(capture)
self.camSource = capture
Camera.excludedSources.add(capture)
def loadVideo(self, camSource):
print "loading video into window named '" + str(self.windowName) + "'..."
self.camera = cv2.VideoCapture(projectDir + "/videos/" + camSource)
print "video loaded!"
self.cameraFPS = self.camera.get(cv2.CAP_PROP_FPS)
cv2.createTrackbar(self.trackbarName, self.windowName, 0,
int(self.camera.get(cv2.CAP_PROP_FRAME_COUNT)), self.onSlider)
def findClosestRes(self, sizeByFPS):
possibleFPSs = numpy.array(self.resolutions.keys())
minuend = copy.copy(possibleFPSs)
minuend.fill(sizeByFPS)
difference = possibleFPSs - minuend
difference = numpy.absolute(difference)
minimum = numpy.min(difference)
index = numpy.where(difference == minimum)[0][0]
return possibleFPSs[index]
def stopCamera(self):
self.isRunning = False
self.camera.release()
cv2.destroyWindow(self.windowName)
def setFrame(self, frameNumber):
if frameNumber >= self.camera.get(cv2.CAP_PROP_FRAME_COUNT):
frameNumber = 0
if type(self.camSource) == str and frameNumber >= 0:
self.camera.set(cv2.CAP_PROP_POS_FRAMES, frameNumber)
def incrementFrame(self):
self.setFrame(self.camera.get(cv2.CAP_PROP_POS_FRAMES) + 1)
def decrementFrame(self):
self.setFrame(self.camera.get(
cv2.CAP_PROP_POS_FRAMES) - 1.8) # huh??? Refuses to go back otherwise. frames numbers aren't integers?!
# cv2.CAP_PROP_POS_FRAMES) - 1)
def saveFrame(self, frame):
cv2.imwrite("images/" + time.strftime("%c").replace(":", "_") + ".png", frame)
def increaseFPS(self):
possibleFPSs = sorted(self.resolutions.keys())
index = possibleFPSs.index(self.sizeByFPS)
if (index + 1) < len(possibleFPSs):
index += 1
self.sizeByFPS = possibleFPSs[index]
self.width, self.height = self.resolutions[self.sizeByFPS]
self.setCameraSize(self.width, self.height)
def setCameraSize(self, width=None, height=None):
if self.width != None:
self.camera.set(cv2.CAP_PROP_FRAME_WIDTH, width)
self.width = width
if self.height != None:
self.camera.set(cv2.CAP_PROP_FRAME_HEIGHT, height)
self.height = height
def onSlider(self, frameIndex):
if frameIndex != self.currentFrameNumber():
self.camera.set(cv2.CAP_PROP_POS_FRAMES, frameIndex)
self.showFrame(self.updateFrame(False))
def getPressedKey(self, delay=1):
key = cv2.waitKey(delay)
if key in Camera.macKeyCodes:
return Camera.macKeyCodes[key]
elif key > -1:
return chr(key)
else:
return key
@staticmethod
def delay(delay):
cv2.waitKey(delay)
def showFrame(self, frame):
cv2.imshow(self.windowName, frame)
def currentFrameNumber(self):
return self.camera.get(cv2.CAP_PROP_POS_FRAMES)
def getVideoFPS(self):
return self.camera.get(cv2.CAP_PROP_FPS)
def updateFrame(self, readNextFrame=True):
if self.isRunning is False:
self.stopCamera()
return
if readNextFrame is False:
self.decrementFrame()
success, frame = self.camera.read()
if success is False or frame is None:
if type(self.camSource) == int:
raise Exception("Failed to read from camera!")
else:
self.setFrame(0)
success, frame = self.camera.read()
if frame.shape[0:2] != (self.height, self.width):
frame = cv2.resize(frame, (self.width, self.height), interpolation=cv2.INTER_NEAREST)
if self.crop is not None:
x0, y0, x1, y1 = self.crop
frame = frame[y0:y1, x0:x1]
if readNextFrame is True:
if type(self.camSource) == str:
cv2.setTrackbarPos(self.trackbarName, self.windowName,
int(self.camera.get(cv2.CAP_PROP_POS_FRAMES)))
return frame | [
"[email protected]"
] | |
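A minimal usage sketch of the class above (camera index 0 and the 30-FPS size bucket are assumptions; the file itself is Python 2 era, but this loop is version-neutral):

```python
cam = Camera(windowName="preview", camSource=0, sizeByFPS=30)
while cam.isRunning:
    frame = cam.updateFrame()
    cam.showFrame(frame)
    key = cam.getPressedKey()
    if key == 'q' or key == "esc":
        cam.stopCamera()
```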
3f9176bfbc4b64d2b60c858a1ea0840683339b80 | c84a3895e6fdcaff5a9f97abe9c3efbecbad535f | /strategy/indicator/stochastic/stochastic.py | 70af686788a789545bdd38b4abffeef8074a6528 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | cal97g/siis | 5a171eb34dd3f7ae6e19d8065ff1e2f8b6251319 | adc06e48e5df6ffd7bed6ee6b79d0aa3cfe80e0d | refs/heads/master | 2020-07-23T18:11:57.267225 | 2019-09-05T01:00:37 | 2019-09-05T01:00:37 | 207,663,001 | 0 | 1 | null | 2019-09-10T21:05:25 | 2019-09-10T21:05:25 | null | UTF-8 | Python | false | false | 4,547 | py | # @date 2018-09-02
# @author Frederic SCHERMA
# @author Xavier BONNIN
# @license Copyright (c) 2018 Dream Overflow
# Stochastic oscillator indicator
from strategy.indicator.indicator import Indicator
from strategy.indicator.utils import down_sample, MMexp_n, MM_n
import numpy as np
from talib import STOCH as ta_STOCH, STOCHF as to_STOCHF
class StochasticIndicator(Indicator):
"""
Stochastic oscillator indicator.
https://www.fidelity.com/learning-center/trading-investing/technical-analysis/technical-indicator-guide/slow-stochastic
"""
__slots__ = '_length', '_len_K', '_len_D', '_prev_k', '_last_k', '_prev_d', '_last_d', '_ks', '_ds'
@classmethod
def indicator_type(cls):
return Indicator.TYPE_MOMENTUM
@classmethod
def indicator_class(cls):
return Indicator.CLS_OSCILLATOR
def __init__(self, timeframe, len_K=9, len_D=3):
super().__init__("stochastic", timeframe)
self._len_K = len_K # periods number for the K
self._len_D = len_D # periods number for the D
self._prev_k = 0.0
self._last_k = 0.0
self._prev_d = 0.0
self._last_d = 0.0
self._ks = np.array([])
self._ds = np.array([])
@property
def length(self):
return self._length
@length.setter
def length(self, length):
self._length = length
@property
def prev_k(self):
return self._prev_k
@property
def last_k(self):
return self._last_k
@property
def prev_d(self):
return self._prev_d
@property
def last_d(self):
return self._last_d
@property
def len_K(self):
return self._len_K
@len_K.setter
def len_K(self, len_K):
self._len_K = len_K
@property
def len_D(self):
return self._len_D
@len_D.setter
def len_D(self, len_D):
self._len_D = len_D
@property
def ks(self):
return self._ks
@property
def ds(self):
return self._ds
def cross(self):
if (self._prev_k > self._prev_d and self._last_k < self._last_d):
return -1
elif (self._prev_k < self._prev_d and self._last_k > self._last_d):
return 1
return 0
@staticmethod
def Stochastic(N, data, N_D=3):
K = np.zeros(len(data))
for (j,d) in enumerate(data):
i=min(j,N)
highest = max(data[j-i:j+1])
lowest = min(data[j-i:j+1])
if highest == lowest:
highest += 0.000000001
K[j]=(d-lowest)/(highest-lowest) # +epsilon to avoid 0
D = MM_n(N_D, K)
return (K, D)
@staticmethod
def Stochastic_sf(N, data, N_D=3, step=1, filtering=False):
"""
Computes the stochastics.
N is the number of periods to scan for the price min and max.
N_D is the number of K samples used to compute D.
step selects only one sample out of every step in data.
filtering controls whether the data is filtered before down-sampling.
Returns the stochastics K, D, linearly interpolated; same size as data.
"""
sub_data = down_sample(data, step) if filtering else data [::step]
K = np.zeros(len(sub_data))
t_subdata = range(0,len(data),step)
for (j,d) in enumerate(sub_data):
i=min(j,N)
highest = max(sub_data[j-i:j+1])
lowest = min(sub_data[j-i:j+1])
if highest == lowest:
highest += 0.000000001
K[j]=(d-lowest)/(highest-lowest) # +epsilon to avoid 0
D = MM_n(N_D, K)
return np.interp(range(len(data)), t_subdata, K), np.interp(range(len(data)), t_subdata, D)
def compute(self, timestamp, high, low, close):
self._prev_k = self._last_k
self._prev_d = self._last_d
# k, d = StochasticIndicator.Stochastic_sf(self._len_K, close, self._len_D) # , self._step, self._filtering)
# k, d = ta_STOCH(high, low, close, fastk_period=self._len_K, slowk_period=3, slowk_matype=0, slowd_period=3, slowd_matype=0)
self._ks, self._ds = to_STOCHF(high, low, close, fastk_period=self._len_K, fastd_period=self._len_D, fastd_matype=0)
self._last_k = self._ks[-1]
self._last_d = self._ds[-1]
self._last_timestamp = timestamp
return self._ks, self._ds
def trace(self):
return (self._last_k, self._last_d)
| [
"[email protected]"
] | |
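A minimal sketch of the pure-numpy path (`StochasticIndicator.Stochastic`), which avoids talib and the strategy runtime; the price series is made up and the module above is assumed importable:

```python
import numpy as np

closes = np.array([44.0, 44.3, 44.1, 44.8, 45.2, 45.0, 44.7, 45.5, 46.0, 45.8])
K, D = StochasticIndicator.Stochastic(9, closes, N_D=3)
print(K[-1], D[-1])  # %K and %D as fractions in [0, 1] for the last bar
```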
90f3fd457bb6be4986e3cb9a139ce1bb796a0b93 | 260133e46c0c88fd20f2ed18309c5f46508b7fb9 | /opengever/core/upgrades/20180427113547_remove_repository_favorites_registry/upgrade.py | d35786bcd9e9437d14564de8a7f13537174a91fb | [] | no_license | robertmuehsig/opengever.core | 4180fbea1436fade9b33232a293b0d43ebfc6c51 | 63b3747793d5b824c56eb3659987bb361d25d8d8 | refs/heads/master | 2020-09-08T14:55:00.340222 | 2019-11-08T10:16:02 | 2019-11-08T10:16:02 | 221,163,734 | 0 | 0 | null | 2019-11-12T08:08:59 | 2019-11-12T08:08:54 | null | UTF-8 | Python | false | false | 209 | py | from ftw.upgrade import UpgradeStep
class RemoveRepositoryFavoritesRegistry(UpgradeStep):
"""Remove repository favorites registry.
"""
def __call__(self):
self.install_upgrade_profile()
| [
"[email protected]"
] | |
70b9ff857f052de1494af6f15324f7f6a36b5a94 | 3b504a983f1807ae7c5af51078bfab8c187fc82d | /client/input/__init__.py | 073192a4d4a4bb57b86582751ad2f0503f45db8e | [] | no_license | SEA-group/wowp_scripts | 7d35fd213db95ea6b3dbd1ec6d3e0f13de86ba58 | 2fe54a44df34f2dcaa6860a23b835dcd8dd21402 | refs/heads/master | 2021-09-07T23:10:13.706605 | 2018-03-02T17:23:48 | 2018-03-02T17:23:48 | 117,280,141 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 59 | py | # Embedded file name: scripts/client/input/__init__.py
pass | [
"[email protected]"
] | |
ab0ea4f90f5163784652dfb3afb83b9c095b376c | 76baf5ff0e8717ab6a17575bc9c126e7eb4e2f28 | /docker/shummie/shummiev58.py | 9a7880440e45168b937a2c6f6d116ae5eb46d44a | [] | no_license | nmalaguti/halite-bots | f0e8caf3e9898884e6476ff5d36884a2f4e07309 | 82efde39f83b5c34719d31396c7659d2c2f18c03 | refs/heads/master | 2023-07-15T01:20:00.784928 | 2023-06-29T02:49:35 | 2023-06-29T02:49:35 | 73,758,433 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63,668 | py | # ==============================================================================
# Imports
# ==============================================================================
import functools
import itertools
import logging
import math
import numpy as np
import random
import scipy.sparse
import sys
import time
import copy
# ==============================================================================
# Variables
# ==============================================================================
botname = "shummie v58"
strength_buffer = 0
print_maps = False
def print_map(npmap, name):
directory = "Maps/"
if print_maps:
np.savetxt(directory + name + str(game.frame) + ".txt", npmap)
# ==============================================================================
# Game Class
# ==============================================================================
class Game:
def __init__(self):
# This should only be called once, and at the beginning of the game
self.my_id = int(get_string())
map_size_string = get_string()
self.width, self.height = tuple(map(int, map_size_string.split()))
production_map_string = get_string()
self.production_map = np.array(list(map(int, production_map_string.split()))).reshape((self.height, self.width)).transpose()
self.create_squares_list()
self.frame = -1
self.phase = 0
self.get_frame()
self.starting_player_count = np.amax(self.owner_map) # Note, for range you'd need to increase the range by 1
# Create the distance map
self.create_one_time_maps()
self.max_turns = 10 * ((self.width * self.height) ** 0.5)
self.set_configs()
# Send the botname
send_string(botname)
def __iter__(self):
# Allows direct iteration over all squares
return itertools.chain.from_iterable(self.squares)
def get_frame(self, map_string=None):
# Updates the map information from the latest frame provided by the game environment
if map_string is None:
map_string = get_string()
split_string = map_string.split()
# The state of the map (including owner and strength values, but excluding production values) is sent in the following way:
# One integer, COUNTER, representing the number of tiles with the same owner consecutively.
# One integer, OWNER, representing the owner of the tiles COUNTER encodes.
# The above repeats until the COUNTER total is equal to the area of the map.
# It fills in the map from row 1 to row HEIGHT and within a row from column 1 to column WIDTH.
# Please be aware that the top row is the first row, as Halite uses screen-type coordinates.
owners = list()
while len(owners) < self.width * self.height:
counter = int(split_string.pop(0))
owner = int(split_string.pop(0))
owners.extend([owner] * counter)
assert len(owners) == self.width * self.height
self.owner_map = np.array(owners).reshape((self.height, self.width)).transpose()
# This is then followed by WIDTH * HEIGHT integers, representing the strength values of the tiles in the map.
# It fills in the map in the same way owner values fill in the map.
assert len(split_string) == self.width * self.height
str_list = list(map(int, split_string))
self.strength_map = np.array(str_list).reshape((self.height, self.width)).transpose()
# Update all squares
for x in range(self.width):
for y in range(self.height):
self.squares[x, y].update(self.owner_map[x, y], self.strength_map[x, y])
# Reset the move_map
self.move_map = np.ones((self.width, self.height)) * -1 # Could possibly expand this in the future to consider enemy moves...
self.frame += 1
def send_frame(self):
# Goes through each square and get the list of moves.
move_list = []
for sq in itertools.chain.from_iterable(self.squares):
if sq.owner == self.my_id:
if sq.strength == 0: # Squares with 0 strength shouldn't move.
sq.move = 4
if sq.move == -1:
# In the event we didn't actually assign a move, make sure it's coded to STILL
sq.move = 4
move_list.append(sq)
send_string(' '.join(str(square.x) + ' ' + str(square.y) + ' ' + str(translate_cardinal(square.move)) for square in move_list))
def create_squares_list(self):
self.squares = np.empty((self.width, self.height), dtype=object)
for x in range(self.width):
for y in range(self.height):
self.squares[x, y] = Square(self, x, y, self.production_map[x, y])
for x in range(self.width):
for y in range(self.height):
self.squares[x, y].after_init_update()
def create_one_time_maps(self):
self.distance_map = self.create_distance_map()
self.distance_map_no_decay = self.create_distance_map(1)
self.production_map_01 = np.maximum(self.production_map, 0.1)
self.production_map_1 = np.maximum(self.production_map, 1)
self.strength_map_01 = np.maximum(self.strength_map, 0.1)
self.strength_map_1 = np.maximum(self.strength_map, 1)
self.create_dijkstra_maps()
def create_dijkstra_maps(self):
def get_cost_recov(cellnum):
x = cellnum // self.height
y = cellnum % self.height
return self.strength_map_1[x, y] / self.production_map_01[x, y]
dij_recov_costs = scipy.sparse.dok_matrix((self.width * self.height, self.width * self.height))
for x in range(self.width):
for y in range(self.height):
coord = x * self.height + y
dij_recov_costs[coord, ((x + 1) % self.width) * self.height + ((y + 0) % self.height)] = get_cost_recov(((x + 1) % self.width) * self.height + ((y + 0) % self.height))
dij_recov_costs[coord, ((x - 1) % self.width) * self.height + ((y + 0) % self.height)] = get_cost_recov(((x - 1) % self.width) * self.height + ((y + 0) % self.height))
dij_recov_costs[coord, ((x + 0) % self.width) * self.height + ((y + 1) % self.height)] = get_cost_recov(((x + 0) % self.width) * self.height + ((y + 1) % self.height))
dij_recov_costs[coord, ((x + 0) % self.width) * self.height + ((y - 1) % self.height)] = get_cost_recov(((x + 0) % self.width) * self.height + ((y - 1) % self.height))
self.dij_recov_cost, self.dij_recov_route = scipy.sparse.csgraph.dijkstra(dij_recov_costs, return_predecessors=True)
self.dij_recov_distance_map = np.zeros((self.width, self.height, self.width, self.height))
self.dij_recov_route_map = np.zeros((self.width, self.height, self.width, self.height))
for x in range(self.width):
for y in range(self.height):
self.dij_recov_distance_map[x, y, :, :] = self.dij_recov_cost[x * self.height + y].reshape((self.width, self.height))
self.dij_recov_route_map[x, y, :, :] = self.dij_recov_route[x * self.height + y].reshape((self.width, self.height))
def create_distance_map(self, falloff=1):
# Creates a distance map so that we can easily divide a map to get ratios that we are interested in
# self.distance_map[x, y, :, :] returns an array of (width, height) that gives the distance (x, y) is from (i, j) for all i, j
# Note that the actual distance from x, y, to i, j is set to 1 to avoid divide by zero errors. Anything that utilizes this function should be aware of this fact.
# Create the base map for 0, 0
zero_zero_map = np.zeros((self.width, self.height))
for x in range(self.width):
for y in range(self.height):
dist_x = min(x, -x % self.width)
dist_y = min(y, -y % self.height)
zero_zero_map[x, y] = max(dist_x + dist_y, 1)
zero_zero_map = zero_zero_map ** falloff
distance_map = np.zeros((self.width, self.height, self.width, self.height))
for x in range(self.width):
for y in range(self.height):
distance_map[x, y, :, :] = roll_xy(zero_zero_map, x, y)
return distance_map
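# Worked example (illustration, not part of the original bot): on a 4x4
# wrapping map, the distance from (0, 0) to (3, 3) is min(3, 1) + min(3, 1) = 2
# rather than 6, because both axes wrap around. With falloff > 1 that entry
# becomes 2 ** falloff, so anything divided by distance_map[x, y] discounts
# far-away cells progressively faster.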
def set_configs(self):
self.buildup_multiplier = np.minimum(np.maximum(self.production_map, 4), 9)
self.pre_combat_threshold = -3
self.combat_radius = 8
self.production_cells_out = int(self.width / self.starting_player_count / 1.5)
self.phase = 0
# Find the "global max"
self.global_max_square = None
self.total_avg_cost_to_global = 0
def update_configs(self):
self.buildup_multiplier = np.minimum(np.maximum(self.production_map, 5), 5)
# self.buildup_multiplier = np.minimum(np.maximum(self.production_map, 4), 7)
self.buildup_multiplier = self.buildup_multiplier - (self.distance_from_border ** 0.4)
# self.combat_radius = int(min(max(5, self.percent_owned * self.width / 2), self.width // 2))
self.combat_radius = 8
if np.sum(self.combat_zone_map) > 3:
self.production_cells_out = int(self.width / self.starting_player_count / 2.5)
if self.percent_owned > 0.6:
self.buildup_multiplier -= 1
self.pre_combat_threshold = 0
self.combat_radius = 10
elif self.my_production_sum / self.next_highest_production_sum > 1.1:
self.buildup_multiplier += 1
def update(self):
# start = time.time()
self.update_maps()
# end = time.time()
# logging.debug("update_maps Frame: " + str(game.frame) + " : " + str(end - start))
self.update_stats()
self.update_configs()
def update_maps(self):
print_map(self.strength_map, "strength_map")
print_map(self.production_map, "production_map")
self.update_calc_maps()
self.update_owner_maps()
self.update_border_maps()
# start = time.time()
self.update_enemy_maps()
# end = time.time()
# logging.debug("update_enemymaps Frame: " + str(game.frame) + " : " + str(end - start))
# start = time.time()
self.update_value_production_map()
self.update_controlled_influence_production_maps()
def update_calc_maps(self):
self.strength_map_01 = np.maximum(self.strength_map, 0.1)
self.strength_map_1 = np.maximum(self.strength_map, 1)
def update_owner_maps(self):
self.is_owned_map = np.zeros((self.width, self.height))
self.is_neutral_map = np.zeros((self.width, self.height))
self.is_enemy_map = np.zeros((self.width, self.height))
self.is_owned_map[np.where(self.owner_map == self.my_id)] = 1
self.is_neutral_map[np.where(self.owner_map == 0)] = 1
self.is_enemy_map = 1 - self.is_owned_map - self.is_neutral_map
def update_border_maps(self):
self.border_map = np.zeros((self.width, self.height))
# self.inner_border_map = np.zeros((self.width, self.height))
self.combat_zone_map = np.zeros((self.width, self.height))
for square in itertools.chain.from_iterable(self.squares):
if square.owner == 0:
for n in square.neighbors:
if n.owner == self.my_id:
self.border_map[square.x, square.y] = 1
continue
border_squares_indices = np.transpose(np.nonzero(self.border_map))
border_squares = [self.squares[c[0], c[1]] for c in border_squares_indices]
self.distance_from_border = self.friendly_flood_fill_multiple_sources(border_squares, max(self.width, self.height))
owned_squares_indices = np.transpose(np.nonzero(self.is_owned_map))
owned_squares = [self.squares[c[0], c[1]] for c in owned_squares_indices]
self.distance_from_owned = self.non_friendly_flood_fill_multiple_sources(owned_squares, max(self.width, self.height))
# print_map(self.distance_from_owned, "distance_from_owned")
self.combat_zone_map = self.border_map * (self.strength_map == 0)
if self.starting_player_count > 1 and np.sum(self.combat_zone_map) >= 1: # Breaks in single player mode otherwise.
combat_squares_indices = np.transpose(np.nonzero(self.combat_zone_map))
combat_squares = [self.squares[c[0], c[1]] for c in combat_squares_indices]
self.distance_from_combat_zone = self.friendly_flood_fill_multiple_sources(combat_squares, max(self.width, self.height))
self.distance_from_combat_zone[self.distance_from_combat_zone == -1] = 9999
# print_map(self.distance_from_combat_zone, "distance_from_combat_zone")
else:
self.distance_from_combat_zone = np.ones((self.width, self.height)) * 999
def update_enemy_maps(self):
self.enemy_strength_map = np.zeros((5, self.width, self.height))
self.enemy_strength_map[0] = self.strength_map * self.is_enemy_map
for x in range(len(self.enemy_strength_map)):
self.enemy_strength_map[x] = spread_n(self.enemy_strength_map[0], x)
print_map(self.enemy_strength_map[x], "enemy_str_" + str(x) + "_")
self.own_strength_map = np.zeros((5, self.width, self.height))
self.own_strength_map[0] = self.strength_map * self.is_owned_map
for x in range(len(self.own_strength_map)):
self.own_strength_map[x] = spread_n(self.own_strength_map[0], x)
def update_value_production_map(self):
self.base_value_map = np.divide(self.production_map_01, self.strength_map_1) * (self.is_neutral_map - self.combat_zone_map)
# Each neutral cell gets assigned to the closest border non-combat cell
global_targets_indices = np.transpose(np.nonzero(self.is_neutral_map - self.combat_zone_map))
global_targets = [self.squares[c[0], c[1]] for c in global_targets_indices]
self.global_border_map = np.zeros((self.width, self.height))
for g in global_targets:
# Find the closest border square that routes to g
gb_map = self.dij_recov_distance_map[g.x, g.y] * (self.border_map - self.combat_zone_map)
gb_map[gb_map == 0] = 9999
tx, ty = np.unravel_index(gb_map.argmin(), (self.width, self.height))
self.global_border_map[tx, ty] += self.base_value_map[g.x, g.y] / self.dij_recov_distance_map[g.x, g.y, tx, ty]
self.value_production_map = 1 / np.maximum(self.base_value_map + self.global_border_map * 1, 0.001)
self.value_production_map *= (self.border_map - self.combat_zone_map) * (self.enemy_strength_map[1] == 0)
# self.value_production_map = (self.border_map - self.combat_zone_map) * self.recover_wtd_map
self.value_production_map[self.value_production_map == 0] = 9999
turns_left = self.max_turns - self.frame
recover_threshold = turns_left * 0.6
self.value_production_map[self.value_production_map > recover_threshold] = 9999
bx, by = np.unravel_index(self.value_production_map.argmin(), (self.width, self.height))
# best_cell_value = self.value_production_map[bx, by]
avg_recov_threshold = 2
# avg_map_recovery = np.sum(self.strength_map * (self.border_map - self.combat_zone_map)) / np.sum(self.production_map * (self.border_map - self.combat_zone_map))
avg_map_recovery = np.sum(self.strength_map * self.border_map) / np.sum(self.production_map * self.border_map)
self.value_production_map[self.value_production_map > (avg_recov_threshold * avg_map_recovery)] = 9999
def update_controlled_influence_production_maps(self):
max_distance = 6
self.controlled_production_influence_map = np.zeros((max_distance + 1, self.width, self.height))
self.controlled_production_influence_map[0] = self.production_map * (self.is_enemy_map + self.is_owned_map)
for distance in range(1, max_distance + 1):
self.controlled_production_influence_map[distance] = spread_n(self.controlled_production_influence_map[distance - 1], 1)
self.controlled_production_influence_map[distance] = rebase_map(self.controlled_production_influence_map[distance - 1], False)
def get_moves(self):
# This is the main logic controlling code.
# Find super high production cells
self.get_pre_combat_production()
# 1 - Find combat zone cells and attack them.
# start = time.time()
self.get_moves_attack()
# end = time.time()
# logging.debug("get_move_attack Frame: " + str(game.frame) + " : " + str(end - start))
self.get_moves_prepare_strength()
# 2 - Find production zone cells and attack them
# start = time.time()
self.get_moves_production()
# end = time.time()
# logging.debug("get production moves Frame: " + str(game.frame) + " : " + str(end - start))
# 3 - Move all other unassigned cells.
# start = time.time()
self.get_moves_other()
# end = time.time()
# logging.debug("get other moves Frame: " + str(game.frame) + " : " + str(end - start))
def get_pre_combat_production(self):
# In the event we are trying to fight in a very high production zone, reroute some attacking power to expand in this area.
potential_targets_indices = np.transpose(np.nonzero(self.border_map - self.combat_zone_map))
potential_targets = [self.squares[c[0], c[1]] for c in potential_targets_indices if (1 / self.base_value_map[c[0], c[1]] < self.pre_combat_threshold)]
if len(potential_targets) == 0:
return
potential_targets.sort(key=lambda sq: 1 / self.base_value_map[sq.x, sq.y])
best_target_value = 1 / self.base_value_map[potential_targets[0].x, potential_targets[0].y]
# anything with X of the best_value target should be considered. Let's set this to 4 right now.
while len(potential_targets) > 0 and 1 / self.base_value_map[potential_targets[0].x, potential_targets[0].y] <= (best_target_value + 1):
target = potential_targets.pop(0)
self.attack_cell(target, 2)
def get_moves_attack(self):
# Attempts to attack all border cells that are in combat
potential_targets_indices = np.transpose(np.nonzero(self.combat_zone_map))
potential_targets = [self.squares[c[0], c[1]] for c in potential_targets_indices]
# potential_targets.sort(key = lambda x: self.distance_from_enemy[x.x, x.y])
potential_targets.sort(key=lambda x: self.enemy_strength_map[2, x.x, x.y], reverse=True)
# TODO: Should sort by amount of overkill damage possible.
for square in potential_targets:
self.attack_cell(square, 1)
self.get_moves_breakthrough()
# Get a list of all squares within 5 spaces of a combat zone.
# TODO: This causes bounciness, i should probably do a floodfill of all combat zone squares instead?
combat_zone_squares = [self.squares[c[0], c[1]] for c in np.transpose(np.nonzero(self.combat_zone_map))]
combat_distance_matrix = self.friendly_flood_fill_multiple_sources(combat_zone_squares, self.combat_radius)
# np.savetxt("Masps\maps%i.txt" % self.frame, combat_distance_matrix)
# combat_distance_matrix[combat_distance_matrix == -1] = 0
# combat_distance_matrix[combat_distance_matrix == 1] = 0
combat_squares = [self.squares[c[0], c[1]] for c in np.transpose(np.nonzero(combat_distance_matrix))]
combat_squares.sort(key=lambda x: x.strength, reverse=True)
# combat_squares_indices = np.transpose(np.nonzero((self.distance_from_combat_zone <= combat_radius) * (self.move_map == -1)))
# combat_squares = [self.squares[c[0], c[1]] for c in combat_squares_indices]
print_map(combat_distance_matrix, "combat_distance_matrix_")
for square in combat_squares:
if (square.strength > 0) and (combat_distance_matrix[square.x, square.y] == 1) and (square.move == -1 or square.move == STILL):
targets = []
alt_targets = []
for n in square.neighbors:
if n.owner == 0 and n.strength == 0:
targets.append(n)
elif n.owner == self.my_id:
alt_targets.append(n)
targets.sort(key=lambda x: self.enemy_strength_map[2, x.x, x.y], reverse=True)
alt_targets.sort(key=lambda x: x.strength)
success = False
for t in targets:
success = self.move_square_to_target_simple(square, t, False)
if success:
break
if not success:
for t in targets:
success = self.move_square_to_target_simple(square, t, True)
if success:
break
elif (square.strength > (square.production * (self.buildup_multiplier[square.x, square.y] + self.distance_from_combat_zone[square.x, square.y]))) and ((square.x + square.y) % 2 == self.frame % 2) and square.move == -1 and square.moving_here == []:
# self.move_towards_map(square, self.distance_from_combat_zone)
self.move_towards_map_old(square, combat_distance_matrix)
# elif square.strength > square.production and square.move == -1 and self.distance_from_combat_zone[square.x, square.y] < 2:
# elif square.strength >= square.production and square.move == -1 and self.distance_from_combat_zone[square.x, square.y] < 2:
else:
if combat_distance_matrix[square.x, square.y] > 1:
self.make_move(square, STILL, None)
def find_nearest_non_owned_border(self, square):
current_distance = self.distance_from_border[square.x, square.y]
for n in square.neighbors:
if self.is_owned_map[n.x, n.y]:
if self.distance_from_border[n.x, n.y] < current_distance:
success = self.move_square_to_target(square, n, True)
if success:
break
def move_towards_map(self, square, distance_map):
current_distance = distance_map[square.x, square.y]
queue = [square]
targets = []
while len(queue) > 0:
current = queue.pop(0)
current_distance = distance_map[current.x, current.y]
for n in current.neighbors:
if distance_map[n.x, n.y] == 0:
targets.append(n)
elif distance_map[n.x, n.y] < current_distance:
queue.append(n)
random.shuffle(targets)
target = targets.pop(0) if targets else None  # guard: targets can be empty
# success = self.move_square_to_target(square, target, True)
# while len(targets) > 0:
# target = targets.pop(0)
# success = self.move_square_to_target(square, target, True)
# if success:
# return
def move_towards_map_old(self, square, distance_map, through_friendly=True):
current_distance = distance_map[square.x, square.y]
possible_moves = []
for n in square.neighbors:
if self.is_owned_map[n.x, n.y]:
if distance_map[n.x, n.y] <= current_distance - 1:
possible_moves.append(n)
if len(possible_moves) > 0:
random.shuffle(possible_moves)
possible_moves.sort(key=lambda sq: self.enemy_strength_map[4, sq.x, sq.y], reverse=True)
self.move_square_to_target(square, possible_moves[0], True)
def get_moves_prepare_strength(self):
# Attempts to build up strength prior to an immediate engagement, only if we aren't already in combat
# if np.sum(self.combat_zone_map) > 0:
# return
border_prepare_indices = np.transpose(np.nonzero(self.border_map * self.enemy_strength_map[1] > 0))
enemy_border_squares = [self.squares[c[0], c[1]] for c in border_prepare_indices]
if len(enemy_border_squares) > 0:
combat_distance_matrix = self.friendly_flood_fill_multiple_sources(enemy_border_squares, 5)
combat_distance_matrix[combat_distance_matrix == -1] = 0
combat_squares = [self.squares[c[0], c[1]] for c in np.transpose(np.nonzero(combat_distance_matrix))]
for square in combat_squares:
if self.distance_from_border[square.x, square.y] > 3 and (square.strength > square.production * self.buildup_multiplier[square.x, square.y] + 5) and ((square.x + square.y) % 2 == self.frame % 2) and square.move == -1 and square.moving_here == []:
self.move_towards_map_old(square, combat_distance_matrix)
elif square.strength >= 240 and self.own_strength_map[2, square.x, square.y] >= 500 and combat_distance_matrix[square.x, square.y] == 1:
# Attack
targets = []
for n in square.neighbors:
if combat_distance_matrix[n.x, n.y] == 0:
targets.append(n)
targets.sort(key=lambda n: self.enemy_strength_map[1, n.x, n.y], reverse=True)
self.move_square_to_target_simple(square, targets[0], False)
elif square.move == -1:
self.make_move(square, STILL, None)
def get_moves_production(self):
# Tries to find the best cells to attack from a production standpoint.
# Does not try to attack cells that are in combat zones.
# potential_targets_indices = np.transpose(np.nonzero((self.border_map - self.combat_zone_map) * (self.enemy_strength_map[1] == 0)))
potential_targets_indices = np.transpose(np.nonzero((self.value_production_map != 9999)))
        potential_targets = []
for c in potential_targets_indices:
target = self.squares[c[0], c[1]]
value = self.value_production_map[c[0], c[1]]
cells_out = 1
while cells_out <= self.production_cells_out:
potential_targets.append((target, value, cells_out))
cells_out += 1
if len(potential_targets) == 0:
return
potential_targets.sort(key=lambda x: x[0].strength)
potential_targets.sort(key=lambda x: x[1] + (x[2] * 1))
# Keep only the top 80ile?
# potential_targets = potential_targets[0:int(len(potential_targets) * .9)]
remove_targets = potential_targets[int(len(potential_targets) * 0.85):]
for t in remove_targets:
potential_targets.remove(t)
self.value_production_map[t[0].x, t[0].y] = 9999
# best_target_value = potential_targets[0][1]
# anything with X of the best_value target should be considered. Let's set this to 4 right now.
while len(potential_targets) > 0: # and potential_targets[0][1] <= (best_target_value + 4000):
target = potential_targets.pop(0)
success = self.attack_cell(target[0], target[2], target[2])
if success and target[2] < self.production_cells_out:
potential_targets = list(filter(lambda sq: sq[0] != target[0], potential_targets))
def get_moves_breakthrough(self):
# Determine if we should bust through and try to open up additional lanes of attack into enemy territory
# Best to have a separate lane. so we should evaluate squares that are not next to already open channels.
# We are only looking at squares which are next to the enemy already.
potential_squares_indices = np.transpose(np.nonzero((self.border_map - self.combat_zone_map) * (self.enemy_strength_map[1] > 0)))
potential_squares = [self.squares[c[0], c[1]] for c in potential_squares_indices]
# We only want to bust through if we have a lot of strength here.
# logging.debug(str(self.own_strength_map[4]))
for square in potential_squares:
if self.own_strength_map[4, square.x, square.y] > 750 and (self.own_strength_map[4, square.x, square.y] > 1.5 * self.enemy_strength_map[4, square.x, square.y]):
self.attack_cell(square, 1)
def get_moves_other(self):
        # Tries to move remaining idle squares toward border, production, or combat targets.
idle_squares_indices = np.transpose(np.nonzero((self.move_map == -1) * self.is_owned_map))
idle_squares = [self.squares[c[0], c[1]] for c in idle_squares_indices]
if len(idle_squares) == 0:
return
# Move squares closer to the border first.
idle_squares.sort(key=lambda sq: self.distance_from_border[sq.x, sq.y])
for square in idle_squares:
if square.strength > square.production * self.buildup_multiplier[square.x, square.y] and square.move == -1 and square.moving_here == []:
if self.percent_owned > 0.65:
self.find_nearest_non_owned_border(square)
# self.move_towards_map(square, self.distance_from_border)
else:
# Move towards the closest border
# if not self.inner_border_map[square.x, square.y]:
# For now, move to the square with the lowest recovery
value_map = (self.value_production_map + self.distance_map_no_decay[square.x, square.y] * 1) * self.border_map
# best_target_value = (self.recover_wtd_map * (self.border_map - self.combat_zone_map)).argmin()
# value_map = value_map * (1 - self.combat_zone_map)
value_map[np.nonzero(self.combat_zone_map)] = 0
value_map += self.distance_map_no_decay[square.x, square.y] * 0.66 * self.combat_zone_map
value_map -= self.controlled_production_influence_map[5, square.x, square.y] * 5 * self.combat_zone_map
# value_map[self.combat_zone_map == 1] = self.distance_map_no_decay[square.x, square.y] * .8
value_map[value_map == 0] = 9999
# tx, ty = np.unravel_index(value_map.argmin(), (self.width, self.height))
tx, ty = np.unravel_index(value_map.argmin(), (self.width, self.height))
target = self.squares[tx, ty]
# We're targeting either a combat square, or a production square. Don't move towards close production squares.
if self.distance_between(square, target) < 6 and self.distance_from_combat_zone[square.x, square.y] < 7:
if (square.x + square.y) % 2 != game.frame % 2:
continue
if (self.enemy_strength_map[3, square.x, square.y] > 0) and (((square.x + square.y) % 2) != (game.frame % 2)):
self.make_move(square, STILL, None)
elif self.combat_zone_map[tx, ty]:
if self.distance_between(square, target) > 14:
self.move_square_to_target_simple(square, target, True)
elif self.distance_between(square, target) > 1:
self.move_square_to_target(square, target, True)
else:
if self.distance_between(square, target) > 14:
self.move_square_to_target_simple(square, target, True)
elif self.distance_between(square, target) > self.production_cells_out - 1:
self.move_square_to_target(square, target, True)
def distance_between(self, sq1, sq2):
dx = abs(sq1.x - sq2.x)
dy = abs(sq1.y - sq2.y)
if dx > self.width / 2:
dx = self.width - dx
if dy > self.height / 2:
dy = self.height - dy
return dx + dy
def attack_cell(self, target, max_cells_out, min_cells_out=1):
# Attempts to coordinate attack to a specific cell.
cells_out = min_cells_out
while cells_out <= max_cells_out:
# If we're trying to attack a combat zone cell, this isn't the function to do it. cancel.
if cells_out > 1 and self.combat_zone_map[target.x, target.y]:
return False
if target.strength == 0 or target.production >= 5 or self.phase == 0:
free_squares = self.is_owned_map * (self.move_map == -1)
else:
free_squares = self.is_owned_map * (self.move_map == -1) * (self.strength_map >= 5 * self.production_map)
target_distance_matrix = self.friendly_flood_fill(target, cells_out)
target_distance_matrix[target_distance_matrix == -1] = 0
target_distance_matrix = target_distance_matrix * free_squares
available_strength = np.sum(self.strength_map * np.minimum(target_distance_matrix, 1))
target_distance_matrix_production = cells_out - target_distance_matrix
target_distance_matrix_production[target_distance_matrix_production == cells_out] = 0 # Cells furthest out would be moving so no production
target_distance_matrix_production = target_distance_matrix_production * free_squares
available_production = np.sum(self.production_map * target_distance_matrix_production)
if available_strength + available_production > target.strength + 0:
attacking_cells_indices = np.transpose(np.nonzero(target_distance_matrix > 0))
attacking_cells = [self.squares[c[0], c[1]] for c in attacking_cells_indices]
still_cells = []
if cells_out > 1:
still_cells_indices = np.transpose(np.nonzero(target_distance_matrix_production > 0))
still_cells = [self.squares[c[0], c[1]] for c in still_cells_indices]
moving_cells = list(set(attacking_cells) - set(still_cells))
for square in still_cells:
self.make_move(square, STILL, None)
still_strength = np.sum(self.strength_map * np.minimum(target_distance_matrix_production, 1))
needed_strength_from_movers = target.strength - available_production - still_strength + 1
if needed_strength_from_movers > 0:
# Handle movement here
moving_cells.sort(key=lambda x: x.strength, reverse=True)
# There are probably ways to do this more efficiently, for now just start with the highest strength cell
# and work backwards to minimize the # of cells that need to be moved.
for square in moving_cells:
if square.strength > 0:
if cells_out == 1:
self.move_square_to_target(square, target, False)
else:
self.move_square_to_target(square, target, True)
needed_strength_from_movers -= square.strength
if needed_strength_from_movers < 0:
break
return True
else:
cells_out += 1
return False
def make_move(self, square, direction, far_target):
self.move_map[square.x, square.y] = direction
if direction == -1: # Reset the square move
if square.target is not None:
square.target.moving_here.remove(square)
square.target = None
square.far_target = None
square.move = -1
square.far_target = None
return
if square.move != -1:
if square.target is not None:
square.target.moving_here.remove(square)
square.target = None
square.far_target = None
square.move = direction
if direction != STILL:
square.target = square.neighbors[direction]
square.target.moving_here.append(square)
square.far_target = far_target
def move_square_to_target(self, source, destination, through_friendly):
# Get the distance matrix that we will use to determine movement.
distance_matrix = self.flood_fill_until_target(source, destination, through_friendly)
source_distance = distance_matrix[source.x, source.y]
if source_distance == -1 or source_distance == 0:
# We couldn't find a path to the destination or we're trying to move STILL
return False
path_choices = []
for d in directions:
if d != STILL:
neighbor = source.neighbors[d]
if distance_matrix[neighbor.x, neighbor.y] == (source_distance - 1):
path_choices.append((d, neighbor))
# There should be at most 2 cells in path_choices
path_choices.sort(key=lambda x: x[1].production)
# Try simple resolution
for (direction, target) in path_choices:
future_strength = 0
if target.owner == self.my_id:
if target.move == -1 or target.move == STILL:
future_strength = target.strength # + target.production
for sq in target.moving_here:
future_strength += sq.strength
if future_strength + source.strength <= 255 + strength_buffer:
self.make_move(source, direction, destination)
return True
for (direction, target) in path_choices:
# Ok, can we move the cell that we are moving to:
if target.owner == self.my_id:
# Yes. We can, but is the cell staying still? If not, then we can't do anything
if target.move == STILL or target.move == -1:
# Ok, let's make sure that moving this piece actually does something.
future_strength = source.strength
for sq in target.moving_here:
future_strength += sq.strength
if future_strength <= 255 + strength_buffer:
# Ok, let's move the target square.
                        # Tentatively queue the move; it is undone below if no resolution works.
                        self.make_move(source, direction, destination)
                        n_neighbors = [(nd, target.neighbors[nd]) for nd in range(4)]
                        n_neighbors.sort(key=lambda x: x[1].production)
                        n_neighbors.sort(key=lambda x: self.distance_from_border[x[1].x, x[1].y], reverse=True)
                        # Try moving the blocking square to one of its neighbors, best candidates first.
                        for n_d, n in n_neighbors:
if n.owner == self.my_id and self.enemy_strength_map[2, n.x, n.y] == 0:
# Can we move into this square safely?
future_n_t_strength = target.strength
if n.move == STILL or n.move == -1:
future_n_t_strength += n.strength # + n.production
for n_moving in n.moving_here:
future_n_t_strength += n_moving.strength
if future_n_t_strength <= 255 + strength_buffer:
success = self.move_square_to_target_simple(target, n, True)
if success:
return True
# TODO: Logic to attempt to capture a neutral cell if we want.
self.make_move(source, -1, None)
# Nothing to do left
return False
def move_square_to_target_simple(self, source, destination, through_friendly):
# For large distances, we can probably get away with simple movement rules.
dist_w = (source.x - destination.x) % self.width
dist_e = (destination.x - source.x) % self.width
dist_n = (source.y - destination.y) % self.height
dist_s = (destination.y - source.y) % self.height
if dist_w == 0 and dist_n == 0:
return False
ew_swap = False
ns_swap = False
w_neighbor = source.neighbors[WEST]
e_neighbor = source.neighbors[EAST]
n_neighbor = source.neighbors[NORTH]
s_neighbor = source.neighbors[SOUTH]
if dist_w < dist_e:
if through_friendly and w_neighbor.owner != self.my_id:
if e_neighbor.owner == self.my_id:
ew_move = (EAST, e_neighbor)
ew_swap = True
else:
ew_move = None
else:
ew_move = (WEST, w_neighbor)
elif dist_e < dist_w:
if through_friendly and e_neighbor.owner != self.my_id:
if w_neighbor.owner == self.my_id:
ew_move = (WEST, w_neighbor)
ew_swap = True
else:
ew_move = None
else:
ew_move = (EAST, e_neighbor)
elif dist_w == 0:
ew_move = None
elif dist_w == dist_e:
if through_friendly and (w_neighbor.owner != self.my_id or e_neighbor.owner != self.my_id):
if w_neighbor.owner != self.my_id and e_neighbor.owner != self.my_id:
ew_move = None
elif w_neighbor.owner == self.my_id and e_neighbor.owner != self.my_id:
ew_move = (WEST, w_neighbor)
else:
ew_move = (EAST, e_neighbor)
else:
# Prefer the move with lower production
if e_neighbor.production < w_neighbor.production:
ew_move = (EAST, e_neighbor)
else:
ew_move = (WEST, w_neighbor)
if dist_s < dist_n:
if through_friendly and s_neighbor.owner != self.my_id:
if n_neighbor.owner == self.my_id:
ns_move = (NORTH, n_neighbor)
ns_swap = True
else:
ns_move = None
else:
ns_move = (SOUTH, s_neighbor)
elif dist_n < dist_s:
if through_friendly and n_neighbor.owner != self.my_id:
if s_neighbor.owner == self.my_id:
ns_move = (SOUTH, s_neighbor)
ns_swap = True
else:
ns_move = None
else:
ns_move = (NORTH, n_neighbor)
elif dist_s == 0:
ns_move = None
elif dist_s == dist_n:
if through_friendly and (s_neighbor.owner != self.my_id or n_neighbor.owner != self.my_id):
if s_neighbor.owner != self.my_id and n_neighbor.owner != self.my_id:
ns_move = None
elif s_neighbor.owner == self.my_id and n_neighbor.owner != self.my_id:
ns_move = (SOUTH, s_neighbor)
else:
ns_move = (NORTH, n_neighbor)
else:
# Prefer the move with lower production
if n_neighbor.production < s_neighbor.production:
ns_move = (NORTH, n_neighbor)
else:
ns_move = (SOUTH, s_neighbor)
if ns_move is None and ew_move is None:
return False
path_choices = []
if ns_move is None:
path_choices.append(ew_move)
elif ew_move is None:
path_choices.append(ns_move)
elif ns_swap is True and ew_swap is False:
path_choices.append(ew_move)
path_choices.append(ns_move)
elif ns_swap is False and ew_swap is True:
path_choices.append(ns_move)
path_choices.append(ew_move)
else:
if ew_move[1].production < ns_move[1].production:
path_choices.append(ew_move)
path_choices.append(ns_move)
else:
path_choices.append(ns_move)
path_choices.append(ew_move)
# Try simple resolution
for (direction, target) in path_choices:
future_strength = 0
if target.owner == self.my_id:
if target.move == -1 or target.move == STILL:
future_strength = target.strength # + target.production
for sq in target.moving_here:
future_strength += sq.strength
if future_strength + source.strength <= 255 + strength_buffer:
self.make_move(source, direction, destination)
return True
for (direction, target) in path_choices:
# Ok, can we move the cell that we are moving to:
if target.owner == self.my_id:
# Yes. We can, but is the cell staying still? If not, then we can't do anything
if target.move == STILL or target.move == -1:
# Ok, let's make sure that moving this piece actually does something.
future_strength = source.strength
for sq in target.moving_here:
future_strength += sq.strength
if future_strength <= 255 + strength_buffer:
# Ok, let's move the target square.
                        # Tentatively queue the move; it is undone below if no resolution works.
                        self.make_move(source, direction, destination)
                        n_neighbors = [(nd, target.neighbors[nd]) for nd in range(4)]
                        n_neighbors.sort(key=lambda x: x[1].production)
                        n_neighbors.sort(key=lambda x: self.distance_from_border[x[1].x, x[1].y], reverse=True)
                        # Try moving the blocking square to one of its neighbors, best candidates first.
                        for n_d, n in n_neighbors:
if n.owner == self.my_id and self.enemy_strength_map[2, n.x, n.y] == 0:
# Can we move into this square safely?
future_n_t_strength = target.strength
if n.move == STILL or n.move == -1:
future_n_t_strength += n.strength # + n.production
for n_moving in n.moving_here:
future_n_t_strength += n_moving.strength
if future_n_t_strength <= 255 + strength_buffer:
success = self.move_square_to_target_simple(target, n, True)
if success:
return True
# TODO: Logic to attempt to capture a neutral cell if we want.
self.make_move(source, -1, None)
# Nothing to do left
return False
def flood_fill_until_target(self, source, destination, friendly_only):
# Does a BFS flood fill to find shortest distance from source to target.
# Starts the fill AT destination and then stops once we hit the target.
q = [destination]
distance_matrix = np.ones((self.width, self.height)) * -1
distance_matrix[destination.x, destination.y] = 0
while len(q) > 0 and distance_matrix[source.x, source.y] == -1:
current = q.pop(0)
current_distance = distance_matrix[current.x, current.y]
for neighbor in current.neighbors:
if distance_matrix[neighbor.x, neighbor.y] == -1:
if not friendly_only or (friendly_only and neighbor.owner == self.my_id):
distance_matrix[neighbor.x, neighbor.y] = current_distance + 1
q.append(neighbor)
return distance_matrix
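    # Illustrative note (not from the original code): because the fill starts
    # at `destination`, distance_matrix[source.x, source.y] is the path
    # length, and any neighbor of `source` whose entry equals that length
    # minus one lies on a shortest path; move_square_to_target relies on this.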
def friendly_flood_fill(self, source, max_distance):
# Returns a np.array((self.width, self.height)) that contains the distance to the target by traversing through friendly owned cells only.
# q is a queue(list) of items (cell, distance)
q = [source]
distance_matrix = np.ones((self.width, self.height)) * -1
distance_matrix[source.x, source.y] = 0
while len(q) > 0:
current = q.pop(0)
current_distance = distance_matrix[current.x, current.y]
for neighbor in current.neighbors:
if distance_matrix[neighbor.x, neighbor.y] == -1 and neighbor.owner == self.my_id:
distance_matrix[neighbor.x, neighbor.y] = current_distance + 1
if current_distance < max_distance - 1:
q.append(neighbor)
return distance_matrix
def friendly_flood_fill_multiple_sources(self, sources, max_distance=999):
# Returns a np.array((self.width, self.height)) that contains the distance to the target by traversing through friendly owned cells only.
# q is a queue(list) of items (cell, distance). sources is a list that contains the source cells.
q = sources
distance_matrix = np.ones((self.width, self.height)) * -1
for source in q:
distance_matrix[source.x, source.y] = 0
while len(q) > 0:
current = q.pop(0)
current_distance = distance_matrix[current.x, current.y]
for neighbor in current.neighbors:
if (distance_matrix[neighbor.x, neighbor.y] == -1 or distance_matrix[neighbor.x, neighbor.y] > (current_distance + 1)) and neighbor.owner == self.my_id:
distance_matrix[neighbor.x, neighbor.y] = current_distance + 1
if current_distance < max_distance - 1:
q.append(neighbor)
return distance_matrix
def friendly_flood_fill_multiple_sources_cells_out(self, sources, max_distance=999):
# Returns a np.array((self.width, self.height)) that contains the distance to the target by traversing through friendly owned cells only.
# q is a queue(list) of items (cell, distance). sources is a list that contains the source cells.
q = sources
distance_matrix = np.ones((self.width, self.height)) * -1
for source in q:
distance_matrix[source.x, source.y] = 0
while len(q) > 0:
current = q.pop(0)
current_distance = distance_matrix[current.x, current.y]
for neighbor in current.neighbors:
if (distance_matrix[neighbor.x, neighbor.y] == -1 or distance_matrix[neighbor.x, neighbor.y] > (current_distance + 1)) and neighbor.owner == self.my_id:
distance_matrix[neighbor.x, neighbor.y] = current_distance + 1
if current_distance < max_distance - 1:
current_distance += 1
q.append(neighbor)
return distance_matrix
def non_friendly_flood_fill_multiple_sources(self, sources, max_distance=999):
# Returns a np.array((self.width, self.height)) that contains the distance to the target by traversing through non owned cells only.
# q is a queue(list) of items (cell, distance). sources is a list that contains the source cells.
q = sources
distance_matrix = np.ones((self.width, self.height)) * -1
for source in q:
distance_matrix[source.x, source.y] = 0
while len(q) > 0:
current = q.pop(0)
current_distance = distance_matrix[current.x, current.y]
for neighbor in current.neighbors:
if (distance_matrix[neighbor.x, neighbor.y] == -1 or distance_matrix[neighbor.x, neighbor.y] > (current_distance + 1)) and neighbor.owner != self.my_id:
distance_matrix[neighbor.x, neighbor.y] = current_distance + 1
if current_distance < max_distance - 1:
q.append(neighbor)
return distance_matrix
def last_resort_strength_check(self):
# Calculates the projected strength map and identifies squares that are violating it.
# Ignore strength overloads due to production for now
# Validate moves
projected_strength_map = np.zeros((self.width, self.height))
# We only care about our moves.
for square in itertools.chain.from_iterable(self.squares):
if square.owner == self.my_id:
if square.move == -1 or square.move == STILL:
projected_strength_map[square.x, square.y] += square.strength # + square.production
else:
dx, dy = get_offset(square.move)
projected_strength_map[(square.x + dx) % self.width, (square.y + dy) % self.height] += square.strength
# Get a list of squares that are over the cap
violation_indices = np.transpose(np.nonzero((projected_strength_map > 255 + strength_buffer)))
violation_squares = [self.squares[c[0], c[1]] for c in violation_indices]
violation_count = len(violation_squares)
for square in violation_squares:
if square.owner == self.my_id and (square.move == -1 or square.move == STILL):
# We can try to move this square to an neighbor.
possible_paths = []
for d in range(0, 4):
# Move to the lowest strength neighbor. this might cause a collision but we'll resolve it with multiple iterations
n = square.neighbors[d]
if n.owner == self.my_id and self.enemy_strength_map[2, n.x, n.y] == 0:
possible_paths.append((d, n, projected_strength_map[n.x, n.y]))
else:
# Try attacking a bordering cell
if (square.strength > (2 * n.strength)) and (n.production > 1):
possible_paths.append((d, n, n.strength))
                possible_paths.sort(key=lambda x: x[2])
                possible_paths.sort(key=lambda x: self.distance_from_border[x[1].x, x[1].y], reverse=True)
                # Force a move to the best-ranked option, if any
                if possible_paths:
                    best_d, best_n, _ = possible_paths[0]
                    self.make_move(square, best_d, best_n)
else:
# We aren't the problem. one of the squares that's moving here is going to collide with us.
# How do we resolve this?
options_list = []
for n in square.neighbors:
if n.owner == self.my_id:
options_list.append((n, projected_strength_map[n.x, n.y]))
options_list.sort(key=lambda x: x[1])
                # Have the friendly neighbors stay still instead, smallest projected strength first
for opt in options_list:
self.make_move(opt[0], STILL, None)
# self.make_move(options_list[0][0], STILL, None)
return violation_count
def update_stats(self):
# Updates various stats used for tracking
self.turns_left = self.max_turns - self.frame
self.percent_owned = np.sum(self.is_owned_map) / (self.width * self.height)
self.production_values = [0]
for i in range(1, self.starting_player_count + 1):
self.production_values.append(np.sum(self.production_map * (self.owner_map == i)))
self.my_production_sum = self.production_values[self.my_id]
temp_production_sum = copy.copy(self.production_values)
temp_production_sum.pop(self.my_id)
temp_production_sum.pop(0)
self.next_highest_production_sum = max(temp_production_sum)
# ==============================================================================
# Square class
# ==============================================================================
class Square:
def __init__(self, game, x, y, production):
self.game = game
self.x = x
self.y = y
self.production = production
self.height = game.height
self.width = game.width
self.vertex = x * self.height + y
self.target = None
self.moving_here = []
self.far_target = None
def after_init_update(self):
# Should only be called after all squares in game have been initialized.
self.north = self.game.squares[(self.x + 0) % self.width, (self.y - 1) % self.height]
self.east = self.game.squares[(self.x + 1) % self.width, (self.y + 0) % self.height]
self.south = self.game.squares[(self.x + 0) % self.width, (self.y + 1) % self.height]
self.west = self.game.squares[(self.x - 1) % self.width, (self.y + 0) % self.height]
self.neighbors = [self.north, self.east, self.south, self.west] # doesn't include self
def get_neighbors(self, n=1, include_self=False):
# Returns a list containing all neighbors within n squares, excluding self unless include_self = True
assert isinstance(include_self, bool)
assert isinstance(n, int) and n > 0
if n == 1:
if not include_self:
return self.neighbors
combos = ((dx, dy) for dy in range(-n, n + 1) for dx in range(-n, n + 1) if abs(dx) + abs(dy) <= n)
return (self.game.squares[(self.x + dx) % self.width][(self.y + dy) % self.height] for dx, dy in combos if include_self or dx or dy)
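    # Illustrative example: sq.get_neighbors() returns the 4 cardinal
    # neighbors; sq.get_neighbors(2) yields the 12 squares within Manhattan
    # distance 2, wrapping around the map edges.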
def update(self, owner, strength):
# updates the square with the new owner and strength. Also resets movement variables
self.owner = owner
self.strength = strength
self.reset_move()
def reset_move(self):
# Resets the move information
# Note, the target's moving_here is NOT reset so this should really only be used if all squares are being reset.
self.move = -1
self.target = None
self.moving_here = []
self.far_target = None
####################
# Helper Functions #
####################
def get_offset(direction):
return ((0, -1), (1, 0), (0, 1), (-1, 0), (0, 0))[direction]
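# Example: get_offset(NORTH) == (0, -1), get_offset(EAST) == (1, 0),
# and get_offset(STILL) == (0, 0).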
def distance_between(x1, y1, x2, y2):
dx = abs(x1 - x2)
dy = abs(y1 - y2)
if dx > game.width / 2:
dx = game.width - dx
if dy > game.height / 2:
dy = game.height - dy
return dx + dy
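# Illustrative example: on a 40x40 map the torus wraps, so
# distance_between(0, 0, 39, 0) == 1 rather than 39, and the largest possible
# distance is width / 2 + height / 2.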
def opposite_direction(direction):
return (direction + 2) % 4 if direction != STILL else STILL
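# Example: opposite_direction(NORTH) == SOUTH and
# opposite_direction(EAST) == WEST; STILL is its own opposite.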
def roll_x(M, x):
return np.roll(M, x, 0)
def roll_y(M, y):
return np.roll(M, y, 1)
def roll_xy(M, x, y):
return np.roll(np.roll(M, x, 0), y, 1)
def spread_n(M, n, decay=0, include_self=True):
# Takes a matrix M, and then creates an influence map by offsetting by N in every direction.
# Decay function is currently of the form exp(-decay * distance)
if include_self is True:
spread_map = np.copy(M)
else:
spread_map = np.zeros_like(M)
distance = 1
while distance <= n:
combos = get_all_d_away(distance)
decay_factor = math.exp(-decay * distance)
for c in combos:
spread_map += roll_xy(np.multiply(decay_factor, M), c[0], c[1])
distance += 1
return spread_map
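# Illustrative usage (hypothetical map): a single unit of strength spreads to
# every cell within n steps, weighted by exp(-decay * distance):
#   M = np.zeros((5, 5)); M[2, 2] = 1.0
#   S = spread_n(M, 2, decay=0.5)
#   S[2, 3] == math.exp(-0.5)  # distance-1 neighbor of the source cell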
def spread(M, decay=0, include_self=True):
# For now to save time, we'll use game_map.distance_map and assume that we'll always be using the same falloff distances to calculate offsets.
# Takes the matrix M and then for each point (x, y), calculate the product of the distance map and the decay factor.
decay_map = np.exp(np.multiply(game.distance_map, -decay))
spread_map = np.sum(np.multiply(decay_map, M), (2, 3))
return spread_map
def get_all_d_away(d):
combos = []
for x in range(0, d + 1):
x_vals = list(set([x, -x]))
y_vals = list(set([d - x, -(d - x)]))
combos.extend(list(itertools.product(x_vals, y_vals)))
return list(set(combos))
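# Example: get_all_d_away(1) yields the four unit offsets
# (0, 1), (0, -1), (1, 0), (-1, 0), in arbitrary set order.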
def distance_from_owned(M, mine):
# Returns the minimum distance to get to any point if already at all points in xys using 4D array M
return np.apply_along_axis(np.min, 0, M[np.nonzero(mine)])
def rebase_map(map_a, total=True):
# Takes a map and returns a rebased version where numpy.sum(map) = self.width * self.height
# If Total = False, rebases to the # of non-zero squares
if total:
size = functools.reduce(lambda x, y: x * y, map_a.shape)
else:
size = np.sum(map_a != 0)
factor = size / np.sum(map_a)
return np.multiply(map_a, factor)
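# Illustrative example: rebase_map(np.full((2, 2), 2.0)) scales by 0.5 so the
# rebased map sums to the map size (4); with total=False the target sum is
# the number of nonzero entries instead.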
# ==============================================================================
# Functions for communicating with the Halite game environment (formerly contained in separate module networking.py
# ==============================================================================
def translate_cardinal(direction):
# Cardinal index used by the framework is:
# NORTH = 0, EAST = 1, SOUTH = 2, WEST = 3, STILL = 4
# Cardinal index used by the game is:
# STILL = 0, NORTH = 1, EAST = 2, SOUTH = 3, WEST = 4
return int((direction + 1) % 5)
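# Example: framework NORTH (0) becomes game 1, EAST (1) -> 2, SOUTH (2) -> 3,
# WEST (3) -> 4, and STILL (4) wraps to 0.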
def send_string(to_be_sent):
sys.stdout.write(to_be_sent + "\n")
sys.stdout.flush()
def get_string():
return sys.stdin.readline().rstrip('\n')
# ==============================================================================
# Game Loop
# ==============================================================================
def game_loop():
game.get_frame()
# logging.debug("Frame: " + str(game.frame))
game.update()
game.get_moves()
    # Run the last-resort strength check in a few rounds, each round looping
    # until no further violations are being fixed.
    for _ in range(3):
        collision_check = 998
        last_collision_check = 999
        while collision_check < last_collision_check:
            last_collision_check = collision_check
            collision_check = game.last_resort_strength_check()
game.send_frame()
# #####################
# Game run-time code #
# #####################
logging.basicConfig(filename='logging.log', level=logging.DEBUG)
# logging.debug('your message here')
NORTH, EAST, SOUTH, WEST, STILL = range(5)
directions = [NORTH, EAST, SOUTH, WEST, STILL]
game = Game()
while True:
game_loop() | [
"[email protected]"
] | |
8397ff8119dc799e3deac3fdd0aa9bedf2c22261 | 0faa6fc013560b4c5acf64413d186019816a7582 | /chainedSCT/__init__.py | 9060c9628716ffba3b2324edd46528202b7eee2c | [
"MIT"
] | permissive | MSBeni/SmartContactTracing_Chained | f4deceb6107cfbf0f050a8f8943dd85caa1b43b8 | b667ef6e04ea4e3206760f9dc2035d425b6e26f5 | refs/heads/master | 2023-07-30T01:05:31.383298 | 2021-09-09T17:36:16 | 2021-09-09T17:36:16 | 328,707,374 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 110 | py | from . import extraction
from . import transformation
from . import Loading_Blockchian
__version__ = '0.0.1'
| [
"[email protected]"
] | |
e0aaddac3ec13c25540d435aa328944f249a9596 | 60105dfa9cd52412e86a1970b0873a47c058ca07 | /day1/files_ex1.py | 7d2050c21a6d506445c58dc66deef58798fb2eec | [
"Apache-2.0"
] | permissive | ktbyers/pynet-ons-feb19 | d6802ec01902169dffebe2b11fcd3a1ed9c42d3d | 5f02125f115fda2c26af87656b4f83d98cd2822c | refs/heads/master | 2020-04-19T20:43:31.754871 | 2019-02-25T20:47:40 | 2019-02-25T20:47:40 | 168,422,755 | 0 | 6 | Apache-2.0 | 2019-02-07T20:03:09 | 2019-01-30T22:05:35 | Python | UTF-8 | Python | false | false | 320 | py | #!/usr/bin/env python
# READ ####
f = open("my_file.txt")
my_content = f.read()
print(my_content)
# WRITE ####
print("\nWriting file.")
f = open("new_file.txt", "w")
f.write("whatever2\n")
f.close()
# APPEND ####
print("\nAppending file.")
with open("new_file.txt", "a") as f:
f.write("something else\n")
print()
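# Note (illustrative): the `with` form used for append above also works for
# reading and writing, and closes the file automatically:
# with open("my_file.txt") as f:
#     my_content = f.read()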
| [
"[email protected]"
] | |
d036805b85edb2e1846da24c7dc43c38d5a4e726 | c8c95520fb1a17d627a0256df2c6702f4f53403a | /3.3_the_for_loop.py | 371bcc17241b1a8ab78f7f1d729ca822967cbf14 | [] | no_license | wsargeant/httlacs | 608f1b4b34c95f18f934f10883beb56a2895c269 | 3337b369c541e18d5ed9ecbca35494c3ebcfa591 | refs/heads/master | 2023-02-21T21:10:03.228762 | 2021-01-25T08:44:46 | 2021-01-25T08:44:46 | 284,936,152 | 0 | 0 | null | 2020-08-28T10:35:17 | 2020-08-04T09:32:38 | null | UTF-8 | Python | false | false | 202 | py | def main():
pass
if __name__ == '__main__':
main()
for f in ["Joe", "Steve", "Pete", "Ian", "Mike", "Dom"]:
invite = "Hi " + f + ". Please come to my party on Saturday."
print(invite)
| [
"[email protected]"
] | |
ee7b8e64c5c5aabbd39dc3e97b98f0c125bbe83b | ba7e577bc4d083d5bfb1b0622d6149d57537bc62 | /leetcode/217_contains_duplicate.py | 4a01bac85e2491ed77d936570df8ff60f59b2605 | [] | no_license | lvraikkonen/GoodCode | 5b59f51252384de38fd21f13d4afda1f4f8be94d | a0f270c1adce25be11df92877813037f2e73e28b | refs/heads/master | 2022-05-04T19:55:40.751420 | 2022-04-12T09:41:15 | 2022-04-12T09:41:15 | 71,972,790 | 0 | 0 | null | 2020-10-13T18:57:51 | 2016-10-26T06:24:11 | Java | UTF-8 | Python | false | false | 572 | py | from collections import Counter
def containsDuplicate_counter(nums):
"""
:type nums: List[int]
:rtype: bool
"""
n = Counter(nums)
for k, v in n.items():
if v > 1:
return True
return False
def containsDuplicate_set(nums):
"""
:type nums: List[int]
:rtype: bool
"""
distinct_nums = set()
for num in nums:
if num in distinct_nums:
return True
distinct_nums.add(num)
return False
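# An equivalent one-liner (illustrative alternative, not part of the original
# solutions): duplicates exist exactly when deduplication shrinks the list.
# def containsDuplicate_oneline(nums):
#     return len(set(nums)) != len(nums)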
if __name__ == "__main__":
result = containsDuplicate_set([1,2,3,3])
    print(result)  # expected: True
"[email protected]"
] | |
7e9b9a0aad90d98e4fb2409f0db85fadcc0b665d | bc441bb06b8948288f110af63feda4e798f30225 | /architecture_view_sdk/model/metadata_center/stream_metric_states_pb2.py | 8a7a27d80fc16e11d705b7d43834edd01cdc33a1 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 3,685 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: stream_metric_states.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from architecture_view_sdk.model.metadata_center import stream_metric_schema_pb2 as architecture__view__sdk_dot_model_dot_metadata__center_dot_stream__metric__schema__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='stream_metric_states.proto',
package='metadata_center',
syntax='proto3',
serialized_options=_b('ZIgo.easyops.local/contracts/protorepo-models/easyops/model/metadata_center'),
serialized_pb=_b('\n\x1astream_metric_states.proto\x12\x0fmetadata_center\x1a\x46\x61rchitecture_view_sdk/model/metadata_center/stream_metric_schema.proto\"h\n\x12StreamMetricStates\x12\x0b\n\x03org\x18\x01 \x01(\x05\x12\x0f\n\x07\x63ommand\x18\x02 \x01(\t\x12\x34\n\x07payload\x18\x03 \x03(\x0b\x32#.metadata_center.StreamMetricSchemaBKZIgo.easyops.local/contracts/protorepo-models/easyops/model/metadata_centerb\x06proto3')
,
dependencies=[architecture__view__sdk_dot_model_dot_metadata__center_dot_stream__metric__schema__pb2.DESCRIPTOR,])
_STREAMMETRICSTATES = _descriptor.Descriptor(
name='StreamMetricStates',
full_name='metadata_center.StreamMetricStates',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='org', full_name='metadata_center.StreamMetricStates.org', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='command', full_name='metadata_center.StreamMetricStates.command', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payload', full_name='metadata_center.StreamMetricStates.payload', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=119,
serialized_end=223,
)
_STREAMMETRICSTATES.fields_by_name['payload'].message_type = architecture__view__sdk_dot_model_dot_metadata__center_dot_stream__metric__schema__pb2._STREAMMETRICSCHEMA
DESCRIPTOR.message_types_by_name['StreamMetricStates'] = _STREAMMETRICSTATES
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
StreamMetricStates = _reflection.GeneratedProtocolMessageType('StreamMetricStates', (_message.Message,), {
'DESCRIPTOR' : _STREAMMETRICSTATES,
'__module__' : 'stream_metric_states_pb2'
# @@protoc_insertion_point(class_scope:metadata_center.StreamMetricStates)
})
_sym_db.RegisterMessage(StreamMetricStates)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
2fcaf139e76286b816d99605e71bc07a933e540c | dc940d7614c4cf55a3c61a1ad4ee48e4ea4e319d | /src/slurmify/slurmify.py | cc78977d8e499ee972aa5bd9eb2e6fe671d6160d | [
"MIT"
] | permissive | salotz/slurmify | b4c88e434c2814c71fcef9cd13ecd716163a75d8 | 4a6da629706621b9b2633d34e574b121a75a8f87 | refs/heads/master | 2020-04-28T23:22:10.227665 | 2019-10-08T01:37:33 | 2019-10-08T01:37:33 | 175,651,924 | 0 | 1 | MIT | 2020-12-01T07:43:57 | 2019-03-14T15:42:30 | Python | UTF-8 | Python | false | false | 5,320 | py | import os
import os.path as osp
import tempfile
import subprocess
from jinja2 import Environment, FileSystemLoader
from slurmify import TEMPLATES_PATH
# names of the templates
SLURM_JOB_TEMPLATE = "slurm_job.sh.j2"
SLURM_RUN_TEMPLATE = "slurm_run.sh.j2"
SLURM_SETUP_TEMPLATE = "slurm_setup.sh.j2"
SLURM_TEARDOWN_TEMPLATE = "slurm_teardown.sh.j2"
SLURM_COMMANDS_TEMPLATE = "slurm_commands.sh.j2"
SLURM_SCRIPT_TEMPLATE = "slurm_script.sh.j2"
# list of all the templates
SLURM_TEMPLATE_NAMES = (SLURM_JOB_TEMPLATE, SLURM_RUN_TEMPLATE,
SLURM_SCRIPT_TEMPLATE, SLURM_COMMANDS_TEMPLATE)
# the names of the targets and whether or not they are optional (True)
# or not (False). This is from the perspective of the template and not
# the interfaces of the programs which may support defaults and other
# options etc.
SLURM_JOB_TARGETS = (
('job_name', False),
('stderr_log_dir', False),
('stdout_log_dir', False),
('login_shell', True),
('mail_user', True),
('mail_type', True),
)
SLURM_RUN_TARGETS = (
('walltime', False),
('nodes', False),
('ntasks', False),
('cpus_per_task', False),
('mem_per_cpu', True),
('node_mem', True),
('nodelist', True),
('constraint', True),
('gres', True),
('chdir', True),
)
SLURM_SCRIPT_TARGETS = (
('slurm_job', False),
('slurm_run', False),
('setup', True),
('payload', False),
('teardown', True),
)
SLURM_COMMANDS_TARGETS = (
('commands', False),
('epilog', True)
)
SLURM_SCRIPT_EMBED_TARGETS = (
('script', False),
('epilog', True)
)
SLURM_SETUP_TARGETS = (
('task_name', False),
('task_dir_path', False),
('env_vars', True),
('gnu_module', True),
('cuda_module', True),
# slurm job stuff
('walltime', True),
('memory', True),
('num_nodes', True),
('num_processors', True),
('num_gpus', True),
)
# Defaults
MAIL_TYPE_DEFAULT = "BEGIN,END,FAIL"
def get_env():
return Environment(loader=FileSystemLoader(TEMPLATES_PATH))
def check_kwargs(targets, input_kwargs):
missing = []
for key, optional in targets:
if key not in input_kwargs:
if not optional:
missing.append(key)
return missing
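# Illustrative usage (hypothetical targets): only non-optional keys count as
# missing, e.g. check_kwargs((('a', False), ('b', True)), {'b': 1}) == ['a'].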
class SlurmJob():
def __init__(self, job_name,
logs_dir='logs',
setup=None,
teardown=None,
login_shell=True,
email=None,
mail_type='BEGIN,END,FAIL'):
        if logs_dir:
            stderr_log_dir = logs_dir
            stdout_log_dir = logs_dir
        else:
            # Assumed fallback: use the current directory so the kwargs below
            # are always defined even when logs_dir is None.
            stderr_log_dir = "."
            stdout_log_dir = "."
self.job_kwargs = {
'job_name' : job_name,
'stderr_log_dir' : stderr_log_dir,
'stdout_log_dir' : stdout_log_dir,
'login_shell' : login_shell,
'mail_user' : email,
'mail_type' : mail_type
}
self.env = get_env()
job_template = self.env.get_template(SLURM_JOB_TEMPLATE)
# check to make sure all arguments work
        missing = check_kwargs(SLURM_JOB_TARGETS, self.job_kwargs)
        if len(missing) < 1:
            self._job_header = job_template.render(self.job_kwargs)
        else:
            raise ValueError("Missing required job parameters: {}".format(missing))
# get default setup and teardown scripts
self._setup = ""
self._teardown = ""
@property
def job_header(self):
return self._job_header
@property
def setup(self):
return self._setup
@property
def teardown(self):
return self._teardown
def run(self, run_kwargs, commands, epilog=None):
run_template = self.env.get_template(SLURM_RUN_TEMPLATE)
commands_template = self.env.get_template(SLURM_COMMANDS_TEMPLATE)
script_template = self.env.get_template(SLURM_SCRIPT_TEMPLATE)
        missing = check_kwargs(SLURM_RUN_TARGETS, run_kwargs)
        if len(missing) < 1:
            run_header = run_template.render(run_kwargs)
        else:
            raise ValueError("Missing required run parameters: {}".format(missing))
payload = commands_template.render(commands=commands,
epilog=epilog)
script_kwargs = {
'slurm_job' : self.job_header,
'slurm_run' : run_header,
'setup' : self.setup,
'payload' : payload,
'teardown' : self.teardown
}
        missing = check_kwargs(SLURM_SCRIPT_TARGETS, script_kwargs)
        if len(missing) < 1:
            script_str = script_template.render(script_kwargs)
        else:
            raise ValueError("Missing required script parameters: {}".format(missing))
# make a temporary file for the script and use sbatch to
# submit it
with tempfile.NamedTemporaryFile() as tmpfile:
# write the script to the tempfile
tmpfile.write(str.encode(script_str))
# set the file pointer back to the beginning of the file
# so we don't have to reopen it
tmpfile.seek(0)
# the path to the temp file
tmpfile_path = tmpfile.name
# then actually submit the script and get the return
# values
complete_process = subprocess.run(['sbatch', tmpfile_path])
            # Note: output is not captured here (no capture_output/PIPE), so
            # complete_process.stderr is always None.
            if complete_process.stderr is not None:
                pass
# get the jobid from stdout
#complete_process.stdout
return 0, complete_process
| [
"[email protected]"
] | |
7a8fa12f5d1cb13446a07ad2b0d36370c27adf16 | 1d04c90b3331261d741cc4f8757aeb0088193627 | /setup.py | fab6d30a6a0fe2cbc72fdd710ca21377f27e0b75 | [
"ZPL-2.1"
] | permissive | jean/Products.ZopeVersionControl | 11f5251fea47e12b6db10c2ff2067f23a93ebad4 | c5a7efacf39bd27a7acda525096f7f05e0672179 | refs/heads/master | 2021-01-22T16:37:29.630614 | 2013-03-13T14:29:07 | 2013-03-13T14:29:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 849 | py | from setuptools import setup, find_packages
__version__ = '1.1.4dev'
setup(
name='Products.ZopeVersionControl',
version=__version__,
description="Zope Version Control",
long_description=(open('README.rst').read() + "\n" +
open('CHANGES.rst').read()),
classifiers=[
'Framework :: Zope2',
],
license='ZPL',
author='Zope Foundation and Contributors',
author_email='[email protected]',
url='http://pypi.python.org/pypi/Products.ZopeVersionControl',
packages=find_packages('src'),
package_dir={'': 'src'},
namespace_packages=['Products'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'zope.interface',
'Acquisition',
'DateTime',
'transaction',
'ZODB3',
'Zope2',
],
)
| [
"[email protected]"
] | |
9230922cc0d5d21762db9fa255247982efd79301 | eb9f655206c43c12b497c667ba56a0d358b6bc3a | /python/testData/formatter/closingParenthesisInFromImportStatementWithNoHangingIndent.py | acd9400b7c7627d65aaecf625caff9ce653fb00c | [
"Apache-2.0"
] | permissive | JetBrains/intellij-community | 2ed226e200ecc17c037dcddd4a006de56cd43941 | 05dbd4575d01a213f3f4d69aa4968473f2536142 | refs/heads/master | 2023-09-03T17:06:37.560889 | 2023-09-03T11:51:00 | 2023-09-03T12:12:27 | 2,489,216 | 16,288 | 6,635 | Apache-2.0 | 2023-09-12T07:41:58 | 2011-09-30T13:33:05 | null | UTF-8 | Python | false | false | 52 | py | from foo import (bar,
baz
) | [
"[email protected]"
] | |
0c238b53e30bb6aace21ca85e6139c54047c9e9c | 32e789a8ae5eecec4ee8ab6cf72af0b7c599f5a9 | /.venv/bin/wheel | 5c7f8f28cc1a15bb37695045165bd81d187b9a7f | [] | no_license | kafura-kafiri/Khorus | 5aabc306673c9298bbd0c4a85ce874b7240a1d00 | 3d4cb0e8b56f9d296003e8bb7ff9a4f30a8f3ef8 | refs/heads/master | 2021-05-10T13:48:22.513815 | 2018-01-31T15:00:57 | 2018-01-31T15:00:57 | 118,488,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247 | #!/home/pouria/PyProjects/Khorus/.venv/bin/python3.6
# -*- coding: utf-8 -*-
import re
import sys
from wheel.tool import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
a9982a8f2ea8b99b85488f5ab2984195127a5fdd | 390c347ce47ad253883030f93f6901061874f2da | /experiments/genes/gene_sca_discrim_1.0.py | 8a68ebbee7bcf5d85541f935f27a47cc4e866775 | [
"MIT"
] | permissive | bdpedigo/sparse_new_basis | 55646622330a30dd33f490bf3a23036273f4b8ad | 4bb9c766154fe713c544a80178c067ae8c867026 | refs/heads/main | 2023-01-28T05:43:54.488110 | 2020-12-09T23:23:51 | 2020-12-09T23:23:51 | 312,875,716 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,160 | py | #%%
import os
import pickle
import time
from pathlib import Path
import colorcet as cc
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from hyppo.discrim import DiscrimOneSample, DiscrimTwoSample
from matplotlib.lines import Line2D
from sklearn.decomposition import PCA, SparsePCA
from sklearn.feature_selection import VarianceThreshold
from sklearn.metrics import pairwise_distances
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from tqdm import tqdm
from umap import UMAP
from graspologic.plot import pairplot
from sparse_decomposition import SparseComponentAnalysis
from sparse_decomposition.utils import (
calculate_explained_variance_ratio,
l1_norm,
proportion_variance_explained,
)
from sparse_new_basis.data import load_scRNAseq
from sparse_new_basis.plot import savefig, set_theme
set_theme()
fig_dir = Path("sparse_new_basis/results/gene_sca_discrim_1.0")
def stashfig(name, *args, **kwargs):
savefig(fig_dir, name, *args, **kwargs)
#%%
var_thresh = 0.005
train_size = 2 ** 14
max_iter = 20
with_mean = True
with_std = True
seed = 8888
#%%
sequencing_df, annotation_df = load_scRNAseq(fillna=True)
#%% throw out some genes with low variance
X = sequencing_df.values
# use a distinct name so the var_thresh threshold value is not shadowed
variance_selector = VarianceThreshold(threshold=var_thresh)
X = variance_selector.fit_transform(X)
gene_index = sequencing_df.columns
original_n_genes = len(gene_index)
gene_index = gene_index[variance_selector.get_support()]
sequencing_df = sequencing_df[gene_index]
new_n_genes = len(gene_index)
print(
f"Number of genes removed: {original_n_genes - new_n_genes} "
f"out of {original_n_genes}"
)
#%%
np.random.seed(seed)
neuron_index = sequencing_df.index
y = sequencing_df.index.get_level_values(level="Neuron_type").values
# stratify=y will try to set the distribution of class labels the same for train/test
X_train, X_test, index_train, index_test = train_test_split(
X, neuron_index, stratify=y, train_size=train_size, shuffle=True
)
#%% center and scale training data
currtime = time.time()
scaler = StandardScaler(with_mean=with_mean, with_std=with_std, copy=False)
X_train = scaler.fit_transform(X_train)
print(f"{time.time() - currtime:.3f} elapsed to scale and center data.")
X_test = scaler.transform(X_test)
#%%
def compute_metrics(model, X_train, X_test):
train_pve = proportion_variance_explained(X_train, model.components_.T)
test_pve = proportion_variance_explained(X_test, model.components_.T)
n_nonzero = np.count_nonzero(model.components_)
p_nonzero = n_nonzero / model.components_.size
n_nonzero_cols = np.count_nonzero(model.components_.max(axis=0))
p_nonzero_cols = n_nonzero_cols / model.components_.shape[1]
component_l1 = l1_norm(model.components_)
output = {
"train_pve": train_pve,
"test_pve": test_pve,
"n_nonzero": n_nonzero,
"p_nonzero": p_nonzero,
"n_nonzero_cols": n_nonzero_cols,
"p_nonzero_cols": p_nonzero_cols,
"component_l1": component_l1,
}
return output
params = []
S_train_by_params = {}
S_test_by_params = {}
models_by_params = {}
metric_rows = []
#%%
# Sparse Component Analysis and PCA
method = "SCA"
max_iter = 15
tol = 1e-4
n_components_range = [30]
for n_components in n_components_range:
gammas = [
2 * n_components,
0.25 * np.sqrt(n_components * X_train.shape[1]),
0.5 * np.sqrt(n_components * X_train.shape[1]),
np.sqrt(n_components * X_train.shape[1]),
0.5 * n_components * np.sqrt(X_train.shape[1]),
]
gammas = [float(int(g)) for g in gammas]
gammas.append(np.inf)
for gamma in gammas:
if gamma == np.inf:
method = "PCA"
else:
method = "SCA"
print(f"method = {method}, n_components = {n_components}, gamma = {gamma}")
print()
curr_params = (method, n_components, gamma)
params.append(curr_params)
# fit model
currtime = time.time()
model = SparseComponentAnalysis(
n_components=n_components,
max_iter=max_iter,
gamma=gamma,
verbose=10,
tol=tol,
)
S_train = model.fit_transform(X_train)
train_time = time.time() - currtime
print(f"{train_time:.3f} elapsed to train model.")
S_test = model.transform(X_test)
# save model fit
models_by_params[curr_params] = model
S_train_by_params[curr_params] = S_train
S_test_by_params[curr_params] = S_test
# save metrics
metrics = compute_metrics(model, X_train, X_test)
metrics["sparsity_param"] = gamma
metrics["sparsity_level"] = gamma
metrics["n_components"] = n_components
metrics["train_time"] = train_time
metrics["method"] = method
metric_rows.append(metrics)
print(f"Component L0 ratio: {metrics['p_nonzero']}")
print(f"Component L0 columns: {metrics['p_nonzero_cols']}")
print("\n\n\n")
#%%
# SparsePCA (Online Dictionary Learning)
method = "SparsePCA"
max_iter = 10
for n_components in n_components_range:
alphas = [1, 5, 15, 30, 40]
alphas = [float(int(a)) for a in alphas]
for alpha in alphas:
print(f"method = {method}, n_components = {n_components}, alpha = {alpha}")
print()
curr_params = (method, n_components, alpha)
params.append(curr_params)
# fit model
currtime = time.time()
model = SparsePCA(
n_components=n_components,
max_iter=max_iter,
alpha=alpha,
verbose=0,
tol=tol,
n_jobs=1,
)
S_train = model.fit_transform(X_train)
train_time = time.time() - currtime
print(f"{train_time:.3f} elapsed to train model.")
S_test = model.transform(X_test)
# save model fit
models_by_params[curr_params] = model
S_train_by_params[curr_params] = S_train
S_test_by_params[curr_params] = S_test
# save metrics
metrics = compute_metrics(model, X_train, X_test)
metrics["sparsity_param"] = alpha
metrics["sparsity_level"] = -alpha
metrics["n_components"] = n_components
metrics["train_time"] = train_time
metrics["method"] = method
metric_rows.append(metrics)
print(f"Component L0 ratio: {metrics['p_nonzero']}")
print(f"Component L0 columns: {metrics['p_nonzero_cols']}")
print("\n\n\n")
#%%
# Discriminability as a metric
n_subsamples = 10
n_per_subsample = 2 ** 12
# n_per_subsample = None
metric = "cosine"
def get_int_labels(index):
y = index.get_level_values("Neuron_type").values
uni_y = np.unique(y)
name_map = dict(zip(uni_y, range(len(uni_y))))
y = np.vectorize(name_map.get)(y)
return y
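# Illustrative example: for neuron-type labels ['ADA', 'ADA', 'AVM'] this maps
# to integer codes [0, 0, 1], as required by DiscrimOneSample below.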
discrim_result_rows = []
for curr_params in params:
print(curr_params)
print()
model = models_by_params[curr_params]
for mode in ["test"]:
if mode == "train":
S = S_train_by_params[curr_params]
y = get_int_labels(index_train)
elif mode == "test":
S = S_test_by_params[curr_params]
y = get_int_labels(index_test)
for i in range(n_subsamples):
if n_per_subsample is None:
n_per_subsample = len(S)
subsample_inds = np.random.choice(
len(S), size=n_per_subsample, replace=False
)
# compute discriminability
dist_S = pairwise_distances(S[subsample_inds], metric=metric)
currtime = time.time()
discrim = DiscrimOneSample(is_dist=True)
tstat, _ = discrim.test(dist_S, y[subsample_inds], reps=0)
print(f"{time.time() - currtime:.3f} elapsed for discriminability.")
# save results
metrics = {}
metrics["method"] = curr_params[0]
metrics["tstat"] = tstat
metrics["n_components"] = curr_params[1]
metrics["discrim_resample"] = i
metrics["mode"] = mode
metrics["sparsity_param"] = curr_params[2]
discrim_result_rows.append(metrics)
print()
discrim_results = pd.DataFrame(discrim_result_rows)
discrim_results
#%%
discrim_results["params"] = list(
zip(
discrim_results["method"],
discrim_results["n_components"],
discrim_results["sparsity_param"],
)
)
discrim_results["pretty_params"] = ""
for i, row in discrim_results.iterrows():
method = row["method"]
if method == "PCA":
discrim_results.loc[i, "pretty_params"] = "PCA"
else:
if method == "SCA":
symbol = r"$\gamma$"
elif method == "SparsePCA":
symbol = r"$\alpha$"
discrim_results.loc[i, "pretty_params"] = (
row["method"] + ", " + symbol + "=" + f"{row['sparsity_param']:.0f}"
)
#%%
discrim_results = discrim_results.sort_values(
["method", "n_components", "sparsity_param"]
)
discrim_results
#%%
# red_shades = sns.color_palette("Reds", n_colors=len(gammas)+1)[-2:-1]
# gammas = np.unique(discrim_results[discrim_results['method'] == 'SCA']['sparsity_param'])
# alphas = np.unique(discrim_results[discrim_results['method'] == 'SparsePCA']['sparsity_param'])
# push = 2
# blue_shades = sns.color_palette("Blues", n_colors=len(gammas)+push)[push:]
# green_shades = sns.color_palette("Greens", n_colors=len(alphas)+push)[push:][::-1]
# shades = red_shades + blue_shades + green_shades
# palette = dict(zip(discrim_results['params'], shades))
# palette
#%%
metrics = pd.DataFrame(metric_rows)
metrics["params"] = list(
zip(metrics["method"], metrics["n_components"], metrics["sparsity_param"])
)
metrics = metrics.set_index("params")
metrics
#%%
plot_results = discrim_results[discrim_results["mode"] == "test"].copy()
plot_results = plot_results.groupby("params").mean()
plot_results = pd.concat(
(plot_results, metrics.drop(["n_components", "sparsity_param", "method"], axis=1)),
axis=1,
)
plot_results = (
plot_results.reset_index()
.rename({"level_0": "method"}, axis=1)
.drop(["level_1", "level_2"], axis=1)
)
plot_results = plot_results.sort_values(["method", "n_components", "sparsity_level"])
#%%
plot_results["pretty_params"] = ""
for i, row in plot_results.iterrows():
method = row["method"]
if method == "PCA":
plot_results.loc[i, "pretty_params"] = "PCA"
else:
if method == "SCA":
symbol = r"$\gamma$"
elif method == "SparsePCA":
symbol = r"$\alpha$"
plot_results.loc[i, "pretty_params"] = (
row["method"] + ", " + symbol + "=" + f"{row['sparsity_param']:.0f}"
)
plot_results
#%%
red_shades = sns.color_palette("Reds", n_colors=len(gammas) + 1)[-2:-1]
gammas = np.unique(plot_results[plot_results["method"] == "SCA"]["sparsity_param"])
alphas = np.unique(
plot_results[plot_results["method"] == "SparsePCA"]["sparsity_param"]
)
push = 2
blue_shades = sns.color_palette("Blues", n_colors=len(gammas) + push)[push:]
green_shades = sns.color_palette("Greens", n_colors=len(alphas) + push)[push:]
shades = red_shades + blue_shades + green_shades
palette = dict(zip(plot_results["pretty_params"], shades))
palette
line_palette = dict(
zip(
["PCA", "SCA", "SparsePCA"], [red_shades[-1], blue_shades[-1], green_shades[-1]]
)
)
#%%
fig, ax = plt.subplots(1, 1, figsize=(8, 6))
sns.scatterplot(
data=plot_results,
x="p_nonzero_cols",
y="tstat",
hue="pretty_params",
palette=palette,
ax=ax,
s=100,
)
handles, labels = ax.get_legend_handles_labels()
handles = handles[:11]
labels = labels[:11]
sns.lineplot(
data=plot_results,
x="p_nonzero_cols",
y="tstat",
hue="method",
zorder=-1,
palette=line_palette,
)
ax.get_legend().remove()
ax.legend(handles=handles, labels=labels, bbox_to_anchor=(1, 1), loc="upper left")
ax.set(ylabel="Discriminability", xlabel="# of genes used")
stashfig("discriminability-vs-n_genes-new")
#%%
# plot_results["p_nonzero_cols_jitter"] = plot_results[
# "p_nonzero_cols"
# ] + np.random.uniform(-0.02, 0.02, size=len(plot_results))
# mean_results = plot_results.groupby("params").mean()
# gammas = np.unique(plot_results["gamma"])
# palette = dict(zip(gammas, sns.color_palette("deep", 10)))
# blue_shades = sns.color_palette("Blues", n_colors=len(gammas))[1:]
# palette = dict(zip(gammas[:-1], blue_shades))
# red_shades = sns.color_palette("Reds", n_colors=len(gammas))[1:]
# palette[np.inf] = red_shades[-1]
fig, ax = plt.subplots(1, 1, figsize=(8, 6))
sns.scatterplot(
data=plot_results,
x="p_nonzero_cols",
y="tstat",
hue="params",
palette=palette,
ax=ax,
s=10,
)
# sns.scatterplot(
# data=mean_results,
# x="p_nonzero_cols",
# y="tstat",
# hue="params",
# palette=palette,
# ax=ax,
# marker="_",
# s=200,
# linewidth=4,
# legend=False,
# )
ax.get_legend().remove()
ax.legend(bbox_to_anchor=(1, 1), loc="upper left")
ax.set(ylabel="Discriminability", xlabel="# of genes used")
stashfig("discriminability-vs-n_genes-new")
#%%
fig, ax = plt.subplots(1, 1, figsize=(8, 6))
sns.scatterplot(
data=plot_results,
x="p_nonzero",
y="p_nonzero_cols",
ax=ax,
hue="params",
palette=palette,
)
#%%
fig, ax = plt.subplots(1, 1, figsize=(8, 6))
sns.scatterplot(
data=plot_results,
x="p_nonzero",
y="train_time",
ax=ax,
hue="params",
palette=palette,
)
#%%
from sklearn.decomposition import PCA
from scipy.stats import ortho_group
n_components = 10
pca = PCA(n_components=n_components)
pca.fit(X_train[: 2 ** 11])  # fit only; the transformed scores are not used here
Y = pca.components_.T
#%%
rows = []
for i in range(1000):
R = ortho_group.rvs(n_components)
loading_l1 = l1_norm(Y @ R)
rows.append({"rotation": "random", "l1_norm": loading_l1})
from sparse_decomposition.decomposition.decomposition import _varimax
Y_varimax = _varimax(Y)
loading_l1 = l1_norm(Y_varimax)
rows.append({"rotation": "varimax", "l1_norm": loading_l1})
results = pd.DataFrame(rows)
#%%
import matplotlib.transforms as transforms
fig, ax = plt.subplots(1, 1, figsize=(8, 4))
sns.histplot(
data=results[results["rotation"] == "random"],
x="l1_norm",
ax=ax,
stat="density",
kde=True,
element="step",
)
ax.axvline(loading_l1, color="darkred", linestyle="--", linewidth=2)
# ax.axvline(l1_norm(Y), color='blue', linestyle='--', linewidth=2)
ax.annotate(
"Varimax\nrotation",
(loading_l1 + 2, 0.8),
(20, 20),
xycoords=transforms.blended_transform_factory(ax.transData, ax.transAxes),
textcoords="offset points",
arrowprops=dict(arrowstyle="->"),
)
ax.annotate(
"Random\nrotation",
(605, 0.6),
(-100, 20),
xycoords=transforms.blended_transform_factory(ax.transData, ax.transAxes),
textcoords="offset points",
arrowprops=dict(arrowstyle="->"),
)
ax.set(xlabel=r"$\|\|YR\|\|_{1}$ (element-wise norm)", ylabel="", yticks=[])
ax.spines["left"].set_visible(False)
stashfig("random-rotations")
| [
"[email protected]"
] | |
ce530f3a0911852d37807d078ec6dcec182f3b0f | a2dce63dc04f484d1457073610343378656a1ffd | /p24.py | c193733b0ed3535db6ba4015fe24cd7bf5da243e | [] | no_license | analaura09/pythongame | 5ece67047095160cdbc56ae3bb14920c787d8d02 | 54c83cf731a384fdb04bc4c3ed0bcf109b03d5ed | refs/heads/main | 2023-03-29T00:35:35.713616 | 2021-03-21T17:53:28 | 2021-03-21T17:53:28 | 348,432,418 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 382 | py | import run()
nome = str(input(' Digite o nome da cidade: ')).strip()
print(nome[:5].upper() == 'SANTO')
opçao = int(input('deseja repetir esse exercicio?[1] \ndeseja voltar para o mundo1? [2] \ndeseja sair do jogo?[3]'))
if opçao == 1:
import p24
p24.run()
if opçao == 2:
import mundo1
mundo1.run()
if opçao == 3:
print('Obrigada por jogar! Volte sempre :)')
| [
"[email protected]"
] | |
bd43d4b1bc8f330307b9720cb6b8e6383124f6dc | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/62/usersdata/191/32531/submittedfiles/ex1.py | e50dfa28cc19c4488d103df646274da8bad5050f | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 338 | py | # -*- coding: utf-8 -*-
from __future__ import division
a = float(input('Digite a: '))
b = float(input('Digite b: '))
c = float(input('Digite c: '))
delta = b ** 2 - 4 * a * c
if delta >= 0:
    x1 = (-b + delta ** 0.5) / (2 * a)
    x2 = (-b - delta ** 0.5) / (2 * a)
    print('x1 é igual a %.2f' % x1)
    print('x2 é igual a %.2f' % x2)
else:
print('SRR')
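# Hypothetical extension (not part of the submitted exercise): with cmath the
# same Bhaskara formula also returns the complex roots when delta < 0.
# import cmath
# x1 = (-b + cmath.sqrt(delta)) / (2 * a)
# x2 = (-b - cmath.sqrt(delta)) / (2 * a)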
| [
"[email protected]"
] | |
61e37196f31ae35d096a8c8fd30ddb31149a289a | 3256af0d6c19732bb84b256a9f792aaf7f3d901a | /f5/bigip/tm/asm/policies/test/functional/test_extractions.py | 64620349bef5b578dcd0fbe54e55e1a2ec400974 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] | permissive | F5Networks/f5-common-python | 73e33ea489d989399d205077163f24ce584d83b9 | 3050df0079c2426af99b9a1b8f93d0b512468ff4 | refs/heads/development | 2023-08-29T10:11:23.713392 | 2022-09-21T02:45:03 | 2022-09-21T02:45:03 | 45,062,555 | 286 | 180 | Apache-2.0 | 2023-05-12T23:13:03 | 2015-10-27T18:48:06 | Python | UTF-8 | Python | false | false | 4,460 | py | # Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import pytest
import tempfile
from distutils.version import LooseVersion
from f5.bigip.tm.asm.policies.extractions import Extraction
from f5.sdk_exception import MissingRequiredCreationParameter
from requests.exceptions import HTTPError
@pytest.fixture(scope='function')
def set_extraction(policy):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
r1 = policy.extractions_s.extraction.create(
extractFromAllItems=True,
name=name
)
yield r1
r1.delete()
@pytest.mark.skipif(
LooseVersion(pytest.config.getoption('--release')) < LooseVersion('11.6.0'),
reason='This collection is fully implemented on 11.6.0 or greater.'
)
class TestExtractions(object):
def test_create_req_arg(self, policy):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
r1 = policy.extractions_s.extraction.create(
extractFromAllItems=True,
name=name
)
assert r1.kind == 'tm:asm:policies:extractions:extractionstate'
r1.delete()
def test_create_mandatory_arg_missing(self, policy2):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
with pytest.raises(MissingRequiredCreationParameter) as err:
policy2.extractions_s.extraction.create(
extractFromAllItems=False,
name=name
)
assert 'This resource requires at least one of the' in str(err.value)
def test_create_mandatory_arg_present(self, policy2):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
r1 = policy2.extractions_s.extraction.create(
extractFromAllItems=False,
name=name,
extractFromRegularExpression='["test"]')
assert r1.kind == 'tm:asm:policies:extractions:extractionstate'
assert r1.extractFromRegularExpression == '["test"]'
assert r1.extractFromAllItems is False
r1.delete()
def test_refresh(self, set_extraction, policy):
r1 = set_extraction
r2 = policy.extractions_s.extraction.load(id=r1.id)
assert r1.kind == r2.kind
assert r1.extractFromAllItems == r2.extractFromAllItems
assert r1.searchInXml == r2.searchInXml
r2.modify(searchInXml=True)
assert r1.searchInXml is False
assert r2.searchInXml is True
r1.refresh()
assert r1.searchInXml is True
def test_delete(self, policy):
file = tempfile.NamedTemporaryFile()
name = os.path.basename(file.name)
r1 = policy.extractions_s.extraction.create(
extractFromAllItems=True,
name=name
)
idhash = r1.id
r1.delete()
with pytest.raises(HTTPError) as err:
policy.extractions_s.extraction.load(id=idhash)
assert err.value.response.status_code == 404
def test_load_no_object(self, policy):
with pytest.raises(HTTPError) as err:
policy.extractions_s.extraction.load(id='Lx3553-321')
assert err.value.response.status_code == 404
def test_load(self, set_extraction, policy):
r1 = set_extraction
assert r1.kind == 'tm:asm:policies:extractions:extractionstate'
assert r1.searchInXml is False
r1.modify(searchInXml=True)
assert r1.searchInXml is True
r2 = policy.extractions_s.extraction.load(id=r1.id)
assert r1.kind == r2.kind
assert r1.searchInXml == r2.searchInXml
def test_extractions_subcollection(self, policy, set_extraction):
r1 = set_extraction
assert r1.kind == 'tm:asm:policies:extractions:extractionstate'
cc = policy.extractions_s.get_collection()
assert isinstance(cc, list)
assert len(cc)
assert isinstance(cc[0], Extraction)
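# Example invocation (illustrative; ``--release`` is the only option referenced
# in this file, and the connection fixtures come from the repo's conftest):
#   py.test test_extractions.py --release 12.1.0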
| [
"[email protected]"
] | |
ba4ce143ab4efe10927d17ed3c09492ccae1cd5b | b3aa3d77836fa8f05b54d68e7bd6bff19dced90d | /Codeforces/636 Div 3/D.py | 84ea81677d1e3e2eaf8eef86a8f7d3c3ae042df2 | [] | no_license | anoubhav/Codeforces-Atcoder-Codechef-solutions | 660c5b78723791bc33b1d51977bf11ebe6dfe4c1 | aeebcae332af64aba49f52261d11aa6996f33b1c | refs/heads/master | 2022-12-08T14:02:49.574928 | 2020-08-29T14:18:30 | 2020-08-29T14:18:30 | 255,004,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,380 | py | # # Unsolved in contest
## Accepted solution: O(n+k) solution
# https://www.youtube.com/watch?v=QouZfBC9CVw&list=RDCMUC4lyxGubhu5u1s1LYCwXtZw&index=2
def prefix_algo():
t = int(input())
    from collections import defaultdict
for _ in range(t):
n, k = list(map(int, input().split()))
seq = list(map(int, input().split()))
prefix_arr = [0]*(2*k+10)
zero_count = defaultdict(int)
for i in range(n//2):
zero_count[seq[i] + seq[n-i-1]] += 1
a, b = sorted((seq[i], seq[n-i-1]))
L = a + 1
R = b + k
prefix_arr[L] += 1
prefix_arr[R+1] -= 1
prefsum = 0
for i in range(2*k + 10):
prefsum += prefix_arr[i]
prefix_arr[i] = prefsum
ans = n
for i in range(2, 2*k + 1):
zero = zero_count[i]
ones = prefix_arr[i] - zero
twos = n//2 - ones - zero
total = ones + twos*2
ans = min(ans, total)
print(ans)
prefix_algo()
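# Worked micro-example of the prefix trick above (illustrative, assumed values):
# with k = 4 and the pair (a, b) = (1, 2), every total in
# [min(a, b) + 1, max(a, b) + k] = [2, 6] is reachable with at most one
# replacement, so index L = 2 gets +1 and index R + 1 = 7 gets -1; after the
# running sum, prefix_arr[t] counts the pairs reaching total t with <= 1 change.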
## Naive solution: O(n*k) - TLE
def Naive():
t = int(input())
for _ in range(t):
n, k = list(map(int, input().split()))
seq = list(map(int, input().split()))
ans = 10**6
for tot in range(2, 2*k + 1):
counter = 0
for i in range(n//2):
a, b = seq[i], seq[n - i - 1]
                if a + b > tot:
                    minchange = max(a - 1, b - 1)
                    if a + b - minchange > tot:
                        counter += 2
                    else:
                        counter += 1
                elif a + b < tot:
                    maxchange = max(k - a, k - b)
                    if a + b + maxchange < tot:
                        counter += 2
                    else:
                        counter += 1
if counter < ans:
ans = counter
print(ans)
### Main mistake was that I disregarded the individual numbers (a, b) and only took the sum. The numbers a and b individually set how far one replacement can move the pair's total, so pairs with equal sums can still need different numbers of changes.
## Failed approach 1
# ans = 10**6
# pair_sum = list()
# for i in range(n//2):
# pair_sum.append(seq[i] + seq[n - i - 1])
# for master in pair_sum:
# counter = 0
# for pair in pair_sum:
# if pair!=master:
# if abs(pair - master) >= k:
# counter += 2
# else:
# counter += 1
# if counter<ans:
# ans = counter
# master = k
# counter = 0
# for pair in pair_sum:
# if pair!=master:
# if abs(pair - master) >= k:
# counter += 2
# else:
# counter += 1
# if counter<ans:
# ans = counter
# print(ans)
## Failed approach 2
# pair_sum = dict()
# mode_count = 0
# for i in range(n//2):
# temp = seq[i] + seq[n - i - 1]
# if temp in pair_sum: pair_sum[temp] += 1
# else: pair_sum[temp] = 1
# if pair_sum[temp]>mode_count:
# mode_count = pair_sum[temp]
# ans = 10**6
# for key, v in pair_sum.items():
# if v == mode_count:
# mode_sum = key
# counter = 0
# for i in range(n//2):
# temp = seq[i] + seq[n - i - 1]
# if temp!=mode_sum:
# if abs(temp - mode_sum) >= k:
# counter += 2
# else:
# counter += 1
# # print(mode_sum, temp, counter)
# if counter<ans:
# ans = counter
# if k//2 not in pair_sum:
# mode_sum = k//2
# counter = 0
# for i in range(n//2):
# temp = seq[i] + seq[n - i - 1]
# if temp!=mode_sum:
# if abs(temp - mode_sum) >= k:
# counter += 2
# else:
# counter += 1
# # print(mode_sum, temp, counter)
# if counter<ans:
# ans = counter
# ans = min(n//2, ans)
# print(ans)
| [
"[email protected]"
] | |
a4e1b7f960a339d2696bd14deaf451002d5bf475 | 12cb9edd19612c132028c45707b4ec944ae4ae88 | /3-deploy_web_static.py | 8438f8acc72a80543e4eb6219a324f9f94c48f43 | [] | no_license | imperfectskillz/AirBnB_clone_v2 | 6345389a99f66c23d310ac3030986ef8973c00bb | 8910d78da6a6bb6f14fa569af913739f4b5cf5f5 | refs/heads/master | 2020-03-08T01:08:53.124647 | 2018-04-19T01:14:16 | 2018-04-19T01:14:16 | 127,822,734 | 0 | 0 | null | 2018-04-02T23:11:34 | 2018-04-02T23:11:34 | null | UTF-8 | Python | false | false | 1,537 | py | #!/usr/bin/python3
# Fabric script that generates a .tgz archive of web_static and deploys it
from fabric.api import *
from datetime import datetime
import os
env.hosts = ["52.91.246.129", "107.23.155.217"]
env.user = "ubuntu"
def do_pack():
"""
creates tgz
"""
    filetime = datetime.now().strftime('%Y%m%d%H%M%S')
filename = 'versions/web_static_{}.tgz'.format(filetime)
try:
local("mkdir -p versions")
local('tar -cvzf {} web_static'.format(filename))
return filename
    except Exception:
return None
def do_deploy(archive_path):
"""
distributes archive to web servers
"""
    file = os.path.basename(archive_path)
if os.path.isfile(archive_path):
try:
put(archive_path, "/tmp/{}".format(file))
file1 = file.split(".")[0]
run("mkdir -p /data/web_static/releases/{}/".format(file1))
run("tar -xzf /tmp/{} -C /data/web_static/releases/{}".format(
file, file1))
run("rm /tmp/{}".format(file))
run("mv /data/web_static/releases/{}/web_static/* /data/web_static/releases/{}/".format(file1, file1))
run("rm -rf /data/web_static/releases/{}/web_static".format(file1))
run("rm -rf /data/web_static/current")
run("ln -s /data/web_static/releases/{}/ /data/web_static/current".format(file1))
return True
        except Exception:
            return False
    return False
def deploy():
"""
set-up
"""
temp = do_pack()
if not temp:
return False
return do_deploy(temp)
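# Usage sketch (illustrative; the key path and remote user are assumptions):
#   fab -f 3-deploy_web_static.py deploy -i ~/.ssh/id_rsa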
| [
"[email protected]"
] | |
c29aa7ebe11d45637e582453e40d5f668b9e0189 | 5b3d8b5c612c802fd846de63f86b57652d33f672 | /Python/six_kyu/digital_root.py | 5655cf937c38c8dc012f4fb38a715f99a3c7198f | [
"Apache-2.0"
] | permissive | Brokenshire/codewars-projects | 1e591b57ed910a567f6c0423beb194fa7f8f693e | db9cd09618b8a7085b0d53ad76f73f9e249b9396 | refs/heads/master | 2021-07-22T18:50:25.847592 | 2021-01-25T23:27:17 | 2021-01-25T23:27:17 | 228,114,677 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,183 | py | # Python solution for 'Sum of Digits / Digital Root' codewars question.
# Level: 6 kyu
# Tags: Algorithms, Mathematics, Numbers, and Arithmetic.
# Author: Jack Brokenshire
# Date: 13/02/2020
import unittest
def digital_root(n):
"""
A digital root is the recursive sum of all the digits in a number. Given n, take the sum of the digits of n.
If that value has more than one digit, continue reducing in this way until a single-digit number is produced.
This is only applicable to the natural numbers.
    :param n: An input integer value.
:return: Adds each digit together in the given number until it reaches a single-digit number.
"""
    while n >= 10:
        n = sum(int(x) for x in str(n))
return n
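# Equivalent closed form (a minimal sketch based on the standard digital-root
# congruence mod 9; not part of the original kata submission):
def digital_root_closed_form(n):
    """Return the digital root of natural n via 1 + (n - 1) % 9 (0 maps to 0)."""
    return 0 if n == 0 else 1 + (n - 1) % 9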
class TestDigitalRoot(unittest.TestCase):
"""Class to test 'digital_root' function"""
def test_digital_root(self):
self.assertEqual(digital_root(16), 7)
self.assertEqual(digital_root(456), 6)
self.assertEqual(digital_root(942), 6)
self.assertEqual(digital_root(132189), 6)
self.assertEqual(digital_root(493193), 2)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
fb5e0cbc84d8801e24e350f9c849aaae07d4483e | 4de03eecadc4c69caf792f4773571c2f6dbe9d68 | /tests/api/test_beshared.py | 6362f1d3b23953ff61e2bcedd67f638da265f5a2 | [
"Apache-2.0"
] | permissive | Tr-1234/seahub | c1663dfd12f7584f24c160bcf2a83afdbe63a9e2 | ed255e0566de054b5570218cb39cc320e99ffa44 | refs/heads/master | 2022-12-23T16:20:13.138757 | 2020-10-01T04:13:42 | 2020-10-01T04:13:42 | 300,138,290 | 0 | 0 | Apache-2.0 | 2020-10-01T04:11:41 | 2020-10-01T04:11:40 | null | UTF-8 | Python | false | false | 2,540 | py | import json
import seaserv
from seaserv import seafile_api
from seahub.test_utils import BaseTestCase
class BeSharedReposTest(BaseTestCase):
def setUp(self):
self.login_as(self.admin)
def tearDown(self):
self.remove_repo()
def _prepare_repo_and_group(self):
# create repo for user
sub_repo_id = seafile_api.create_virtual_repo(self.repo.id,
self.folder,
self.repo.name, '',
self.user.username)
self.sub_repo_id = sub_repo_id
# create group for admin
admin_group_id = seaserv.ccnet_threaded_rpc.create_group('admin-group',
self.admin.email)
self.admin_group_id = admin_group_id
def test_can_list_personal_shared_repo(self):
self._prepare_repo_and_group()
# A user shares a folder to admin with permission 'rw'.
seafile_api.share_repo(self.sub_repo_id,
self.user.username,
self.admin.username,
'rw')
resp = self.client.get('/api2/beshared-repos/')
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert json_resp[0]['repo_id'] == self.sub_repo_id
assert json_resp[0]['share_type'] == 'personal'
def test_can_list_group_repo(self):
self._prepare_repo_and_group()
# A user shares a folder to admin group with permission 'rw'.
seafile_api.set_group_repo(self.sub_repo_id,
self.admin_group_id,
self.user.username,
'rw')
resp = self.client.get('/api2/beshared-repos/')
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert json_resp[0]['repo_id'] == self.sub_repo_id
assert json_resp[0]['share_type'] == 'group'
def test_can_list_public_repo(self):
self._prepare_repo_and_group()
# A user shares a folder to public with permission 'rw'.
seafile_api.add_inner_pub_repo(self.sub_repo_id, 'rw')
resp = self.client.get('/api2/beshared-repos/')
self.assertEqual(200, resp.status_code)
json_resp = json.loads(resp.content)
assert json_resp[0]['repo_id'] == self.sub_repo_id
assert json_resp[0]['share_type'] == 'public'
| [
"[email protected]"
] | |
0fcb38ed4b11b89f3af06a4adbf4410b3eab6123 | 528f910908885c3ded4ecc6380b9603c8dcacbd6 | /tbapi/top/api/rest/SimbaRptCusteffectGetRequest.py | 58297bc4539f31f788a51628d13782982910cac7 | [] | no_license | Monica-ckd/data007 | 15fe9c4c898a51a58100138b6b064211199d2ed1 | 0e54ae57eb719b86ec14ce9f77b027882a3398a8 | refs/heads/master | 2023-03-16T05:26:14.257318 | 2016-05-25T06:57:05 | 2016-05-25T06:57:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 468 | py | '''
Created by auto_sdk on 2013-04-01 16:44:41
'''
from top.api.base import RestApi
class SimbaRptCusteffectGetRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.end_time = None
self.nick = None
self.page_no = None
self.page_size = None
self.source = None
self.start_time = None
self.subway_token = None
def getapiname(self):
return 'taobao.simba.rpt.custeffect.get'
| [
"[email protected]"
] | |
10d497ee159d5fa12d220be2c774fd33f365bf17 | fcd80f58e8006cb6bb04ac9dca4b3d58dc8d1d70 | /files/example197_file_seek.py | 9beb719cee7f34becceb6ea5685d4a6b719d77dd | [] | no_license | penguuu/python-examples | 56e252be3dbf61cb0345cf8f95a01577f755f332 | 32f46ba435fd2797454af6228b368524ac1ebd79 | refs/heads/master | 2021-01-04T22:18:03.474098 | 2020-02-15T20:32:18 | 2020-02-15T20:32:18 | 240,781,755 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | py | #!/usr/bin/python
fo = open("example197_file_seek.py","r")
print "Name of the file: ", fo.name
line = fo.readline()
print "Read Line: %s" % line
fo.seek(0, 0)  # rewind: offset 0 relative to the start of the file (whence=0)
line = fo.readline()
print "Read Line: %s" % line
fo.close()
| [
"[email protected]"
] | |
64b2dd0ee2448d18b763f3d2663e33790714d44c | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /MbbX7qJJeEnQu9bKr_16.py | 770c027324993f571e65cc9d43a73570afc5f508 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py |
def max_occur(text):
freqs = {c: text.count(c) for c in text if text.count(c) > 1}
if not freqs:
return 'No Repetition'
return sorted(c for c in freqs if freqs[c] == max(freqs.values()))
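# Equivalent single-pass variant using collections.Counter (an illustrative
# alternative, not the original kata submission):
# from collections import Counter
# def max_occur_counter(text):
#     freqs = {c: n for c, n in Counter(text).items() if n > 1}
#     if not freqs:
#         return 'No Repetition'
#     top = max(freqs.values())
#     return sorted(c for c in freqs if freqs[c] == top)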
| [
"[email protected]"
] | |
6d23004951c597c739a77280febec8db68afdaf6 | d8a5525d2e2070f0434e971b6b03df6d6457d47d | /torch_glow/tests/nodes/zero_test.py | 3a5472f453da540a4dc3a705bbf9d2e2a12c8655 | [
"Apache-2.0"
] | permissive | pytorch/glow | 0006dbac932da386ff1143cff166b89323aec337 | f35c3d180290c79d5d3578ccc800bb35ce88e420 | refs/heads/master | 2023-08-30T17:44:59.170945 | 2023-08-29T05:29:36 | 2023-08-29T05:29:36 | 105,281,531 | 3,201 | 780 | Apache-2.0 | 2023-09-14T08:29:16 | 2017-09-29T14:28:18 | C++ | UTF-8 | Python | false | false | 1,146 | py | # Copyright (c) Glow Contributors. See CONTRIBUTORS file.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import torch
from tests import utils
class TestZero(utils.TorchGlowTestCase):
def test_zero_basic(self):
"""Basic test of the PyTorch zero Node on Glow."""
class TestModule(torch.nn.Module):
def forward(self, a):
b = torch.zeros(a.size(), dtype=torch.float)
return a + b
x = torch.randn(2, 3, 4)
utils.compare_tracing_methods(TestModule(), x, fusible_ops={"aten::zeros"})
| [
"[email protected]"
] | |
b5995ba400cdef8251e2ed83be1b7a3160a382e1 | 627cca9406c31ce30c493ff7502f79eb4c57eee3 | /xcha/pools/pool_config.py | 2302aa6557d98b0686a6c0e7e602138bfb72a332 | [
"Apache-2.0"
] | permissive | blockchiansea/xcha-blockchain | 40c6d36813f671e94316a522904238f495f39f6b | 7de0ba89056236e30069aef12fe25843f6093bcf | refs/heads/master | 2023-07-26T02:36:57.654196 | 2021-09-06T06:04:21 | 2021-09-06T06:04:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,801 | py | import logging
from dataclasses import dataclass
from pathlib import Path
from typing import List
from blspy import G1Element
from xcha.types.blockchain_format.sized_bytes import bytes32
from xcha.util.byte_types import hexstr_to_bytes
from xcha.util.config import load_config, save_config
from xcha.util.streamable import Streamable, streamable
"""
Config example
This is what goes into the user's config file, to communicate between the wallet and the farmer processes.
pool_list:
launcher_id: ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa
authentication_public_key: 970e181ae45435ae696508a78012dc80548c334cf29676ea6ade7049eb9d2b9579cc30cb44c3fd68d35a250cfbc69e29
owner_public_key: 84c3fcf9d5581c1ddc702cb0f3b4a06043303b334dd993ab42b2c320ebfa98e5ce558448615b3f69638ba92cf7f43da5
payout_instructions: c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8
pool_url: localhost
p2_singleton_puzzle_hash: 2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
target_puzzle_hash: 344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58
""" # noqa
log = logging.getLogger(__name__)
@dataclass(frozen=True)
@streamable
class PoolWalletConfig(Streamable):
launcher_id: bytes32
pool_url: str
payout_instructions: str
target_puzzle_hash: bytes32
p2_singleton_puzzle_hash: bytes32
owner_public_key: G1Element
authentication_public_key: G1Element
def load_pool_config(root_path: Path) -> List[PoolWalletConfig]:
config = load_config(root_path, "config.yaml")
ret_list: List[PoolWalletConfig] = []
if "pool_list" in config["pool"]:
for pool_config_dict in config["pool"]["pool_list"]:
try:
pool_config = PoolWalletConfig(
hexstr_to_bytes(pool_config_dict["launcher_id"]),
pool_config_dict["pool_url"],
pool_config_dict["payout_instructions"],
hexstr_to_bytes(pool_config_dict["target_puzzle_hash"]),
hexstr_to_bytes(pool_config_dict["p2_singleton_puzzle_hash"]),
G1Element.from_bytes(hexstr_to_bytes(pool_config_dict["owner_public_key"])),
G1Element.from_bytes(hexstr_to_bytes(pool_config_dict["authentication_public_key"])),
)
ret_list.append(pool_config)
except Exception as e:
log.error(f"Exception loading config: {pool_config_dict} {e}")
return ret_list
async def update_pool_config(root_path: Path, pool_config_list: List[PoolWalletConfig]):
full_config = load_config(root_path, "config.yaml")
full_config["pool"]["pool_list"] = [c.to_json_dict() for c in pool_config_list]
save_config(root_path, "config.yaml", full_config)
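# Round-trip sketch (illustrative, not part of the original module): loading the
# pool list and writing it straight back leaves config.yaml's pool section
# unchanged.
#   configs = load_pool_config(root_path)
#   await update_pool_config(root_path, configs)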
| [
"[email protected]"
] | |
0c2564c74f11c92624cfde26e3724565ec38a320 | 26d6c34df00a229dc85ad7326de6cb5672be7acc | /msgraph-cli-extensions/beta/financials_beta/azext_financials_beta/vendored_sdks/financials/aio/_financials.py | d04272fa50b6915e7ad0d391b9af5cb2f0907b76 | [
"MIT"
] | permissive | BrianTJackett/msgraph-cli | 87f92471f68f85e44872939d876b9ff5f0ae6b2c | 78a4b1c73a23b85c070fed2fbca93758733f620e | refs/heads/main | 2023-06-23T21:31:53.306655 | 2021-07-09T07:58:56 | 2021-07-09T07:58:56 | 386,993,555 | 0 | 0 | NOASSERTION | 2021-07-17T16:56:05 | 2021-07-17T16:56:05 | null | UTF-8 | Python | false | false | 26,543 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Optional, TYPE_CHECKING
from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
from ._configuration import FinancialsConfiguration
from .operations import FinancialsFinancialsOperations
from .operations import FinancialsOperations
from .operations import FinancialsCompaniesOperations
from .operations import FinancialsCompaniesCustomerPaymentJournalsOperations
from .operations import FinancialsCompaniesCustomerPaymentJournalsCustomerPaymentsOperations
from .operations import FinancialsCompaniesCustomerPaymentJournalsCustomerPaymentsCustomerOperations
from .operations import FinancialsCompaniesCustomerPaymentsOperations
from .operations import FinancialsCompaniesCustomerPaymentsCustomerOperations
from .operations import FinancialsCompaniesCustomersOperations
from .operations import FinancialsCompaniesDimensionsOperations
from .operations import FinancialsCompaniesEmployeesOperations
from .operations import FinancialsCompaniesGeneralLedgerEntriesOperations
from .operations import FinancialsCompaniesItemsOperations
from .operations import FinancialsCompaniesJournalLinesOperations
from .operations import FinancialsCompaniesJournalsOperations
from .operations import FinancialsCompaniesJournalsJournalLinesOperations
from .operations import FinancialsCompaniesPurchaseInvoiceLinesOperations
from .operations import FinancialsCompaniesPurchaseInvoiceLinesItemOperations
from .operations import FinancialsCompaniesPurchaseInvoicesOperations
from .operations import FinancialsCompaniesPurchaseInvoicesPurchaseInvoiceLinesOperations
from .operations import FinancialsCompaniesPurchaseInvoicesPurchaseInvoiceLinesItemOperations
from .operations import FinancialsCompaniesPurchaseInvoicesVendorOperations
from .operations import FinancialsCompaniesSalesCreditMemoLinesOperations
from .operations import FinancialsCompaniesSalesCreditMemoLinesItemOperations
from .operations import FinancialsCompaniesSalesCreditMemosOperations
from .operations import FinancialsCompaniesSalesCreditMemosCustomerOperations
from .operations import FinancialsCompaniesSalesCreditMemosSalesCreditMemoLinesOperations
from .operations import FinancialsCompaniesSalesCreditMemosSalesCreditMemoLinesItemOperations
from .operations import FinancialsCompaniesSalesInvoiceLinesOperations
from .operations import FinancialsCompaniesSalesInvoiceLinesItemOperations
from .operations import FinancialsCompaniesSalesInvoicesOperations
from .operations import FinancialsCompaniesSalesInvoicesCustomerOperations
from .operations import FinancialsCompaniesSalesInvoicesSalesInvoiceLinesOperations
from .operations import FinancialsCompaniesSalesInvoicesSalesInvoiceLinesItemOperations
from .operations import FinancialsCompaniesSalesOrderLinesOperations
from .operations import FinancialsCompaniesSalesOrderLinesItemOperations
from .operations import FinancialsCompaniesSalesOrdersOperations
from .operations import FinancialsCompaniesSalesOrdersCustomerOperations
from .operations import FinancialsCompaniesSalesOrdersSalesOrderLinesOperations
from .operations import FinancialsCompaniesSalesOrdersSalesOrderLinesItemOperations
from .operations import FinancialsCompaniesSalesQuoteLinesOperations
from .operations import FinancialsCompaniesSalesQuoteLinesItemOperations
from .operations import FinancialsCompaniesSalesQuotesOperations
from .operations import FinancialsCompaniesSalesQuotesCustomerOperations
from .operations import FinancialsCompaniesSalesQuotesSalesQuoteLinesOperations
from .operations import FinancialsCompaniesSalesQuotesSalesQuoteLinesItemOperations
from .operations import FinancialsCompaniesVendorsOperations
from .. import models
class Financials(object):
"""Financials.
:ivar financials_financials: FinancialsFinancialsOperations operations
:vartype financials_financials: financials.aio.operations.FinancialsFinancialsOperations
:ivar financials: FinancialsOperations operations
:vartype financials: financials.aio.operations.FinancialsOperations
:ivar financials_companies: FinancialsCompaniesOperations operations
:vartype financials_companies: financials.aio.operations.FinancialsCompaniesOperations
:ivar financials_companies_customer_payment_journals: FinancialsCompaniesCustomerPaymentJournalsOperations operations
:vartype financials_companies_customer_payment_journals: financials.aio.operations.FinancialsCompaniesCustomerPaymentJournalsOperations
:ivar financials_companies_customer_payment_journals_customer_payments: FinancialsCompaniesCustomerPaymentJournalsCustomerPaymentsOperations operations
:vartype financials_companies_customer_payment_journals_customer_payments: financials.aio.operations.FinancialsCompaniesCustomerPaymentJournalsCustomerPaymentsOperations
:ivar financials_companies_customer_payment_journals_customer_payments_customer: FinancialsCompaniesCustomerPaymentJournalsCustomerPaymentsCustomerOperations operations
:vartype financials_companies_customer_payment_journals_customer_payments_customer: financials.aio.operations.FinancialsCompaniesCustomerPaymentJournalsCustomerPaymentsCustomerOperations
:ivar financials_companies_customer_payments: FinancialsCompaniesCustomerPaymentsOperations operations
:vartype financials_companies_customer_payments: financials.aio.operations.FinancialsCompaniesCustomerPaymentsOperations
:ivar financials_companies_customer_payments_customer: FinancialsCompaniesCustomerPaymentsCustomerOperations operations
:vartype financials_companies_customer_payments_customer: financials.aio.operations.FinancialsCompaniesCustomerPaymentsCustomerOperations
:ivar financials_companies_customers: FinancialsCompaniesCustomersOperations operations
:vartype financials_companies_customers: financials.aio.operations.FinancialsCompaniesCustomersOperations
:ivar financials_companies_dimensions: FinancialsCompaniesDimensionsOperations operations
:vartype financials_companies_dimensions: financials.aio.operations.FinancialsCompaniesDimensionsOperations
:ivar financials_companies_employees: FinancialsCompaniesEmployeesOperations operations
:vartype financials_companies_employees: financials.aio.operations.FinancialsCompaniesEmployeesOperations
:ivar financials_companies_general_ledger_entries: FinancialsCompaniesGeneralLedgerEntriesOperations operations
:vartype financials_companies_general_ledger_entries: financials.aio.operations.FinancialsCompaniesGeneralLedgerEntriesOperations
:ivar financials_companies_items: FinancialsCompaniesItemsOperations operations
:vartype financials_companies_items: financials.aio.operations.FinancialsCompaniesItemsOperations
:ivar financials_companies_journal_lines: FinancialsCompaniesJournalLinesOperations operations
:vartype financials_companies_journal_lines: financials.aio.operations.FinancialsCompaniesJournalLinesOperations
:ivar financials_companies_journals: FinancialsCompaniesJournalsOperations operations
:vartype financials_companies_journals: financials.aio.operations.FinancialsCompaniesJournalsOperations
:ivar financials_companies_journals_journal_lines: FinancialsCompaniesJournalsJournalLinesOperations operations
:vartype financials_companies_journals_journal_lines: financials.aio.operations.FinancialsCompaniesJournalsJournalLinesOperations
:ivar financials_companies_purchase_invoice_lines: FinancialsCompaniesPurchaseInvoiceLinesOperations operations
:vartype financials_companies_purchase_invoice_lines: financials.aio.operations.FinancialsCompaniesPurchaseInvoiceLinesOperations
:ivar financials_companies_purchase_invoice_lines_item: FinancialsCompaniesPurchaseInvoiceLinesItemOperations operations
:vartype financials_companies_purchase_invoice_lines_item: financials.aio.operations.FinancialsCompaniesPurchaseInvoiceLinesItemOperations
:ivar financials_companies_purchase_invoices: FinancialsCompaniesPurchaseInvoicesOperations operations
:vartype financials_companies_purchase_invoices: financials.aio.operations.FinancialsCompaniesPurchaseInvoicesOperations
:ivar financials_companies_purchase_invoices_purchase_invoice_lines: FinancialsCompaniesPurchaseInvoicesPurchaseInvoiceLinesOperations operations
:vartype financials_companies_purchase_invoices_purchase_invoice_lines: financials.aio.operations.FinancialsCompaniesPurchaseInvoicesPurchaseInvoiceLinesOperations
:ivar financials_companies_purchase_invoices_purchase_invoice_lines_item: FinancialsCompaniesPurchaseInvoicesPurchaseInvoiceLinesItemOperations operations
:vartype financials_companies_purchase_invoices_purchase_invoice_lines_item: financials.aio.operations.FinancialsCompaniesPurchaseInvoicesPurchaseInvoiceLinesItemOperations
:ivar financials_companies_purchase_invoices_vendor: FinancialsCompaniesPurchaseInvoicesVendorOperations operations
:vartype financials_companies_purchase_invoices_vendor: financials.aio.operations.FinancialsCompaniesPurchaseInvoicesVendorOperations
:ivar financials_companies_sales_credit_memo_lines: FinancialsCompaniesSalesCreditMemoLinesOperations operations
:vartype financials_companies_sales_credit_memo_lines: financials.aio.operations.FinancialsCompaniesSalesCreditMemoLinesOperations
:ivar financials_companies_sales_credit_memo_lines_item: FinancialsCompaniesSalesCreditMemoLinesItemOperations operations
:vartype financials_companies_sales_credit_memo_lines_item: financials.aio.operations.FinancialsCompaniesSalesCreditMemoLinesItemOperations
:ivar financials_companies_sales_credit_memos: FinancialsCompaniesSalesCreditMemosOperations operations
:vartype financials_companies_sales_credit_memos: financials.aio.operations.FinancialsCompaniesSalesCreditMemosOperations
:ivar financials_companies_sales_credit_memos_customer: FinancialsCompaniesSalesCreditMemosCustomerOperations operations
:vartype financials_companies_sales_credit_memos_customer: financials.aio.operations.FinancialsCompaniesSalesCreditMemosCustomerOperations
:ivar financials_companies_sales_credit_memos_sales_credit_memo_lines: FinancialsCompaniesSalesCreditMemosSalesCreditMemoLinesOperations operations
:vartype financials_companies_sales_credit_memos_sales_credit_memo_lines: financials.aio.operations.FinancialsCompaniesSalesCreditMemosSalesCreditMemoLinesOperations
:ivar financials_companies_sales_credit_memos_sales_credit_memo_lines_item: FinancialsCompaniesSalesCreditMemosSalesCreditMemoLinesItemOperations operations
:vartype financials_companies_sales_credit_memos_sales_credit_memo_lines_item: financials.aio.operations.FinancialsCompaniesSalesCreditMemosSalesCreditMemoLinesItemOperations
:ivar financials_companies_sales_invoice_lines: FinancialsCompaniesSalesInvoiceLinesOperations operations
:vartype financials_companies_sales_invoice_lines: financials.aio.operations.FinancialsCompaniesSalesInvoiceLinesOperations
:ivar financials_companies_sales_invoice_lines_item: FinancialsCompaniesSalesInvoiceLinesItemOperations operations
:vartype financials_companies_sales_invoice_lines_item: financials.aio.operations.FinancialsCompaniesSalesInvoiceLinesItemOperations
:ivar financials_companies_sales_invoices: FinancialsCompaniesSalesInvoicesOperations operations
:vartype financials_companies_sales_invoices: financials.aio.operations.FinancialsCompaniesSalesInvoicesOperations
:ivar financials_companies_sales_invoices_customer: FinancialsCompaniesSalesInvoicesCustomerOperations operations
:vartype financials_companies_sales_invoices_customer: financials.aio.operations.FinancialsCompaniesSalesInvoicesCustomerOperations
:ivar financials_companies_sales_invoices_sales_invoice_lines: FinancialsCompaniesSalesInvoicesSalesInvoiceLinesOperations operations
:vartype financials_companies_sales_invoices_sales_invoice_lines: financials.aio.operations.FinancialsCompaniesSalesInvoicesSalesInvoiceLinesOperations
:ivar financials_companies_sales_invoices_sales_invoice_lines_item: FinancialsCompaniesSalesInvoicesSalesInvoiceLinesItemOperations operations
:vartype financials_companies_sales_invoices_sales_invoice_lines_item: financials.aio.operations.FinancialsCompaniesSalesInvoicesSalesInvoiceLinesItemOperations
:ivar financials_companies_sales_order_lines: FinancialsCompaniesSalesOrderLinesOperations operations
:vartype financials_companies_sales_order_lines: financials.aio.operations.FinancialsCompaniesSalesOrderLinesOperations
:ivar financials_companies_sales_order_lines_item: FinancialsCompaniesSalesOrderLinesItemOperations operations
:vartype financials_companies_sales_order_lines_item: financials.aio.operations.FinancialsCompaniesSalesOrderLinesItemOperations
:ivar financials_companies_sales_orders: FinancialsCompaniesSalesOrdersOperations operations
:vartype financials_companies_sales_orders: financials.aio.operations.FinancialsCompaniesSalesOrdersOperations
:ivar financials_companies_sales_orders_customer: FinancialsCompaniesSalesOrdersCustomerOperations operations
:vartype financials_companies_sales_orders_customer: financials.aio.operations.FinancialsCompaniesSalesOrdersCustomerOperations
:ivar financials_companies_sales_orders_sales_order_lines: FinancialsCompaniesSalesOrdersSalesOrderLinesOperations operations
:vartype financials_companies_sales_orders_sales_order_lines: financials.aio.operations.FinancialsCompaniesSalesOrdersSalesOrderLinesOperations
:ivar financials_companies_sales_orders_sales_order_lines_item: FinancialsCompaniesSalesOrdersSalesOrderLinesItemOperations operations
:vartype financials_companies_sales_orders_sales_order_lines_item: financials.aio.operations.FinancialsCompaniesSalesOrdersSalesOrderLinesItemOperations
:ivar financials_companies_sales_quote_lines: FinancialsCompaniesSalesQuoteLinesOperations operations
:vartype financials_companies_sales_quote_lines: financials.aio.operations.FinancialsCompaniesSalesQuoteLinesOperations
:ivar financials_companies_sales_quote_lines_item: FinancialsCompaniesSalesQuoteLinesItemOperations operations
:vartype financials_companies_sales_quote_lines_item: financials.aio.operations.FinancialsCompaniesSalesQuoteLinesItemOperations
:ivar financials_companies_sales_quotes: FinancialsCompaniesSalesQuotesOperations operations
:vartype financials_companies_sales_quotes: financials.aio.operations.FinancialsCompaniesSalesQuotesOperations
:ivar financials_companies_sales_quotes_customer: FinancialsCompaniesSalesQuotesCustomerOperations operations
:vartype financials_companies_sales_quotes_customer: financials.aio.operations.FinancialsCompaniesSalesQuotesCustomerOperations
:ivar financials_companies_sales_quotes_sales_quote_lines: FinancialsCompaniesSalesQuotesSalesQuoteLinesOperations operations
:vartype financials_companies_sales_quotes_sales_quote_lines: financials.aio.operations.FinancialsCompaniesSalesQuotesSalesQuoteLinesOperations
:ivar financials_companies_sales_quotes_sales_quote_lines_item: FinancialsCompaniesSalesQuotesSalesQuoteLinesItemOperations operations
:vartype financials_companies_sales_quotes_sales_quote_lines_item: financials.aio.operations.FinancialsCompaniesSalesQuotesSalesQuoteLinesItemOperations
:ivar financials_companies_vendors: FinancialsCompaniesVendorsOperations operations
:vartype financials_companies_vendors: financials.aio.operations.FinancialsCompaniesVendorsOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param top: Show only the first n items.
:type top: int
:param skip: Skip the first n items.
:type skip: int
:param search: Search items by search phrases.
:type search: str
:param filter: Filter items by property values.
:type filter: str
:param count: Include count of items.
:type count: bool
:param str base_url: Service URL
"""
def __init__(
self,
credential: "AsyncTokenCredential",
top: Optional[int] = None,
skip: Optional[int] = None,
search: Optional[str] = None,
filter: Optional[str] = None,
count: Optional[bool] = None,
base_url: Optional[str] = None,
**kwargs: Any
) -> None:
if not base_url:
base_url = 'https://graph.microsoft.com/beta'
self._config = FinancialsConfiguration(credential, top, skip, search, filter, count, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.financials_financials = FinancialsFinancialsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials = FinancialsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies = FinancialsCompaniesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_customer_payment_journals = FinancialsCompaniesCustomerPaymentJournalsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_customer_payment_journals_customer_payments = FinancialsCompaniesCustomerPaymentJournalsCustomerPaymentsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_customer_payment_journals_customer_payments_customer = FinancialsCompaniesCustomerPaymentJournalsCustomerPaymentsCustomerOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_customer_payments = FinancialsCompaniesCustomerPaymentsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_customer_payments_customer = FinancialsCompaniesCustomerPaymentsCustomerOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_customers = FinancialsCompaniesCustomersOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_dimensions = FinancialsCompaniesDimensionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_employees = FinancialsCompaniesEmployeesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_general_ledger_entries = FinancialsCompaniesGeneralLedgerEntriesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_items = FinancialsCompaniesItemsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_journal_lines = FinancialsCompaniesJournalLinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_journals = FinancialsCompaniesJournalsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_journals_journal_lines = FinancialsCompaniesJournalsJournalLinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_purchase_invoice_lines = FinancialsCompaniesPurchaseInvoiceLinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_purchase_invoice_lines_item = FinancialsCompaniesPurchaseInvoiceLinesItemOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_purchase_invoices = FinancialsCompaniesPurchaseInvoicesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_purchase_invoices_purchase_invoice_lines = FinancialsCompaniesPurchaseInvoicesPurchaseInvoiceLinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_purchase_invoices_purchase_invoice_lines_item = FinancialsCompaniesPurchaseInvoicesPurchaseInvoiceLinesItemOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_purchase_invoices_vendor = FinancialsCompaniesPurchaseInvoicesVendorOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_credit_memo_lines = FinancialsCompaniesSalesCreditMemoLinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_credit_memo_lines_item = FinancialsCompaniesSalesCreditMemoLinesItemOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_credit_memos = FinancialsCompaniesSalesCreditMemosOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_credit_memos_customer = FinancialsCompaniesSalesCreditMemosCustomerOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_credit_memos_sales_credit_memo_lines = FinancialsCompaniesSalesCreditMemosSalesCreditMemoLinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_credit_memos_sales_credit_memo_lines_item = FinancialsCompaniesSalesCreditMemosSalesCreditMemoLinesItemOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_invoice_lines = FinancialsCompaniesSalesInvoiceLinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_invoice_lines_item = FinancialsCompaniesSalesInvoiceLinesItemOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_invoices = FinancialsCompaniesSalesInvoicesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_invoices_customer = FinancialsCompaniesSalesInvoicesCustomerOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_invoices_sales_invoice_lines = FinancialsCompaniesSalesInvoicesSalesInvoiceLinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_invoices_sales_invoice_lines_item = FinancialsCompaniesSalesInvoicesSalesInvoiceLinesItemOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_order_lines = FinancialsCompaniesSalesOrderLinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_order_lines_item = FinancialsCompaniesSalesOrderLinesItemOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_orders = FinancialsCompaniesSalesOrdersOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_orders_customer = FinancialsCompaniesSalesOrdersCustomerOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_orders_sales_order_lines = FinancialsCompaniesSalesOrdersSalesOrderLinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_orders_sales_order_lines_item = FinancialsCompaniesSalesOrdersSalesOrderLinesItemOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_quote_lines = FinancialsCompaniesSalesQuoteLinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_quote_lines_item = FinancialsCompaniesSalesQuoteLinesItemOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_quotes = FinancialsCompaniesSalesQuotesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_quotes_customer = FinancialsCompaniesSalesQuotesCustomerOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_quotes_sales_quote_lines = FinancialsCompaniesSalesQuotesSalesQuoteLinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_sales_quotes_sales_quote_lines_item = FinancialsCompaniesSalesQuotesSalesQuoteLinesItemOperations(
self._client, self._config, self._serialize, self._deserialize)
self.financials_companies_vendors = FinancialsCompaniesVendorsOperations(
self._client, self._config, self._serialize, self._deserialize)
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "Financials":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details) -> None:
await self._client.__aexit__(*exc_details)
| [
"[email protected]"
] | |
2a04a0b4d524fd056e57e98a0700803743f8a35a | afd2087e80478010d9df66e78280f75e1ff17d45 | /torch/nn/intrinsic/modules/fused.py | dc962f956427ec6f6e6b1d0580a1d5c73bd9cd29 | [
"BSD-3-Clause",
"BSD-2-Clause",
"LicenseRef-scancode-secret-labs-2011",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0"
] | permissive | pytorch/pytorch | 7521ac50c47d18b916ae47a6592c4646c2cb69b5 | a6f7dd4707ac116c0f5fb5f44f42429f38d23ab4 | refs/heads/main | 2023-08-03T05:05:02.822937 | 2023-08-03T00:40:33 | 2023-08-03T04:14:52 | 65,600,975 | 77,092 | 24,610 | NOASSERTION | 2023-09-14T21:58:39 | 2016-08-13T05:26:41 | Python | UTF-8 | Python | false | false | 901 | py | from torch.ao.nn.intrinsic import BNReLU2d
from torch.ao.nn.intrinsic import BNReLU3d
from torch.ao.nn.intrinsic import ConvBn1d
from torch.ao.nn.intrinsic import ConvBn2d
from torch.ao.nn.intrinsic import ConvBn3d
from torch.ao.nn.intrinsic import ConvBnReLU1d
from torch.ao.nn.intrinsic import ConvBnReLU2d
from torch.ao.nn.intrinsic import ConvBnReLU3d
from torch.ao.nn.intrinsic import ConvReLU1d
from torch.ao.nn.intrinsic import ConvReLU2d
from torch.ao.nn.intrinsic import ConvReLU3d
from torch.ao.nn.intrinsic import LinearBn1d
from torch.ao.nn.intrinsic import LinearReLU
from torch.ao.nn.intrinsic.modules.fused import _FusedModule # noqa: F401
__all__ = [
'BNReLU2d',
'BNReLU3d',
'ConvBn1d',
'ConvBn2d',
'ConvBn3d',
'ConvBnReLU1d',
'ConvBnReLU2d',
'ConvBnReLU3d',
'ConvReLU1d',
'ConvReLU2d',
'ConvReLU3d',
'LinearBn1d',
'LinearReLU',
]
| [
"[email protected]"
] | |
1f6bf9540ccf30dd2d98ad22948909b458eb8fab | 1beea84ee4ebc720e9353b71dce9cc34898f7823 | /test/test_dynamic_shapes.py | 28d5e740624384b890b0d240984aafeadb3cec44 | [
"BSD-2-Clause",
"LicenseRef-scancode-secret-labs-2011",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0"
] | permissive | EikanWang/pytorch | 93974c30013f27064d7aeaa6a6cd061854c721e0 | 16d85160d511f6e0c3b3eda418b133a66d3376dd | refs/heads/master | 2023-03-22T14:10:44.608629 | 2023-03-20T07:56:18 | 2023-03-20T07:56:18 | 616,394,473 | 1 | 1 | NOASSERTION | 2023-03-20T10:01:18 | 2023-03-20T10:00:29 | null | UTF-8 | Python | false | false | 30,179 | py | # -*- coding: utf-8 -*-
# Owner(s): ["oncall: jit"]
from torch._C import _disabled_torch_function_impl
import torch.fx
import torch.nn.functional as F
from torch.testing._internal.common_utils import run_tests, TestCase, skipIfTorchDynamo, \
parametrize, instantiate_parametrized_tests
import torch
import operator
import itertools
import contextlib
import math
import copy
import sympy
from torch.utils._pytree import tree_map
from torch.fx.experimental import symbolic_shapes
from torch.fx.experimental.proxy_tensor import make_fx
from torch.fx.experimental.symbolic_shapes import SymNode, \
FloorDiv, ShapeEnv, sym_sqrt, sym_float, to_node, GuardOnDataDependentSymNode, \
guard_bool, guard_int, guard_float
from torch.utils._python_dispatch import TorchDispatchMode
from torch import SymBool, SymInt, SymFloat, sym_int
aten = torch.ops.aten
meta_funcs = {}
def register_meta(op):
def decorator(f):
def add_func(op):
meta_funcs[op] = f
tree_map(add_func, op)
return f
return decorator
@register_meta([aten.add.Tensor, aten.sub.Tensor])
def binary_meta(a, b):
return a.new_empty(a.shape)
@register_meta(aten.cat.default)
def cat_meta(tensors, dim=0):
concat_length = 0
shape = tensors[0].shape
for tensor in tensors:
for idx, (common_length, length) in enumerate(zip(shape, tensor.shape)):
if idx == dim:
concat_length = concat_length + length
else:
assert length == common_length
new_shape = list(shape)
new_shape[dim] = concat_length
return tensors[0].new_empty(new_shape)
@register_meta([aten.narrow_copy.default])
def narrow_copy_symint_meta(a, dim, start, length, **kwargs):
shape = []
for i, x in enumerate(a.shape):
if i == dim:
shape.append(length)
else:
shape.append(x)
return a.new_empty(tuple(shape))
@register_meta([aten.expand.default])
def expand_symint_meta(a, size, implicit=False):
return a.new_empty(size)
def create_contiguous(shape):
strides = [1]
for dim in reversed(shape[:-1]):
strides.append(dim * strides[-1])
return list(reversed(strides))
class FakeSymbolicTensor(torch.Tensor):
@staticmethod
def __new__(cls, sym_shape, sym_strides, dtype, layout, requires_grad, device, storage_offset=0):
# TODO: this is wrong in general
sym_stride = create_contiguous(sym_shape)
r = torch.Tensor._make_wrapper_subclass(
cls, sym_shape,
sym_stride, storage_offset,
dtype=dtype, layout=layout, requires_grad=requires_grad,
device=device,
)
return r
__torch_function__ = _disabled_torch_function_impl
def new_empty(self, shape):
return FakeSymbolicTensor(shape, None, self.dtype, self.layout, self.requires_grad, self.device)
@classmethod
def __torch_dispatch__(cls, func_overload, types, args=(), kwargs=None):
if func_overload in meta_funcs:
return meta_funcs[func_overload](*args, **kwargs)
if func_overload == torch.ops.aten.new_empty.default:
self = args[0]
shape = args[1]
return FakeSymbolicTensor(shape, self.stride(), self.dtype, self.layout, self.requires_grad, self.device)
raise RuntimeError(f"operator {func_overload} not supported")
def create_symbolic_tensor(name, arg, shape_env):
from torch._dynamo.source import ConstantSource
sym_shapes, sym_strides, sym_storage_offset = \
shape_env.create_symbolic_sizes_strides_storage_offset(arg, source=ConstantSource(name))
return FakeSymbolicTensor(sym_shapes, sym_strides, arg.dtype, arg.layout, arg.requires_grad, arg.device, sym_storage_offset)
def create_symint(shape_env, i: int):
from torch._dynamo.source import ConstantSource
return shape_env.create_symintnode(
shape_env.create_symbol(i, source=ConstantSource(f"__testing_only{len(shape_env.var_to_val)}")),
hint=i
)
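# Illustrative use of the helpers above (mirrors the tests below; the value 5 is
# an arbitrary example): create_symint(ShapeEnv(), 5) yields a SymInt that
# compares equal to 5 while recording guards on its backing sympy symbol.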
@skipIfTorchDynamo("Creating ShapeEnv fails for confusing reasons (also we never expect dynamo to see code like this)")
class TestPySymInt(TestCase):
def test_arith_ops(self):
shape_env = ShapeEnv()
symints = []
for i in range(2, 5):
symints.append((i, create_symint(shape_env, i)))
ops = [operator.add, operator.sub, operator.floordiv, operator.mul, operator.mod]
for op in ops:
for args in itertools.permutations(symints, 2):
                if not isinstance(args[0][1], int) and (op not in (operator.mod, operator.floordiv) or args[1][0] != 0):
self.assertTrue(op(args[0][1], args[1][1]) == op(args[0][0], args[1][0]))
def test_reverse_arith_ops(self):
shape_env = ShapeEnv()
a = create_symint(shape_env, 2)
self.assertTrue(5 // a == 5 // 2)
a = create_symint(shape_env, 2)
self.assertTrue(5 * a == 5 * 2)
def test_roundtrip(self):
shape_env = ShapeEnv()
x = create_symbolic_tensor("x", torch.randn(5, 4, 3), shape_env)
self.assertTrue(not isinstance(x.shape[0], SymNode))
self.assertTrue(isinstance(x.shape[0], SymInt))
self.assertTrue(x.shape[0] == 5)
self.assertTrue(x.shape[1] == 4)
        self.assertTrue(x.shape[2] == 3)
        self.assertTrue(x.size()[0] == 5)
        self.assertTrue(x.size()[1] == 4)
self.assertTrue(isinstance(x.size()[1], SymInt))
self.assertTrue(x.size()[2] == 3)
self.assertTrue(x.size(0) == 5)
self.assertTrue(x.size(1) == 4)
self.assertTrue(x.size(2) == 3)
self.assertTrue(isinstance(x.size(2), SymInt))
y = create_symbolic_tensor("y", torch.randn(5, 4, 3)[1:], shape_env)
self.assertTrue(isinstance(y.storage_offset(), SymInt))
self.assertTrue(y.storage_offset() == 12)
def test_binary(self):
shape_env = ShapeEnv()
x = create_symbolic_tensor("x", torch.randn(5, 4, 3), shape_env)
y = create_symbolic_tensor("y", torch.randn(5, 4, 3), shape_env)
z = x + y
self.assertTrue(z.shape[0] == 5)
self.assertTrue(z.shape[1] == 4)
self.assertTrue(z.shape[2] == 3)
# broadcasting
y = create_symbolic_tensor("y2", torch.randn(1, 4, 1), shape_env)
z = x + y
self.assertTrue(z.shape[0] == 5)
self.assertTrue(z.shape[1] == 4)
self.assertTrue(z.shape[2] == 3)
def test_symint_args(self):
shape_env = ShapeEnv()
x = create_symbolic_tensor("x", torch.randn(5, 4, 3), shape_env)
y = create_symbolic_tensor("y", torch.randn(5, 4, 1), shape_env)
LAST_DIM = 2
z = x.narrow_copy(LAST_DIM, 0, y.shape[LAST_DIM])
self.assertTrue(z.shape[2] == y.shape[2])
# arithmetic expr with two symints
z = x.narrow_copy(LAST_DIM, 0, x.shape[LAST_DIM] - y.shape[LAST_DIM])
self.assertTrue(z.shape[2] == 2)
# arithmetic expr with a symint and python int
z = x.narrow_copy(LAST_DIM, 0, x.shape[LAST_DIM] - 1)
self.assertTrue(z.shape[2] == 2)
def test_symint_vargs(self):
shape_env = ShapeEnv()
x = create_symbolic_tensor("x", torch.randn(5, 4, 3), shape_env)
y = create_symbolic_tensor("y", torch.randn(1, 4, 1), shape_env)
# varargs
z = y.expand(x.shape[0], y.shape[1], x.shape[2])
self.assertTrue(z.shape[0] == 5)
self.assertTrue(z.shape[1] == 4)
self.assertTrue(z.shape[2] == 3)
# shape list
z = y.expand((x.shape[0], y.shape[1], x.shape[2]))
self.assertTrue(z.shape[0] == 5)
self.assertTrue(z.shape[1] == 4)
self.assertTrue(z.shape[2] == 3)
# mixed python symints and ints
z = y.expand(x.shape[0], y.shape[1], 3)
self.assertTrue(z.shape[0] == 5)
self.assertTrue(z.shape[1] == 4)
self.assertTrue(z.shape[2] == 3)
# mixed python symints and ints in a list
z = y.expand((x.shape[0], y.shape[1], 3))
self.assertTrue(z.shape[0] == 5)
self.assertTrue(z.shape[1] == 4)
self.assertTrue(z.shape[2] == 3)
# mixed python symints and ints
z = y.expand(5, y.shape[1], x.shape[2])
self.assertTrue(z.shape[0] == 5)
self.assertTrue(z.shape[1] == 4)
self.assertTrue(z.shape[2] == 3)
# mixed python ints and symints in a list
z = y.expand((5, y.shape[1], x.shape[2]))
self.assertTrue(z.shape[0] == 5)
self.assertTrue(z.shape[1] == 4)
self.assertTrue(z.shape[2] == 3)
z = y.expand((y.shape[1],))
z = y.expand(y.shape[1])
def test_stride(self):
shape_env = ShapeEnv()
x = create_symbolic_tensor("x", torch.randn(5, 5), shape_env)
self.assertIsInstance(x.stride()[0], SymInt)
def test_size_expressions(self):
shape_env = ShapeEnv()
x = create_symbolic_tensor("x", torch.randn(5), shape_env)
expand_x = x.expand(x.shape[0], x.shape[0])
if expand_x.shape[0] > 3:
result = expand_x + expand_x
else:
result = expand_x + expand_x
gt_op, _bt = shape_env.guards[-1]
self.assertTrue(isinstance(gt_op, sympy.core.relational.StrictGreaterThan))
        self.assertEqual(str(x.shape[0]), str(gt_op.args[0]))
        self.assertEqual(str(expand_x.shape[1]), str(x.shape[0]))
        self.assertEqual(str(expand_x.shape[1]), str(result.shape[0]))
def test_numel(self):
shape_env = ShapeEnv()
x = create_symbolic_tensor("x", torch.randn(5), shape_env)
self.assertIsInstance(x.numel(), torch.SymInt)
self.assertIsInstance(torch.numel(x), torch.SymInt)
x = torch.rand(3, 3)
self.assertIsInstance(x.numel(), int)
self.assertIsInstance(torch.numel(x), int)
def test_int_to_float(self):
shape_env = ShapeEnv()
x = create_symbolic_tensor("x", torch.randn(5), shape_env)
r = sym_float(x.shape[0])
self.assertIsInstance(r, torch.SymFloat, msg=type(r))
def test_aten_ops(self):
shape_env = ShapeEnv()
x = create_symbolic_tensor("x", torch.randn(5), shape_env)
torch.ops.aten.narrow_copy.default(x, 0, 0, x.shape[0])
shape_env = ShapeEnv()
x = create_symbolic_tensor("x2", torch.randn(5, 4, 3), shape_env)
torch.ops.aten.expand.default(x, [x.shape[0], x.shape[1], x.shape[2]])
def test_fx_trace_intlist(self):
class CustomModule(torch.nn.Module):
def forward(self, x):
bs, c, h, w = x.shape
return F.pad(x, (0, w % 2, 0, h % 2, 0, 0))
m = CustomModule()
x = torch.rand(1, 3, 4, 4)
# should not TypeError: pad(): argument 'pad' (position 2) must be
# tuple of ints, not tuple
torch.fx.symbolic_trace(m)
def test_meta_symint(self):
shape_env = ShapeEnv()
a0 = create_symint(shape_env, 2)
r = torch.empty(a0, device='meta')
self.assertIsInstance(r.shape[0], SymInt)
def test_guard_int(self):
shape_env = ShapeEnv()
a0 = create_symint(shape_env, 2)
self.assertEqual(guard_int(a0), 2)
self.assertExpectedInline(str(shape_env.guards[0][0]), """Eq(s0, 2)""")
def test_sym_int(self):
shape_env = ShapeEnv()
a0 = create_symint(shape_env, 5)
r = sym_int(a0)
self.assertEqual(r, 5)
self.assertIsInstance(r, torch.SymInt, msg=type(r))
self.assertExpectedInline(str(shape_env.guards[0][0]), """Eq(s0, 5)""")
a1 = create_symint(shape_env, 7)
r = sym_int(a1 / 2)
self.assertEqual(guard_int(r), 3)
self.assertIsInstance(r, torch.SymInt, msg=type(r))
self.assertExpectedInline(str(shape_env.guards[1][0]), """Eq(floor(s1/2), 3)""")
a2 = create_symint(shape_env, -3)
r = sym_int(a2 / 2)
self.assertEqual(guard_int(r), -1)
self.assertIsInstance(r, torch.SymInt, msg=type(r))
self.assertExpectedInline(str(shape_env.guards[2][0]), """Eq(ceiling(-s2/2), -1)""")
a3 = create_symint(shape_env, 3)
r = sym_int(2.0 * sym_float(a3))
self.assertEqual(guard_int(r), 6)
self.assertIsInstance(r, torch.SymInt, msg=type(r))
self.assertExpectedInline(str(shape_env.guards[3][0]), """Eq(2*s2, 6)""")
def test_sym_sqrt(self):
shape_env = ShapeEnv()
a0 = create_symint(shape_env, 4)
r = sym_sqrt(a0)
self.assertEqual(r, 2)
self.assertIsInstance(r, torch.SymFloat, msg=type(r))
self.assertExpectedInline(str(shape_env.guards[0][0]), """Eq(sqrt(s0), 2)""")
def test_sym_floor(self):
shape_env = ShapeEnv()
a0 = create_symint(shape_env, 5)
r = math.floor(a0 / 2)
self.assertEqual(r, 2)
self.assertIsInstance(r, torch.SymInt, msg=type(r))
self.assertExpectedInline(str(shape_env.guards[0][0]), """Eq(floor(s0/2), 2)""")
r = math.floor(3.0 * a0)
self.assertEqual(r, 15)
self.assertIsInstance(r, torch.SymInt, msg=type(r))
self.assertExpectedInline(str(shape_env.guards[1][0]), """Eq(3*s0, 15)""")
def test_sym_ceil(self):
shape_env = ShapeEnv()
a0 = create_symint(shape_env, 5)
r = math.ceil(a0 / 2)
self.assertEqual(r, 3)
self.assertIsInstance(r, torch.SymInt, msg=type(r))
self.assertExpectedInline(str(shape_env.guards[0][0]), """Eq(ceiling(s0/2), 3)""")
r = math.floor(3.0 * a0)
self.assertEqual(r, 15)
self.assertIsInstance(r, torch.SymInt, msg=type(r))
self.assertExpectedInline(str(shape_env.guards[1][0]), """Eq(3*s0, 15)""")
def test_int_conversion(self):
shape_env = ShapeEnv()
a0 = create_symint(shape_env, 2)
int(a0)
self.assertExpectedInline(str(shape_env.guards[0][0]), """Eq(s0, 2)""")
def test_data_dependent_guard(self):
shape_env = ShapeEnv()
s0 = shape_env.create_unbacked_symint()
self.assertRaises(GuardOnDataDependentSymNode, lambda: bool(s0 == 0))
def test_non_overlapping_and_dense(self):
shape_env = ShapeEnv()
a0 = create_symint(shape_env, 5)
r = torch.empty_strided((a0, 7), (1, a0), device='meta')
self.assertTrue(torch.ops.aten.is_non_overlapping_and_dense.default(r))
def test_specialize_zero_one(self):
shape_env = ShapeEnv(specialize_zero_one=True)
a0 = create_symint(shape_env, 5)
assert a0 != 1
self.assertEqual(len(shape_env.guards), 0)
shape_env = ShapeEnv(specialize_zero_one=False)
a0 = create_symint(shape_env, 5)
assert a0 != 1
self.assertEqual(len(shape_env.guards), 1)
def test_duck_shape(self):
shape_env = ShapeEnv(duck_shape=True)
a0 = create_symint(shape_env, 5)
a1 = create_symint(shape_env, 5)
assert a0 == a1
self.assertEqual(len(shape_env.guards), 0)
shape_env = ShapeEnv(duck_shape=False)
a0 = create_symint(shape_env, 5)
a1 = create_symint(shape_env, 5)
assert a0 == a1
self.assertEqual(len(shape_env.guards), 1)
def test_int_bool(self):
# See https://github.com/pytorch/pytorch/issues/95981
shape_env = ShapeEnv(duck_shape=True)
a0 = create_symint(shape_env, 5)
assert a0
self.assertEqual(len(shape_env.guards), 0)
def test_symint_as_scalar(self):
shape_env = ShapeEnv()
a0 = create_symint(shape_env, 2)
sym_int_encountered = False
class TestSymInt(TorchDispatchMode):
def __torch_dispatch__(self, func, types, args=(), kwargs=None):
assert func == torch.ops.aten.add.Tensor
nonlocal sym_int_encountered
# WARNING: do not do identity tests on the outer
# SymInt/SymFloat, they are NOT STABLE
sym_int_encountered = kwargs["alpha"].node is a0.node
kwargs["alpha"] = 0
return func(*args)
x = torch.rand([4, 4])
with TestSymInt():
y = torch.add(x, x, alpha=a0)
self.assertTrue(sym_int_encountered)
def test_deepcopy(self):
shape_env = ShapeEnv()
a0 = create_symint(shape_env, 2)
assert a0 < 4
new_shape_env = copy.deepcopy(shape_env)
self.assertEqual(len(new_shape_env.guards), 1)
def test_print_readable_with_symints(self):
def f(a, b):
dim0 = a.shape[0] + b.shape[0]
dim1 = a.shape[1] + b.shape[1]
d = a.new_empty(dim0, dim1)
d = torch.ops.aten.native_dropout(d, 0.5, train=True)
return d
fx_g = make_fx(f, tracing_mode="symbolic")(torch.randn(5, 3), torch.randn(4, 3))
out = fx_g.print_readable(print_output=False)
self.assertExpectedInline(out.strip(), """\
class f(torch.nn.Module):
def forward(self, a_1: f32[s0, s1], b_1: f32[s2, s1]):
# No stacktrace found for following nodes
sym_size: Sym(s0) = torch.ops.aten.sym_size(a_1, 0)
sym_size_1: Sym(s2) = torch.ops.aten.sym_size(b_1, 0)
add: Sym(s0 + s2) = sym_size + sym_size_1; sym_size = sym_size_1 = None
sym_size_2: Sym(s1) = torch.ops.aten.sym_size(a_1, 1)
sym_size_3: Sym(s1) = torch.ops.aten.sym_size(b_1, 1); b_1 = None
add_1: Sym(2*s1) = sym_size_2 + sym_size_3; sym_size_2 = sym_size_3 = None
new_empty: f32[s0 + s2, 2*s1] = torch.ops.aten.new_empty.default(a_1, [add, add_1], dtype = torch.float32, layout = torch.strided, device = device(type='cpu'), pin_memory = False); a_1 = add = add_1 = None
native_dropout = torch.ops.aten.native_dropout.default(new_empty, 0.5, True); new_empty = None
getitem: f32[s0 + s2, 2*s1] = native_dropout[0]
getitem_1: b8[s0 + s2, 2*s1] = native_dropout[1]; native_dropout = None
return (getitem, getitem_1)""") # noqa: B950
@skipIfTorchDynamo("Creating ShapeEnv fails for confusing reasons (also we never expect dynamo to see code like this)")
class TestSymNumberMagicMethods(TestCase):
def _do_test(self, fn, inp1, inp2, shape_env, is_unary_fn):
# Helper function
seed_node = (create_symint(shape_env, 1) / 1.).node
bool_seed_node = (create_symint(shape_env, 1) == 1).node
def get_sym_inp(inp):
# NB: this must come before int
if isinstance(inp, bool):
return torch.SymBool(to_node(bool_seed_node, inp))
elif isinstance(inp, int):
return torch.SymInt(to_node(seed_node, inp))
else:
return torch.SymFloat(to_node(seed_node, inp))
def maybe_xfail(inp1, inp2):
if fn == "sym_sqrt" and inp1 < 0:
# ValueError: math domain error
return self.assertRaises((ValueError,))
elif fn in ("truediv", "floordiv", "mod") and inp2 == 0:
# ZeroDivisionError: division by zero
return self.assertRaises((ZeroDivisionError,))
elif fn == "pow" and inp1 == 0 and inp2 < 0:
# ZeroDivisionError: 0.0 cannot be raised to a negative power
return self.assertRaises((ZeroDivisionError,))
elif fn == "pow" and inp1 < 0 and inp2 in (2.5, -2.5) and (
type(inp1) in (SymFloat, SymInt) or
type(inp2) in (SymFloat, SymInt)
):
# Complex result, which we do not support:
# TypeError: Cannot convert complex to float
return self.assertRaises((TypeError,))
else:
return contextlib.nullcontext()
if fn in symbolic_shapes.magic_methods_on_math:
lambda_apply = getattr(math, fn)
elif fn in symbolic_shapes.magic_methods_on_submodule:
lambda_apply = getattr(symbolic_shapes, fn)
elif fn in symbolic_shapes.magic_methods_on_operator_with_trailing_underscore:
lambda_apply = getattr(operator, f"{fn}_")
else:
lambda_apply = getattr(operator, fn)
def guard_fn(v):
try:
if type(v) in (SymBool, bool):
return guard_bool(v)
elif type(v) in (SymFloat, float):
return guard_float(v)
else: # SymInt, int
return guard_int(v)
except Exception as e:
raise e
# Get reference result
with maybe_xfail(inp1, inp2):
if is_unary_fn:
ref_out = lambda_apply(inp1)
else:
ref_out = lambda_apply(inp1, inp2)
# Symified first arg
sym_inp1 = get_sym_inp(inp1)
with maybe_xfail(sym_inp1, inp2):
if is_unary_fn:
out = lambda_apply(sym_inp1)
else:
out = lambda_apply(sym_inp1, inp2)
out = guard_fn(out)
self.assertEqual(out, ref_out)
if is_unary_fn:
return
# Symified second arg
sym_inp2 = get_sym_inp(inp2)
with maybe_xfail(inp1, sym_inp2):
out = lambda_apply(inp1, sym_inp2)
out = guard_fn(out)
self.assertEqual(out, ref_out)
# Symified both args
with maybe_xfail(sym_inp1, sym_inp2):
out = lambda_apply(sym_inp1, sym_inp2)
out = guard_fn(out)
self.assertEqual(out, ref_out)
@parametrize("fn", list(symbolic_shapes.magic_methods.keys()))
def test_bool_method(self, fn):
if fn not in symbolic_shapes.bool_magic_methods:
self.skipTest(f"{fn} is non-bool")
is_unary_fn = fn in symbolic_shapes.unary_magic_methods
shape_env = ShapeEnv()
self._do_test(fn, True, False, shape_env, is_unary_fn)
@parametrize("fn", list(symbolic_shapes.magic_methods.keys()))
@parametrize("first_type", ["int", "float"])
@parametrize("second_type", ["int", "float"])
def test_method(self, fn, first_type, second_type):
if first_type == "float":
# TODO: Hmm, this looks like we skip all floats
self.skipTest(f"{fn} is not a float magic method")
is_unary_fn = fn in symbolic_shapes.unary_magic_methods
# Second argument is ignored for unary function. So only run for one type
if is_unary_fn and second_type == "float":
self.skipTest(f"{fn} is unary and already tested")
if fn in symbolic_shapes.bool_magic_methods:
self.skipTest(f"{fn} is bool")
# Only floats here since these will be converted to int if necessary.
# We also ignore complex and bool.
values = (
0.0,
1.0,
2.5,
)
neg_values = tuple(-x for x in values)
for inp1, inp2 in itertools.chain(
itertools.product(values, values),
itertools.product(values, neg_values),
itertools.product(neg_values, values),
itertools.product(neg_values, neg_values),
):
if first_type == "int":
inp1 = int(inp1)
if second_type == "int":
inp2 = int(inp2)
shape_env = ShapeEnv()
self._do_test(fn, inp1, inp2, shape_env, is_unary_fn)
instantiate_parametrized_tests(TestSymNumberMagicMethods)
class TestFloorDiv(TestCase):
@staticmethod
def python_floordiv(x, y):
return x // y
@staticmethod
def torch_floordiv(x, y):
# Note: we fully evaluate here since FloorDiv might not always do
# that.
shape_env = ShapeEnv()
return shape_env.evaluate_expr(FloorDiv(x, y))
@staticmethod
def yield_test_cases(values, negate=True):
for x, y in values:
yield (x, y)
if negate:
yield (-x, y)
yield (x, -y)
yield (-x, -y)
def test_floordiv_float_int(self):
values = (
(2.5, 2.1),
(2.1, 2.5),
(2.0, 2.1),
(7, 2.5),
(2.1, 7),
(7, 2),
)
for x, y in TestFloorDiv.yield_test_cases(values):
self.assertEqual(TestFloorDiv.python_floordiv(x, y), TestFloorDiv.torch_floordiv(x, y))
def test_floordiv_bool(self):
values = (
(False, True),
(True, 2.5),
(2.5, True),
(False, 7),
(7, True),
)
for x, y in TestFloorDiv.yield_test_cases(values, negate=False):
# Compares to int since our FloorDiv has no bool support
self.assertEqual(TestFloorDiv.python_floordiv(x, y), TestFloorDiv.torch_floordiv(int(x), int(y)))
# Tests that our impl throws
self.assertRaisesRegex(
TypeError,
(rf"unsupported operand type\(s\) for //: "
rf"'{type(sympy.sympify(x)).__name__}' and '{type(sympy.sympify(y)).__name__}'"
rf", expected integer or real"),
lambda: TestFloorDiv.torch_floordiv(x, y))
def test_floordiv_complex(self):
values = (
(1.5 + 2.5j, 1.3 + 3.5j),
(1.5 + 2.5j, 2.5),
(2.5, 1.5 + 2.5j),
(1.5 + 2.5j, 7),
(7, 1.5 + 2.5j),
)
for x, y in TestFloorDiv.yield_test_cases(values):
# We don't test error messages to avoid depending on Python
# interpreter version
self.assertRaises(TypeError, lambda: TestFloorDiv.python_floordiv(x, y))
self.assertRaisesRegex(
TypeError,
(rf"unsupported operand type\(s\) for //: "
rf"'{type(sympy.sympify(x)).__name__}' and '{type(sympy.sympify(y)).__name__}'"
rf", expected integer or real"),
lambda: TestFloorDiv.torch_floordiv(x, y))
def test_floordiv_div_by_zero(self):
values = (
(2.5, 0),
(2.1, 0.0),
(2.3, sympy.Symbol("s", zero=True)),
)
for x, y in TestFloorDiv.yield_test_cases(values, negate=False):
# We don't test error messages to avoid depending on Python
# interpreter version
if type(y) is not sympy.Symbol:
self.assertRaises(ZeroDivisionError, lambda: TestFloorDiv.python_floordiv(x, y))
self.assertRaisesRegex(
ZeroDivisionError,
"division by zero",
lambda: TestFloorDiv.torch_floordiv(x, y))
def test_floordiv_zero_base(self):
values = (
(0, 2.5),
(0.0, 2.1),
(sympy.Symbol("s", zero=True), 2.3),
)
for x, y in TestFloorDiv.yield_test_cases(values, negate=False):
if type(x) is not sympy.Symbol:
self.assertEqual(TestFloorDiv.python_floordiv(x, y), TestFloorDiv.torch_floordiv(x, y))
else:
self.assertEqual(0, TestFloorDiv.torch_floordiv(x, y))
def test_floordiv_div_by_one(self):
values = (
(2.5, 1),
(2.1, 1.0),
(2, 1.0),
(2, 1),
)
for x, y in TestFloorDiv.yield_test_cases(values):
self.assertEqual(TestFloorDiv.python_floordiv(x, y), TestFloorDiv.torch_floordiv(x, y))
def test_floordiv_simplify(self):
# Tests how we simplify or evaluate FloorDiv without free variables
shape_env = ShapeEnv()
result = 21
exprs = (
7 * FloorDiv(6, 2),
7 * FloorDiv(6.28, 2),
7 * FloorDiv(6.28, 2.0),
7 * FloorDiv(6.28, (FloorDiv(6.28, 3.14))),
)
for expr in exprs:
self.assertEqual(expr, result)
self.assertEqual(expr.doit(deep=False), result)
self.assertEqual(expr.doit(deep=True), result)
self.assertEqual(sympy.simplify(expr), result)
self.assertEqual(shape_env.simplify(expr), result)
self.assertEqual(shape_env.evaluate_expr(expr), result)
def test_floordiv_assumptions(self):
# We define two Symbols (with different names) for each type to make
# sure the behavior is consistent regardless of whether both arguments
# are the same object or not.
cases = (
sympy.Symbol("i1", integer=True),
sympy.Symbol("i2", integer=True),
sympy.Symbol("r1", real=True),
sympy.Symbol("r2", real=True),
sympy.Symbol("c1", complex=True, real=False, integer=False),
sympy.Symbol("c2", complex=True, real=False, integer=False),
sympy.Symbol("s1"),
sympy.Symbol("s2"),
)
for base, divisor in itertools.product(cases, repeat=2):
def op():
return FloorDiv(base, divisor)
def is_complex(x):
return x.is_integer is False and x.is_real is False and x.is_complex
if is_complex(base) or is_complex(divisor):
self.assertRaisesRegex(
TypeError,
(r"unsupported operand type\(s\) for //: 'Symbol' and 'Symbol',"
r" expected integer or real"),
op)
continue
op = op()
# In regular Python, x//x == 1.0 if x is a float, but FloorDiv
# always returns an integer 1 when both args are the same object.
# This even works for Symbols with no assumptions specified.
if base is divisor:
self.assertTrue(op.is_integer)
self.assertTrue(op.is_real)
elif base.is_integer and divisor.is_integer:
self.assertTrue(op.is_integer)
self.assertTrue(op.is_real)
else:
self.assertEqual(op.is_integer, None)
self.assertTrue(op.is_real)
if __name__ == '__main__':
run_tests()
| [
"[email protected]"
] | |
970f78ea015e9db88c4e3b9ee9a24620d1acd033 | 14252ea933a08056363230c6df89223b996a0da2 | /app/users/migrations/0001_initial.py | 89fc36d8186ebc06e266431cde71c53a49bd67a8 | [
"MIT"
] | permissive | S3Infosoft/mvr-insights | eeb02aa2e6767e6a23818d4e09f7be7ce29f80cb | ac73feff03c1592d5efd8e0b82f72dd4dbd3e921 | refs/heads/master | 2020-05-29T14:08:11.070784 | 2020-04-23T19:46:57 | 2020-04-23T19:46:57 | 189,184,619 | 0 | 1 | MIT | 2020-04-23T19:46:58 | 2019-05-29T08:35:56 | CSS | UTF-8 | Python | false | false | 2,357 | py | # Generated by Django 2.2.1 on 2019-05-29 20:09
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0011_update_proxy_permissions'),
]
operations = [
migrations.CreateModel(
name='CustomUser',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('email', models.EmailField(max_length=254, unique=True, verbose_name='email address')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
),
]
| [
"[email protected]"
] | |
6a1fa05b533ac5a143059c018c1463515a70c318 | e6630377da829600b846ad7ecd67daa335878b5a | /main/views.py | 6211c54b7998d088bd564686df00b806c983aa97 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
] | permissive | jorgecarleitao/public-contracts | 839a09cf54af5fc292aa640d1aaf235f1fb755fa | 3107ddc007f3574ce19aaa2223399484bc6b1382 | refs/heads/master | 2021-06-15T23:07:13.291743 | 2017-05-09T11:44:56 | 2017-05-09T11:44:56 | 14,209,257 | 26 | 8 | null | 2017-07-21T10:45:15 | 2013-11-07T16:36:47 | Python | UTF-8 | Python | false | false | 353 | py | from django.shortcuts import render
def robots(request):
return render(request, 'robots.txt', content_type='text/plain')
def home(request):
return render(request, 'main_page.html', {'REQUIRE_D3JS': True})
def about(request):
return render(request, 'about.html')
def contribute(request):
return render(request, 'contribute.html')
| [
"[email protected]"
] | |
79431e97956904979ec976554f8289946bb61697 | ee760ad085d305f12e4a60e6ea548e94e71606b6 | /sds/envs/cartpole/cartpole.py | 9f40106c313c24d4b0369a481d801c71182f2463 | [
"MIT"
] | permissive | zlatanajanovic/sds | 819ab40f044b72e0a88504601f23d233302c0b4b | 3c195fb9cbd88a9284287d62c0eacb6afc4598a7 | refs/heads/master | 2023-07-19T00:55:20.549984 | 2021-08-23T14:51:09 | 2021-08-23T14:51:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,644 | py | import gym
from gym import spaces
from gym.utils import seeding
import numpy as np
def normalize(x):
# wraps angle between [-pi, pi]
return ((x + np.pi) % (2. * np.pi)) - np.pi
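# e.g. normalize(3. * np.pi / 2.) == -np.pi / 2., and normalize(np.pi) == -np.pi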
class Cartpole(gym.Env):
def __init__(self):
self.state_dim = 4
self.act_dim = 1
self.obs_dim = 4
self.dt = 0.01
self.sigma = 1e-8
# g = [x, th, dx, dth]
self.g = np.array([0., 0., 0., 0.])
self.gw = - np.array([1e0, 2e0, 1e-1, 1e-1])
# x = [x, th, dx, dth]
self.xmax = np.array([5., np.inf, np.inf, np.inf])
self.state_space = spaces.Box(low=-self.xmax,
high=self.xmax,
dtype=np.float64)
# y = [x, th, dx, dth]
self.ymax = np.array([5., np.inf, np.inf, np.inf])
self.observation_space = spaces.Box(low=-self.ymax,
high=self.ymax,
dtype=np.float64)
self.uw = - 1e-3 * np.ones((self.act_dim, ))
self.umax = 5.0 * np.ones((self.act_dim, ))
self.action_space = spaces.Box(low=-self.umax,
high=self.umax, shape=(1,),
dtype=np.float64)
self.uniform = True
self.state = None
self.np_random = None
self.seed()
@property
def xlim(self):
return self.xmax
@property
def ulim(self):
return self.umax
def dynamics(self, x, u):
uc = np.clip(u, -self.ulim, self.ulim)
# Equations: http://coneural.org/florian/papers/05_cart_pole.pdf
# x = [x, th, dx, dth]
g = 9.81
Mc = 0.37
Mp = 0.127
Mt = Mc + Mp
l = 0.3365
fr = 0.005
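        # Parameter meanings (values assumed for a small lab cartpole rig):
        # g gravity [m/s^2], Mc cart mass [kg], Mp pole mass [kg],
        # l pole half-length [m], fr cart friction coefficient.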
def f(x, u):
q, th, dq, dth = x
sth = np.sin(th)
cth = np.cos(th)
# This friction model is not exactly right
# It neglects the influence of the pole
num = g * sth + cth * (- (u - fr * dq) - Mp * l * dth**2 * sth) / Mt
denom = l * ((4. / 3.) - Mp * cth**2 / Mt)
ddth = num / denom
ddx = (u + Mp * l * (dth**2 * sth - ddth * cth)) / Mt
return np.hstack((dq, dth, ddx, ddth))
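        # Classic fourth-order Runge-Kutta (RK4) integration of dx/dt = f(x, u)
        # over one step dt: four slope samples blended with weights 1-2-2-1
        # give an O(dt^4)-accurate state update.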
c1 = f(x, uc)
c2 = f(x + 0.5 * self.dt * c1, uc)
c3 = f(x + 0.5 * self.dt * c2, uc)
c4 = f(x + self.dt * c3, uc)
xn = x + self.dt / 6. * (c1 + 2. * c2 + 2. * c3 + c4)
xn = np.clip(xn, -self.xlim, self.xlim)
return xn
def observe(self, x):
return np.array([x[0], normalize(x[1]), x[2], x[3]])
def noise(self, x=None, u=None):
_u = np.clip(u, -self.ulim, self.ulim)
_x = np.clip(x, -self.xlim, self.xlim)
return self.sigma * np.eye(self.obs_dim)
    def reward(self, x, u):
_x = np.array([x[0], normalize(x[1]), x[2], x[3]])
return (_x - self.g).T @ np.diag(self.gw) @ (_x - self.g)\
+ u.T @ np.diag(self.uw) @ u
def seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def step(self, u):
self.state = self.dynamics(self.state, u)
        rwrd = self.reward(self.state, u)
sigma = self.noise(self.state, u)
obs = self.np_random.multivariate_normal(self.observe(self.state), sigma)
return obs, rwrd, False, {}
def reset(self):
if self.uniform:
low = np.array([-0.1, -np.pi, -5.0, -10.0])
high = np.array([0.1, np.pi, 5.0, 10.0])
else:
low, high = np.array([0., np.pi - np.pi / 18., 0., -1.0]),\
np.array([0., np.pi + np.pi / 18., 0., 1.0])
self.state = self.np_random.uniform(low=low, high=high)
return self.observe(self.state)
# for plotting
def fake_step(self, x, u):
xn = self.dynamics(x, u)
return self.observe(xn)
class CartpoleWithCartesianObservation(Cartpole):
def __init__(self):
super(CartpoleWithCartesianObservation, self).__init__()
self.obs_dim = 5
# y = [x, cos, sin, xd, thd]
self.ymax = np.array([5., 1., 1., np.inf, np.inf])
self.observation_space = spaces.Box(low=-self.ymax,
high=self.ymax,
dtype=np.float64)
def observe(self, x):
return np.array([x[0],
np.cos(x[1]),
np.sin(x[1]),
x[2],
x[3]])
| [
"[email protected]"
] | |
9f360e44d1c0c0dcee68e96a596c20e516949e0a | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_3/pr0v3rbs/c.py | 5d1ec463d56cedc7496b8cb3f35b1bb73dc76551 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 1,290 | py | import math
powerArr = [[],[],[],[],[],[],[],[],[],[],[]]
baseArr = [0,0,0,0,0,0,0,0,0,0,0]
jamArr = [0,0,0,0,0,0,0,0,0,0,0]
input()
N, J = raw_input().split()
N = int(N)
J = int(J)
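# Google Code Jam 2016 Qualification C ("Coin Jam"): emit J distinct N-digit
# binary strings that are composite when interpreted in every base 2..10, and
# print a nontrivial divisor for each base as proof. Candidates are numbers
# whose first and last binary digits are 1; trial division up to 10000 hunts
# for the divisors.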
def JamCheck():
result = True
idx = 0
t = baseArr[2]
for i in range(3, 11):
baseArr[i] = 0
while t != 0:
if t % 2 == 1:
for i in range(3, 11):
baseArr[i] += powerArr[i][idx]
idx += 1
t /= 2
for i in range(2, 11):
primeCheck = True
j = 2
length = int(math.sqrt(baseArr[i])) + 1
while j <= 10000:
if baseArr[i] % j == 0:
#jamArr[i] = j
jamArr[i] = baseArr[i] / j
primeCheck = False
break
j+=1
if primeCheck:
result = False
break
return result
def PrintJam():
print bin(baseArr[2])[2:], jamArr[2], jamArr[3], jamArr[4], jamArr[5], jamArr[6], jamArr[7], jamArr[8], jamArr[9], jamArr[10]
for i in range(2, 11):
for j in range(N):
powerArr[i].append(i**j)
baseArr[2] = 2**(N-1) + 1
print "Case #1:"
while J != 0:
if JamCheck() :
PrintJam()
J -= 1
baseArr[2] += 2
| [
"[[email protected]]"
] | |
70db18cb9ff35971591998a62ddab059246ace2b | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_anarchist.py | f6c4dbb7d584b89e893652ff5df9b887a17eaf22 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py |
#class header
class _ANARCHIST():
def __init__(self,):
self.name = "ANARCHIST"
self.definitions = [u'a person who believes in anarchism: ', u'someone who wishes to destroy the existing government and laws: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
] | |
b01999efc5b5875897213f5aaab988ee20c873f5 | d94b6845aeeb412aac6850b70e22628bc84d1d6d | /autoregressive_diffusion/experiments/audio/configs/sc09_wavenet.py | 957e0ecbd26bb689722cf752a90ff946ab2ea6d4 | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | ishine/google-research | 541aea114a68ced68736340e037fc0f8257d1ea2 | c1ae273841592fce4c993bf35cdd0a6424e73da4 | refs/heads/master | 2023-06-08T23:02:25.502203 | 2023-05-31T01:00:56 | 2023-05-31T01:06:45 | 242,478,569 | 0 | 0 | Apache-2.0 | 2020-06-23T01:55:11 | 2020-02-23T07:59:42 | Jupyter Notebook | UTF-8 | Python | false | false | 2,032 | py | # coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Config for the SC09 dataset.
"""
import ml_collections
D = lambda **kwargs: ml_collections.ConfigDict(kwargs)
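# D is shorthand: D(a=1, b=2) builds an ml_collections.ConfigDict, keeping the
# nested config blocks below terse.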
def get_config():
"""Get the default hyperparameter configuration."""
config = ml_collections.ConfigDict()
# Dataset configuration.
config.dataset = D(
name='speech_commands09',
train_split='train',
eval_split='validation',
test_split='test',
max_length=16000 # Some audio files are shorter than 16000.
)
config.batch_size = 256
config.eval_batch_size = 512
config.mask_shape = (16000, 1)
# Training.
config.num_train_steps = 1_000_000
config.beta2 = 0.999
config.clip_grad = 1000.
config.weight_decay = 0.
config.ema_momentum = 0.995
config.learning_rate = D(
base_learning_rate=1e-4,
factors='constant',
warmup_steps=15000,
)
config.restore_checkpoints = True
config.checkpoint_every_steps = 5_000
config.eval_every_steps = 2_500
config.log_every_steps = 1_000
config.sample_every_steps = None
config.seed = 42
# Evaluation.
config.sample_batch_size = 16
config.num_eval_passes = 32
# Model.
config.model = 'arm'
config.output_distribution = 'discretized_logistic'
config.num_mixtures = 30
# Architecture.
config.arch = D(
name='diff_wave',
config=D(
num_blocks=36,
features=256,
dilation_cycle=12,
)
)
return config
| [
"[email protected]"
] | |
9aaa492c6143160703e493cdf16a62b43a86dd6e | b32ca71ab8127f7c3c1a73a6a8f01adefa1be026 | /mathsquiz6.py | 34a38051e90b696fc060dc3e5abac524d9bd01e5 | [] | no_license | Botany-Downs-Secondary-College/mathsquiz-Howard13312 | a50312776d686ab51eb6421c8af16889a3440c3c | 0c78c55ff99d058264791412d312eda4d37f9bef | refs/heads/main | 2023-03-28T14:37:25.155100 | 2021-03-26T12:12:50 | 2021-03-26T12:12:50 | 337,539,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,343 | py | #mathsquizv6.py
#Howard Li
#import the tool kit interface (tkinter) modules
from tkinter import*
from tkinter import ttk
from random import*
#parent class
class Mathquiz:
def __init__ (self,parent):
'''Widgets for welcome frame'''
self.Welcome = Frame(parent)
self.Welcome.grid(row=0, column=0)
self.Titlelabel1 = Label(self.Welcome, text = "Welcome to Maths quiz", bg = "black", fg = "white", width = 30, padx = 30, pady = 10, font = ("Time", '14', "bold italic"))
self.Titlelabel1.grid(columnspan = 2)
self.Nextbutton = ttk.Button(self.Welcome, text = 'Next', command = self.show_Questions)
self.Nextbutton.grid(row = 8, column = 1)
#Name and Age label
self.NameLabel = Label(self.Welcome, text = 'Name', anchor = W, fg = "black", width= 10, padx = 30, pady = 10, font = ("Time", '12', "bold italic"))
self.NameLabel.grid(row = 2, column = 0)
        self.AgeLabel = Label(self.Welcome, text = 'Age', anchor = W, fg = "black", width= 10, padx = 30, pady = 10, font = ("Time", "12", "bold italic"))
        self.AgeLabel.grid(row = 3, column = 0)
#name and age entry
self.NameEntry = ttk.Entry(self.Welcome, width = 20)
self.NameEntry.grid(row = 2, column = 1, columnspan = 2)
self.AgeEntry = ttk.Entry(self.Welcome, width = 20)
self.AgeEntry.grid(row = 3, column = 1)
#Warning, Difficulty level label and radio buttons
self.WarningLabel = Label(self.Welcome, text = '', anchor = W, fg = "red", width= 20, padx = 30, pady = 10)
self.WarningLabel.grid(row = 4, columnspan = 2)
        self.DifficultyLabel = Label(self.Welcome, text = 'Choose difficulty', anchor = W, fg = "black", width= 14, padx = 30, pady = 20, font = ("Time", "12", "bold italic"))
self.DifficultyLabel.grid(row = 5, column = 0)
self.difficulty = ["Easy", "Medium", "Hard"]
self.diff_lvl = StringVar()
self.diff_lvl.set(0)
self.diff_btns = []
for i in range(len(self.difficulty)):
rb = Radiobutton(self.Welcome, variable = self.diff_lvl, value = i, text = self.difficulty[i], anchor = W, padx = 50, width = "5", height = "2")
self.diff_btns.append(rb)
rb.grid(row = i+6, column = 0, sticky = W)
'''Widgets for Questions frame'''
self.Questions = Frame(parent)
#self.Questions.grid(row = 0, column = 1)
self.QuestionLabel = Label(self.Questions, text = "Quiz Questions", bg = "black", fg = "white", width = 20, padx = 30, pady = 10, font = ("Time", '14', "bold italic"))
self.QuestionLabel.grid(columnspan = 2)
self.HomeButton = ttk.Button(self.Questions, text = 'Home', command = self.show_Welcome)
self.HomeButton.grid(row = 8, column = 0)
self.Problems = Label(self.Questions, text = "")
self.Problems.grid(row = 1, column = 0)
self.next_button = ttk.Button(self.Questions, text = "Next question", command = self.next_question)
self.next_button.grid(row=8, column=1)
def show_Welcome(self):
self.Questions.grid_remove()
self.Welcome.grid()
def show_Questions(self):
#Error checking for empty or non-text user entries for Name
try:
if self.NameEntry.get() == "":
self.WarningLabel.configure(text = "Please enter name")
self.NameEntry.focus()
elif self.NameEntry.get().isalpha() == False:
self.WarningLabel.configure(text = "Please enter text")
self.NameEntry.delete(0, END)
self.NameEntry.focus()
#Error for checking for empty and age limit cases
elif self.AgeEntry.get() == "":
self.WarningLabel.configure(text = "Please enter age")
self.AgeEntry.focus()
elif int(self.AgeEntry.get()) > 12:
self.WarningLabel.configure(text = "You are too old!")
self.AgeEntry.delete(0, END)
self.AgeEntry.focus()
elif int(self.AgeEntry.get()) < 0:
self.WarningLabel.configure(text = "You can't be smaller than 0?")
self.AgeEntry.delete(0, END)
self.AgeEntry.focus()
elif int(self.AgeEntry.get()) < 7:
self.WarningLabel.configure(text = "You are too young!")
self.AgeEntry.delete(0, END)
self.AgeEntry.focus()
else:
self.Welcome.grid_remove()
self.Questions.grid()
except ValueError:
self.WarningLabel.configure(text = "Please enter a number")
self.AgeEntry.delete(0, END)
self.AgeEntry.focus()
def next_question(self):
x = randrange(10)
y = randrange(10)
self.answer = x + y
question_text = str(x) + " + " + str(y) + " = "
self.Problems.configure(text = question_text)
#main routine
if __name__ == "__main__":
root = Tk()
frames = Mathquiz(root)
root.title("Quiz")
root.mainloop()
| [
"[email protected]"
] | |
0a9e74b629c603dd1c1563a8e5ef7e8718e00864 | 0fe394b10b39864915fcc4073a5fa050aa02502e | /MatplotPractice/AfricaCSV.py | 107dee89fc18bac4c8c570c0efbedde89f3b5c09 | [] | no_license | JohnAssebe/Python | 9997d47bba4a056fdcd74c6e5207fc52b002cbfd | b88a7c2472f245dc6a0e8900bbea490cb0e0beda | refs/heads/master | 2022-05-14T10:08:37.311345 | 2022-05-09T19:48:53 | 2022-05-09T19:48:53 | 212,562,910 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 790 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Oct 17 11:30:26 2019
@author: Yohannes Assebe
"""
import csv
from matplotlib import pyplot as plt
plt.title("Africa Annual Inflation per years")
#print(plt.style.available)
#plt.style.use("seaborn-notebook")
with open("african_crises.csv") as af:
files=csv.DictReader(af)
x=[]
y=[]
for row in files:
x.append(int(row['year']))
y.append(float(row['inflation_annual_cpi']))
plt.scatter(x,y)
plt.scatter(x[1],y[1])
plt.xlabel("Year")
plt.ylabel("Annual Inflation")
plt.grid(True)
#plt.legend()
#plt.ylim(0,100)
plt.tight_layout()
#print(plt.style.available)
plt.savefig("AffricaAnnualInflations.png")
plt.show()
| [
"[email protected]"
] | |
105e0acf0a543fdb4c37897d8888f056971a6191 | 7f33c02743fbfd18726ffef08924f528354372dd | /Python_Projects/python3_selfstudy/priklady_z_knihy/k06/SortedList.py | 68a5e0a9974eaaf4e25c3d093c6eab3fd1be1e95 | [] | no_license | zabojnikp/study | a524eb9c2265a73e1db0b5f0e76b359c123a397b | 43424bfc6641cd8fa13ab119ce283fb460b4ffc1 | refs/heads/master | 2020-04-06T14:21:55.786353 | 2018-11-27T22:10:48 | 2018-11-27T22:10:48 | 157,538,244 | 0 | 0 | null | 2018-11-27T22:10:49 | 2018-11-14T11:24:20 | Python | UTF-8 | Python | false | false | 10,990 | py | #!/usr/bin/env python3
# Copyright (c) 2008-9 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. It is provided for educational
# purposes and is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
"""
>>> L = SortedList((5, 8, -1, 3, 4, 22))
>>> L[2] = 18 #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
TypeError: use add() to insert a value and rely on the...
>>> list(L)
[-1, 3, 4, 5, 8, 22]
>>> L.add(5)
>>> L.add(5)
>>> L.add(6)
>>> list(L)
[-1, 3, 4, 5, 5, 5, 6, 8, 22]
>>> L.index(4)
2
>>> L.count(5), L.count(2)
(3, 0)
>>> L.insert(2, 9)
Traceback (most recent call last):
...
AttributeError: 'SortedList' object has no attribute 'insert'
>>> L.reverse()
Traceback (most recent call last):
...
AttributeError: 'SortedList' object has no attribute 'reverse'
>>> L.sort()
Traceback (most recent call last):
...
AttributeError: 'SortedList' object has no attribute 'sort'
>>> import collections
>>> isinstance(L, collections.Sequence)
False
"""
_identity = lambda x: x
class SortedList:
def __init__(self, sequence=None, key=None):
"""Vytvoří objekt typu SortedList, který řadí prvky pomocí operátoru <
aplikovaného na prvky nebo na výsledky použití zadané klíčové funkce
>>> L = SortedList()
>>> print(L)
[]
>>> L = SortedList((5, 8, -1, 3, 4, 22))
>>> print(L)
[-1, 3, 4, 5, 8, 22]
>>> L = SortedList({9, 8, 7, 6, -1, -2})
>>> print(L)
[-2, -1, 6, 7, 8, 9]
>>> L = SortedList([-5, 4, -3, 8, -2, 16, -1, 0, -3, 8])
>>> print(L)
[-5, -3, -3, -2, -1, 0, 4, 8, 8, 16]
>>> L2 = SortedList(L)
>>> print(L2)
[-5, -3, -3, -2, -1, 0, 4, 8, 8, 16]
>>> L = SortedList(("ta", "rychlá", "hnědá", "liška", "skáče"))
>>> print(L)
['hnědá', 'liška', 'rychlá', 'skáče', 'ta']
"""
self.__key = key or _identity
assert hasattr(self.__key, "__call__")
if sequence is None:
self.__list = []
elif (isinstance(sequence, SortedList) and
sequence.key == self.__key):
self.__list = sequence.__list[:]
else:
self.__list = sorted(list(sequence), key=self.__key)
@property
def key(self):
"""Vrátí klíčovou funkci používanou tímto seznamem
"""
return self.__key
def clear(self):
"""Vyčistí seznam
>>> L = SortedList((5, 8, -1, 3, 4, 22))
>>> print(L)
[-1, 3, 4, 5, 8, 22]
>>> L.clear()
>>> print(L)
[]
"""
self.__list = []
def __bisect_left(self, value):
"""Vrátí indexovou pozici hodnoty v seznamu
(nebo kam hodnota náleží, není-li v seznamu)
"""
key = self.__key(value)
left, right = 0, len(self.__list)
while left < right:
middle = (left + right) // 2
if self.__key(self.__list[middle]) < key:
left = middle + 1
else:
right = middle
return left
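    # __bisect_left above is a plain binary search over key-transformed items;
    # e.g. for the list [-1, 3, 4], __bisect_left(3) == 1 and __bisect_left(5) == 3.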
def add(self, value):
"""Přidá hodnotu do seznamu (duplicity jsou povoleny)
>>> L = SortedList((5, 8, -1, 3, 4, 22))
>>> print(L)
[-1, 3, 4, 5, 8, 22]
>>> L.add(5)
>>> L.add(5)
>>> L.add(7)
>>> L.add(-18)
>>> L.add(99)
>>> print(L)
[-18, -1, 3, 4, 5, 5, 5, 7, 8, 22, 99]
"""
index = self.__bisect_left(value)
if index == len(self.__list):
self.__list.append(value)
else:
self.__list.insert(index, value)
def pop(self, index=-1):
"""Odstraní a vrátí prvek na zadané pozici
>>> L = SortedList([-18, -1, 3, 4, 5, 5, 7, 8, 22, 99])
>>> print(L)
[-18, -1, 3, 4, 5, 5, 7, 8, 22, 99]
>>> L.pop()
99
>>> L.pop(0)
-18
>>> L.pop(5)
7
>>> print(L)
[-1, 3, 4, 5, 5, 8, 22]
>>> L.pop(12)
Traceback (most recent call last):
...
IndexError: pop index out of range
"""
return self.__list.pop(index)
def remove(self, value):
"""Odstraní první výskyt hodnoty ze seznamu
>>> L = SortedList([-18, -1, 3, 4, 5, 5, 7, 8, 22, 99])
>>> print(L)
[-18, -1, 3, 4, 5, 5, 7, 8, 22, 99]
>>> L.remove(20)
Traceback (most recent call last):
...
ValueError: SortedList.remove(x): x not in list
>>> L.remove(5)
>>> L.remove(-18)
>>> L.remove(99)
>>> print(L)
[-1, 3, 4, 5, 7, 8, 22]
"""
index = self.__bisect_left(value)
if index < len(self.__list) and self.__list[index] == value:
del self.__list[index]
else:
raise ValueError("{0}.remove(x): x not in list".format(
self.__class__.__name__))
def remove_every(self, value):
"""Odstraní každý výskyt hodnoty ze seznamu
Vrátí počet odstraněných výskytů (kterých může být i 0).
>>> L = SortedList([5, 5, -18, -1, 3, 4, 5, 5, 7, 8, 22, 99])
>>> L.add(5)
>>> L.add(5)
>>> print(L)
[-18, -1, 3, 4, 5, 5, 5, 5, 5, 5, 7, 8, 22, 99]
>>> L.remove_every(-3)
0
>>> L.remove_every(7)
1
>>> L.remove_every(5)
6
>>> print(L)
[-18, -1, 3, 4, 8, 22, 99]
"""
count = 0
index = self.__bisect_left(value)
while (index < len(self.__list) and
self.__list[index] == value):
del self.__list[index]
count += 1
return count
def count(self, value):
"""Spočítá všechny výskyty hodnoty v seznamu
>>> L = SortedList([5, 5, -18, -1, 3, 4, 5, 5, 7, 8, 22, 99])
>>> L.count(5)
4
>>> L.count(99)
1
>>> L.count(-17)
0
"""
count = 0
index = self.__bisect_left(value)
while (index < len(self.__list) and
self.__list[index] == value):
index += 1
count += 1
return count
def index(self, value):
"""Vrátí indexovou pozici prvního výskytu zadané hodnoty
>>> L = SortedList([5, 5, -18, -1, 3, 4, 7, 8, 22, 99, 2, 1, 3])
>>> L.index(5)
7
>>> L.index(0)
Traceback (most recent call last):
...
ValueError: SortedList.index(x): x not in list
>>> L.index(99)
12
"""
index = self.__bisect_left(value)
if index < len(self.__list) and self.__list[index] == value:
return index
raise ValueError("{0}.index(x): x not in list".format(
self.__class__.__name__))
def __delitem__(self, index):
"""Vymaže hodnotu na zadané indexové pozici
>>> L = SortedList([9, -5, 3, -7, 8, 14, 0, 8, 3])
>>> print(L)
[-7, -5, 0, 3, 3, 8, 8, 9, 14]
>>> del L[0]
>>> del L[-1]
>>> del L[5]
>>> print(L)
[-5, 0, 3, 3, 8, 9]
>>> del L[25]
Traceback (most recent call last):
...
IndexError: list assignment index out of range
>>> del L[-3:]
>>> print(L)
[-5, 0, 3]
"""
del self.__list[index]
def __getitem__(self, index):
"""Vrátí hodnotu na zadané indexové pozici
>>> L = SortedList([9, -5, 3, -7, 8, 14, 0, 8, 3])
>>> L[0], L[3], L[4], L[-1]
(-7, 3, 3, 14)
>>> L[15]
Traceback (most recent call last):
...
IndexError: list index out of range
>>> L[:3]
[-7, -5, 0]
>>> L[4:8]
[3, 8, 8, 9]
"""
return self.__list[index]
def __setitem__(self, index, value):
raise TypeError("pro vložení hodnoty použijte add() a nechte seznam, "
" aby ji umístil na správné místo")
def __iter__(self):
"""Vrátí iterátor pro seznam
>>> L = SortedList([5, 5, -18, -1, 3, 4, 7, 8, 22, 99, 2, 1, 3])
>>> result = []
>>> for x in L:
... result.append(x)
>>> print(result)
[-18, -1, 1, 2, 3, 3, 4, 5, 5, 7, 8, 22, 99]
"""
return iter(self.__list)
def __reversed__(self):
"""Vrátí obrácený iterátor pro seznam
>>> L = SortedList([5, 5, -18, -1, 3, 4, 7, 8, 22, 99, 2, 1, 3])
>>> result = []
>>> for x in reversed(L):
... result.append(x)
>>> print(result)
[99, 22, 8, 7, 5, 5, 4, 3, 3, 2, 1, -1, -18]
"""
return reversed(self.__list)
def __contains__(self, value):
"""Vrátí True, je-li hodntoa v seznamu. Jinak vrátí False
>>> L = SortedList([5, 5, -18, -1, 3, 4, 7, 8, 22, 99, 2, 1, 3])
>>> 5 in L
True
>>> 0 in L
False
>>> 99 in L
True
"""
index = self.__bisect_left(value)
return (index < len(self.__list) and
self.__list[index] == value)
def __len__(self):
"""Vrátí délku seznamu
>>> L = SortedList([5, 5, -18, -1, 3, 4, 7, 8, 22, 99, 2, 1, 3])
>>> len(L)
13
>>> L = SortedList()
>>> len(L)
0
"""
return len(self.__list)
def __str__(self):
"""Vrátí řetězcovou verzi seznamu čitelnou pro člověka.
výsledek může být velmi dlouhý
>>> L = SortedList([-1, 3, 4, 7, 8, 22, -9, 2, 1, 3])
>>> str(L)
'[-9, -1, 1, 2, 3, 3, 4, 7, 8, 22]'
>>> L = SortedList()
>>> str(L)
'[]'
>>> L = SortedList(("the", "quick", "brown", "fox", "jumped"))
>>> str(L)
"['brown', 'fox', 'jumped', 'quick', 'the']"
"""
return str(self.__list)
def copy(self):
"""Vrátí mělkou kopii seznamu se stejnou klíčovou funkcí
>>> L = SortedList([-1, 3, 4, 7, 8, 22, -9, 2, 1, 3])
>>> m = L.copy()
>>> str(m)
'[-9, -1, 1, 2, 3, 3, 4, 7, 8, 22]'
>>> m[:]
[-9, -1, 1, 2, 3, 3, 4, 7, 8, 22]
>>> import copy
>>> n = copy.copy(L)
>>> str(n)
'[-9, -1, 1, 2, 3, 3, 4, 7, 8, 22]'
"""
return SortedList(self, self.__key)
__copy__ = copy
if __name__ == "__main__":
import doctest
doctest.testmod()
| [
"[email protected]"
] | |
b409f72e18569cb332729b7e95a20bd361d3e851 | 3b84ca7d132e6ca5004029d39bfa7c8fead07fe1 | /seexpr/3.0.1/package.py | 4a03d862bddca4dccd83610ebaf068b03b7781ef | [] | no_license | est77/rez-packages | 05a5a05224e02c0a28bc37a81cbd07ca7447d604 | 449ade7acf92196efda2e8ec883c52ba4e33262d | refs/heads/master | 2020-05-27T10:35:02.323417 | 2020-02-23T19:03:05 | 2020-02-23T19:03:05 | 82,542,112 | 22 | 7 | null | null | null | null | UTF-8 | Python | false | false | 180 | py | # -*- coding: utf-8 -*-
name = "seexpr"
version = "3.0.1"
description = "SeExpr"
variants = [
['gcc-6.3.1']
]
def commands():
env.LD_LIBRARY_PATH.append("{root}/lib")
| [
"[email protected]"
] | |
e8128ec06bdbba0ad8f05aea2b9b0c4a03325995 | 5c096fea6033e9c07f38e773e32c901bb7146d09 | /04/jiangk/reboot-website/app/modules/manufacturers.py | 85c4605e190298b07eecad20527588335cd92b34 | [] | no_license | keepauto/devops2 | 8b043ed258030b75c9b2c7129ae1468af5b42850 | b93c840bcca4bd8589546a171a6458bcc6d73047 | refs/heads/master | 2021-01-21T10:37:48.378259 | 2016-07-29T05:42:46 | 2016-07-29T05:42:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,957 | py | #!/usr/bin/python
# coding:utf-8
from app.models import Manufacturers
from flask import current_app
from app.models import db
from app.utils import check_field_exists
from app.utils import check_output_field
from app.utils import check_order_by
from app.utils import check_limit
from app.utils import process_result
from app.utils import check_update_params
def create(**kwargs):
    # 1. get the parameters
    # print kwargs
    # 2. check that the supplied fields exist on the model
    check_field_exists(Manufacturers, kwargs)
    # 3. insert into the database
manufacturers = Manufacturers(**kwargs)
db.session.add(manufacturers)
try:
db.session.commit()
except Exception, e:
# logging
current_app.logger.warning(
"commit error: {}".format(e.message)
)
raise Exception("commit error")
    # 4. return the id of the inserted row
return manufacturers.id
def get(**kwargs):
# output: [manufacturers_name, user_interface, user_phone]
# where: {
# id: 1
# }
# limit: 10
# order_by: id
    # 1. collect the query options
output = kwargs.get("output", [])
limit = kwargs.get("limit", 10)
order_by = kwargs.get("order_by", "id desc")
where = kwargs.get("where", {})
    # 2. validate
    # validate the output fields
check_output_field(Manufacturers, output)
    # validate order_by: split the string, make sure the column exists on the table, and the second token must be asc/desc
order_by_list = check_order_by(Manufacturers, order_by)
    # validate limit: it must be a number
check_limit(limit)
    # where conditions are not validated for now
pass
# print callable(getattr(getattr(Manufacturers, "id"), "desc"))
    # the function object:
    # getattr(getattr(Manufacturers, tmp_order_by[0]), tmp_order_by[1])
    # calling the function:
    # getattr(getattr(Manufacturers, tmp_order_by[0]), tmp_order_by[1])()
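    # e.g. order_by_list == ["id", "desc"] resolves to Manufacturers.id.desc()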
data = db.session.query(Manufacturers).filter_by(**where)\
.order_by(getattr(getattr(Manufacturers, order_by_list[0]), order_by_list[1])())\
.limit(limit).all()
db.session.close()
# process result
return process_result(data, output)
def update(**kwargs):
data = kwargs.get("data", {})
where = kwargs.get("where", {})
    # # 1. validate data
    # # 2. validate where
    # # 3. an update must supply an id; rows are only updated by id
    # # the id must be an integer greater than 0
check_update_params(Manufacturers, data, where)
# update
# ret = db.session.query(Manufacturers).filter_by(**where).update(**data)
    # calling this with update(**data) raised: update() got an unexpected keyword argument 'rel_cabinet_num'
ret = db.session.query(Manufacturers).filter_by(**where).update(data)
try:
db.session.commit()
except Exception, e:
# logging
current_app.logger.warning("commit error: {}".format(e.message))
raise Exception("commit error")
# print ret
return ret
| [
"[email protected]"
] | |
11c6ff5edd9c5239ca96577fc4e0da53c251d1a9 | 70a960186af21ae6f7a8fcd4d13fde54892f1821 | /odds_and_ends/EVAunit00/dumps/serverscript/Windows Image Capture | 2bdb0607338cfe16a61903f35dd46c65c877f937 | [] | no_license | apua/altair_mat | d6f31bacae62d4490d561c2f9ec07a745693e15e | 37dd580fd011aaae9ca52f99bb13757bab2df325 | refs/heads/master | 2021-04-29T05:49:01.561238 | 2015-08-21T09:40:50 | 2015-08-21T09:40:50 | 78,004,547 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,161 | #!/usr/bin/python
# -*- mode: Python; tab-width: 4; indent-tabs-mode: nil; -*-
# ex: set tabstop=4 :
# Please do not change the two lines above. See PEP 8, PEP 263.
#
import sys
import os
from optparse import OptionError
from osprov.optparse_ext import OptionParser
from osprov.scripts import bootmode
from osprov.osbp import logger
from osprov.util import process
from osprov.decorators import HandleShowErrorMessage
from osprov import diskmgr
from osprov.diskmgr import const, disk, diskmanager, partition
from osprov.server import ThisLocalServer
from tempfile import mkstemp
LOG = logger.getIt("windows_image_capture")
IMAGEX_CONFIG_FILE = """
[ExclusionList]
"\\Boot"
"\\Program Files\\Opsware"
"\\Program Files\\Common Files\\Opsware"
"""
class WindowsImageCaptureOptionParser(OptionParser):
""" Option parser for this step """
def defineOptions(self):
self.add_option("--bootMode", type="string",
help="boot mode of the server, can be UEFI or Legacy (boot mode is autodetected if this parameter is not passed).")
self.add_option("--systemDiskNumber", type="int", default=0,
help="system disk number where Windows is installed (default disk number is '0').")
self.add_option("--systemPartitionLabel", type="string", default="System",
help="label of partition where Windows is installed (default partition label is 'System').")
self.add_option("--wimFilePath", type="string",
help="path where to save WIM file (Currently using CA 'WimFileName' to provide WIM file name).")
self.add_option("--wimScript", type="string",
help="path to ImageX config file.")
def validateArgs(self, opt, args):
if opt.bootMode and not opt.bootMode.lower() in [x.lower() for x in bootmode.SUPPORTED_BOOT_MODES]:
raise OptionError("Invalid boot mode: " + opt.bootMode, "bootMode")
if not opt.wimFilePath:
raise OptionError("Missing parameter: --wimFilePath", "wimFilePath")
def captureESP(freeLetter, wimFilePath, log=LOG):
process.runIt("imagex.exe /check /verify /capture %s: \"%s_ESP\" \"ESP\"" %
(freeLetter, wimFilePath), checkExitCode=(0,), log=log)
def capturePartition(windowsDriveLetter, wimFilePath, configFilePath=None, log=LOG):
if not configFilePath:
fd, configFilePath = mkstemp()
with os.fdopen(fd, 'w') as f:
f.write(IMAGEX_CONFIG_FILE)
process.runIt("imagex.exe /config %s /check /verify /capture %s: \"%s\" \"System\"" %
(configFilePath, windowsDriveLetter, wimFilePath), checkExitCode=(0,), log=log)
@HandleShowErrorMessage("Windows Image Capture", LOG)
def main():
# get and parse arguments
options, remainingArgs = WindowsImageCaptureOptionParser().parse_args()
wimFilePath = options.wimFilePath.strip()
systemDiskNumber = options.systemDiskNumber
# get bootmode (legacy bios or uefi)
if options.bootMode:
bootMode = options.bootMode
else:
bootMode = bootmode.getCurrentBootMode(ThisLocalServer(), log=LOG)
windowsDriveLetter = disk.WindowsDisk(systemDiskNumber).getPartitionWithLabel(
options.systemPartitionLabel).letter
partitionTable = diskmgr.getPartitionTable(bootMode)
if const.PARTITION_TABLE_MBR == partitionTable:
print "Capturing Windows Image based on Legacy Windows Partitioning Schema"
capturePartition(windowsDriveLetter, wimFilePath, configFilePath=options.wimScript)
elif const.PARTITION_TABLE_GPT == partitionTable:
print "Capturing Windows Image based on Uefi Windows Partitioning Schema"
freeLetter = diskmanager.WindowsDiskManager().findFirstAvailableDriveLetter()
partition.WindowsPartition(systemDiskNumber, 1).setPartitionLetter(freeLetter)
captureESP(freeLetter, wimFilePath)
capturePartition(windowsDriveLetter, wimFilePath, configFilePath=options.wimScript)
if __name__ == "__main__":
sys.exit(main())
| [
"[email protected]"
] | ||
b3f108e068454d46e36179dc7ee8c4f4e19a3dcd | 9c517135ceebe11b88a673f707c865470eac91a8 | /layers/classifier.py | 6d27ff88218a39e7247f85c3e1d23be4a34a947d | [] | no_license | chenun123/reid_baseline | fd6d516a8badcda81484a98c567c92a34bf0b457 | 315ef25801208fc0d3f7d91fda009089b0901313 | refs/heads/master | 2020-12-17T15:32:30.223860 | 2020-01-17T10:55:57 | 2020-01-17T10:55:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,489 | py | # encoding: utf-8
"""
@author: liaoxingyu
@contact: [email protected]
"""
from torch import nn
from modeling.losses import *
from modeling.backbones import *
from .batch_norm import bn_no_bias
from modeling.utils import *
class ClassBlock(nn.Module):
"""
Define the bottleneck and classifier layer
|--bn--|--relu--|--linear--|--classifier--|
"""
def __init__(self, in_features, num_classes, relu=True, num_bottleneck=512, fc_layer='softmax'):
super().__init__()
        # bottleneck ordering follows the class docstring: bn -> relu -> linear
        block1 = []
        block1 += [nn.BatchNorm1d(in_features)]
        if relu:
            block1 += [nn.LeakyReLU(0.1)]
        block1 += [nn.Linear(in_features, num_bottleneck, bias=False)]
        self.block1 = nn.Sequential(*block1)
self.bnneck = bn_no_bias(num_bottleneck)
if fc_layer == 'softmax':
self.classifier = nn.Linear(num_bottleneck, num_classes, bias=False)
elif fc_layer == 'circle_loss':
self.classifier = CircleLoss(num_bottleneck, num_classes, s=256, m=0.25)
def init_parameters(self):
self.block1.apply(weights_init_kaiming)
self.bnneck.apply(weights_init_kaiming)
self.classifier.apply(weights_init_classifier)
def forward(self, x, label=None):
x = self.block1(x)
x = self.bnneck(x)
if self.training:
# cls_out = self.classifier(x, label)
cls_out = self.classifier(x)
return cls_out
else:
return x
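# Illustrative usage (shapes assumed, e.g. 2048-d backbone features, 751 ids):
#   head = ClassBlock(2048, 751, num_bottleneck=512)
#   logits = head(torch.randn(32, 2048))        # training mode: class scores
#   embed = head.eval()(torch.randn(32, 2048))  # eval mode: bottleneck feature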
| [
"[email protected]"
] | |
f158396e7bf336520c1beb9a6a9c2e52c1bdb526 | 79e45a6e4846927da432087aba845036b11c5622 | /UAT/bin/MarketData/Daily/MNSTdailyOHLC_withvol.py | 96dad0a3c752759bf948dcdbd7ebce990c11a5b9 | [] | no_license | mjserpico/Scarlett-Trading | cba2bcfaacf886b9d851d978683b4ce641c8f6ad | 9778717393dbb0818ee026356996d1806345a6c2 | refs/heads/master | 2020-03-21T21:39:51.108503 | 2019-05-09T02:06:26 | 2019-05-09T02:06:26 | 139,076,454 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,164 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Jan 08 09:16:43 2017
@author: Michael
"""
import mysql.connector
from ib.opt import Connection
from ib.ext.Contract import Contract
import time
import logging
import datetime
import datalink #universal logins for environment
import math
Flag = 0
CCY1 = "MN"
CCY2 = "ST"
Table = 'MNST'
yClose = 0
logging.basicConfig(filename='DailyOHLC' + str(datetime.date.today()) + '.txt', level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
fh = logging.FileHandler('DailyOHLC' + str(datetime.date.today()) + '.txt')
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
logger.addHandler(fh)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
logger.addHandler(ch)
logger.debug('Starting DailyOHLC')
def truncate(f, n):
'''Truncates/pads a float f to n decimal places without rounding'''
s = '{}'.format(f)
if 'e' in s or 'E' in s:
return '{0:.{1}f}'.format(f, n)
i, p, d = s.partition('.')
return '.'.join([i, (d+'0'*n)[:n]])
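# Quick sanity checks for truncate (added example): digits are cut, never
# rounded up, unlike round().
assert truncate(3.14159, 2) == '3.14'
assert truncate(1.9999, 2) == '1.99'
assert truncate(2.0, 3) == '2.000'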
def reply_handler(msg):
#print(msg.value)
logger.debug('In beginning of Reply Handler')
print("Reply:", msg)
    test = msg.open  # open; test2..test5 below hold high, low, close, volume
test2 = msg.high
test3 = msg.low
test4 = msg.close
test5 = msg.volume
logger.debug('test %s', test)
logger.debug('test5 %s', test5)
global Flag
logger.debug('Flag %s', Flag)
    # test5 holds msg.volume (daily volume); currently it is only logged
logger.debug('In Reply Handler')
if float(test) != -1:
import time
logger.debug('Valid Price Found (OPEN NOT -1)')
#cnx = mysql.connector.connect(user='mjserpico', password='UrzE8B66',host="scar01.cqxmc7cib5oh.us-east-1.rds.amazonaws.com", database='SCAR01')
#cnx = mysql.connector.connect(user='Scarlett01', password='scar01lett',host="serpdb01.cqxmc7cib5oh.us-east-1.rds.amazonaws.com", database='SERPDB01')
cnx = mysql.connector.connect(user=datalink.DB_User, password=datalink.DB_Pass,host=datalink.DB_Host, database=datalink.DB_Path)
logger.debug('Connected to Database')
cur = cnx.cursor()
cur.execute("Insert Into "+ Table + """(Date, Open, High, Low, Close) values(%s,%s,%s,%s,%s)""",(time.strftime("%m/%d/%Y"),float(test),float(test2),float(test3),float(test4)))
cnx.commit()
logger.debug('Ran Insert Script')
        today = datetime.date.today()
dayofweek = datetime.datetime.today().weekday()
if dayofweek == 0: #if Today is Monday
yesterday = today - datetime.timedelta(days=3) #Get Friday
month = (str(0) + str(yesterday.month))
day = (str(0)+ str(yesterday.day))
yesterday2 = (month[-2:] +"/"+ day[-2:] +"/"+str(yesterday.year))
logger.debug('Yesterday2 was %s', str(yesterday2))
else:
yesterday = today - datetime.timedelta(days=1) #Take 1 Day back
month = (str(0) + str(yesterday.month))
day = (str(0)+ str(yesterday.day))
yesterday2 = (month[-2:] +"/"+ day[-2:] +"/"+str(yesterday.year))
logger.debug('Yesterday2 was %s', str(yesterday2))
#MovingAverage Calculation
#Step 1 Get earliest Date to calculate avg from
#reformat date to DB convention first
logger.debug('Today is still %s', today)
backdate = today - datetime.timedelta(days=13)
logger.debug('Date shifted back 10 is %s', backdate)
dayofweek = backdate.weekday()
month = (str(0) + str(backdate.month))
day = (str(0)+ str(backdate.day))
backdate2 = (month[-2:] +"/"+ day[-2:] +"/"+str(backdate.year))
logger.debug('First Date of Moving Average is %s', backdate2)
query = ("SELECT max(ID) from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID1 = ID
logger.debug('ID1 is %s', ID1)
query = ("SELECT (max(ID)-20) from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID2 = ID
logger.debug('ID1 is %s', ID1)
logger.debug('ID2 is %s', ID2)
query = ("SELECT (max(ID)-1) from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID3 = ID
logger.debug('ID3 is %s', ID3)
#Pull ATR Length From RiskParameter Table
query = ("Select RiskParametersValue from RiskParameters where RiskParametersName = 'ATRlength';")
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
atrlength = ID
logger.debug('ID4 is %s', atrlength)
#ID for ATR length start point
query = ("SELECT (max(ID)-" + str(atrlength[0]) + ") from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID4 = ID
logger.debug('ID4 is %s', ID4)
#Pull MovingAvg Length RiskParameter Table
query = ("Select RiskParametersValue from RiskParameters where RiskParametersName = 'MovAvgLength';")
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
movavglength = ID
logger.debug('ID is %s', atrlength)
#ID for MovAvg length start point
query = ("SELECT (max(ID)-" + str(movavglength[0]) + ") from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID5 = ID
logger.debug('ID5 is %s', ID5)
query = ("SELECT (max(ID)-30) from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID30 = ID
logger.debug('ID30 is %s', ID30)
query = ("SELECT (max(ID)-60) from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID60 = ID
logger.debug('ID60 is %s', ID60)
query = ("SELECT (max(ID)-90) from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID90 = ID
logger.debug('ID90 is %s', ID90)
query = ("SELECT Close from " + CCY1 + CCY2 + " where ID = " + str(ID3[0]) + ";")
cur.execute(query)
for (Close) in cur:
yClose = Close
logger.debug('yClose is %s', yClose[0])
query = ("SELECT Close from " + CCY1 + CCY2 + " where ID = " + str(ID1[0]) + ";")
cur.execute(query)
for (Close) in cur:
tClose = Close
logger.debug('tClose is %s', tClose[0])
#Interday Return
CloseReturn = float(tClose[0])
yCloseReturn = float(yClose[0])
logger.debug('yClose is %s', yClose[0])
logger.debug('Close is %s', tClose[0])
returns = round(((CloseReturn / yCloseReturn) - 1) * 100,2)
logger.debug('Return is %s', returns)
query = ("UPDATE " + CCY1 + CCY2 + " SET PercentReturn = " + str(returns) + " where ID = " + str(ID1[0]) +";")
logger.debug('Query is %s', query)
cur.execute(query)
cnx.commit()
# period Moving Average
query = ("SELECT round(Avg(Close),2) as Avg from " + CCY1 + CCY2 + " where ID BETWEEN " + str(ID5[0]) + " AND " + str(ID1[0]) + ";")
logger.debug('Query is %s', query)
cur.execute(query)
for (Avg) in cur:
BBMovAvg = Avg #Final Moving Average Value
logger.debug('MovAvg is %s', BBMovAvg)
##Puts Moving Average Value in hasPosition Table for Reference with intraday strategies
query = ("UPDATE hasPosition SET MovingAvgValue = " + str(BBMovAvg[0]) + " where CCY =\'" + CCY1 + CCY2 +"\';")
logger.debug('Query is %s', query)
cur.execute(query)
cnx.commit()
#True Range
TR1 = (test2-test3)
TR2 = abs(test2-float(yClose[0]))
TR3 = abs(test3-float(yClose[0]))
TR = truncate(max(TR1,TR2,TR3),4)
print(TR)
print(TR1)
print(TR2)
print(TR3)
query = ("UPDATE "+ Table +" SET TrueRange = " + str(TR) + " where ID =\'" + str(ID1[0]) +"\';")
logger.debug('Query is %s', query)
print(query)
cur.execute(query)
cnx.commit()
#ATR Daily
query = ("SELECT round(Avg(TrueRange),2) as Avg from " + CCY1 + CCY2 + " where ID BETWEEN " + str(ID4[0]) + " AND " + str(ID1[0]) + ";")
logger.debug('Query is %s', query)
print(query)
cur.execute(query)
for (Avg) in cur:
ATRAvg = Avg #Final Moving Average Value
logger.debug('ATR is %s', ATRAvg)
##Puts ATR in hasPosition Table for Reference with intraday strategies
query = ("UPDATE hasPosition SET ATRValue = " + str(ATRAvg[0]) + " where CCY =\'" + CCY1 + CCY2 +"\';")
logger.debug('Query is %s', query)
cur.execute(query)
print(query)
cnx.commit()
#Calculate 30D Vol
query = ("SELECT round(stddev(PercentReturn),2) as vol30 from " + CCY1 + CCY2 + " where ID BETWEEN " + str(ID30[0]) + " AND " + str(ID1[0]) + ";")
logger.debug('Query is %s', query)
cur.execute(query)
for (vol30) in cur:
thirtyd = truncate((vol30[0] * math.sqrt(252)),2) #Final Moving Average Value
logger.debug('30d is %s', thirtyd)
query = ("UPDATE "+ Table +" SET thirtyvol = " + str(thirtyd) + " where ID =\'" + str(ID1[0]) +"\';")
logger.debug('Query is %s', query)
print(query)
cur.execute(query)
cnx.commit()
#Calculate 60D Vol
query = ("SELECT round(stddev(PercentReturn),2) as vol60 from " + CCY1 + CCY2 + " where ID BETWEEN " + str(ID60[0]) + " AND " + str(ID1[0]) + ";")
logger.debug('Query is %s', query)
cur.execute(query)
for (vol60) in cur:
sixtyd = truncate((vol60[0] * math.sqrt(252)),2) #Final Moving Average Value
logger.debug('sixtyd is %s', sixtyd)
query = ("UPDATE "+ Table +" SET sixtyvol = " + str(sixtyd) + " where ID =\'" + str(ID1[0]) +"\';")
logger.debug('Query is %s', query)
print(query)
cur.execute(query)
cnx.commit()
#Calculate 90D Vol
query = ("SELECT round(stddev(PercentReturn),2) as vol90 from " + CCY1 + CCY2 + " where ID BETWEEN " + str(ID90[0]) + " AND " + str(ID1[0]) + ";")
logger.debug('Query is %s', query)
cur.execute(query)
for (vol90) in cur:
ninetyd = truncate((vol90[0] * math.sqrt(252)),2) #Final Moving Average Value
logger.debug('ninetyd is %s', ninetyd)
query = ("UPDATE "+ Table +" SET ninetyvol = " + str(ninetyd) + " where ID =\'" + str(ID1[0]) +"\';")
logger.debug('Query is %s', query)
print(query)
cur.execute(query)
cnx.commit()
Flag = 1
logger.debug('Flag set to %s', Flag)
print(Flag)
return(Flag)
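# --- Hedged reference sketch (not part of the original flow). The handler
# above derives true range, ATR and annualized volatility with one SQL query
# per value; the same math over plain arrays looks like this (function names
# are illustrative, not from this codebase):
def true_range(high, low, prev_close):
    """Largest of Wilder's three true-range candidates."""
    return max(high - low, abs(high - prev_close), abs(low - prev_close))
def annualized_vol(pct_returns, window):
    """Population stddev of the last `window` daily %-returns, x sqrt(252)."""
    import numpy as np
    return round(float(np.std(pct_returns[-window:])) * math.sqrt(252), 2)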
while Flag == 0:
logger.debug('Flag set to %s', Flag)
conn = Connection.create(port=4002, clientId=999)
conn.connect()
logger.debug('Connecting to Server')
time.sleep(1)
conn.register(reply_handler,'HistoricalData') #By registering "HistoricalData" --the Method name only --we can eliminate all the open order garbage
logger.debug('Registered HistoricalData Reply Handler')
time.sleep(1)
qqq = Contract()
qqq.m_symbol = Table
qqq.m_secType = 'STK'
qqq.m_exchange = 'SMART:ISLAND'
qqq.m_currency = 'USD'
logger.debug('Requesting historical data')
conn.reqHistoricalData(1, qqq, '', '1 D', '1 day', 'TRADES', 0, 1)
logger.debug('Returned from Reply Handler')
time.sleep(1) #give IB time to send us messages
logger.debug('Disconnecting from Server')
conn.disconnect()
logger.debug('Finished Daily OHLC') | [
"[email protected]"
] | |
18e7fca8d4143e471221237e49f1a2367953adc1 | 1f2445a190e1fdb36b8ace15cdd1de799c4c61b4 | /arst/command_push.py | cc707d2a8c5c30ec522be85979a30474551b1f43 | [] | no_license | bmustiata/ars-py | f9860a48bb63bdf781627108e57d4a8431e35f0a | 0a641399807f17efeb50d355770f014721aa4d38 | refs/heads/master | 2021-06-13T01:05:40.023617 | 2020-11-08T23:50:01 | 2020-11-08T23:50:01 | 140,120,138 | 0 | 0 | null | 2021-03-26T00:27:14 | 2018-07-07T22:15:34 | Python | UTF-8 | Python | false | false | 1,156 | py | import os.path
import pathlib
import shutil
from typing import List
from termcolor_util import yellow
def push_files_to_template(
projects_folder: str, project_name: str, files_to_push: List[str]
) -> None:
for file_name in files_to_push:
recursively_push_file(projects_folder, project_name, file_name)
def recursively_push_file(
projects_folder: str, project_name: str, file_name: str
) -> None:
print(
yellow("Pushing"),
yellow(file_name, bold=True),
yellow("to"),
yellow(project_name, bold=True),
)
target_file_name = os.path.join(projects_folder, project_name, file_name)
if os.path.isdir(file_name):
pathlib.Path(target_file_name).mkdir(parents=True, exist_ok=True)
for nested_file_name in os.listdir(file_name):
recursively_push_file(
projects_folder=projects_folder,
project_name=project_name,
file_name=os.path.join(file_name, nested_file_name),
)
return
pathlib.Path(target_file_name).parent.mkdir(parents=True, exist_ok=True)
shutil.copy(file_name, target_file_name)
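# --- Hedged usage sketch (not part of the original module; paths are
# illustrative only) ---
# push_files_to_template(
#     projects_folder='~/.projects',
#     project_name='python-service',
#     files_to_push=['README.md', 'ci'],
# )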
| [
"[email protected]"
] | |
56e5633ed8f47feba79f4997982cf5abf1919f18 | 941b25a0d0ccd25e4e64293defc2b50a61fccb01 | /Board.py | cb026329b6c90b83267f78d6cc90800b607a043b | [] | no_license | fanzhangg/sliding-tiles | c5a396818ec2d7449309f773df37a46ec7b41c8e | 334bb7df76436aa9429ff6132db8a9ea1afce35f | refs/heads/master | 2020-04-08T20:06:37.554387 | 2018-11-29T15:20:52 | 2018-11-29T15:20:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 568 | py | class Board:
def __init__(self, row, col):
self.ROW = row
self.COL = col
self.tiles = []
def initialize_board(self, tiles: tuple or list) -> None:
expected_size = self.ROW * self.COL - 1
print('The expected size is', expected_size)
if len(tiles) == expected_size:
self.tiles.append(tiles)
        elif len(tiles) > expected_size:
            raise IndexError("The size of tiles exceeds the expected size")
        else:
            raise IndexError("The size of tiles is less than the expected size")
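# --- Hedged usage sketch (not part of the original file) ---
if __name__ == '__main__':
    board = Board(3, 3)                      # 3*3 - 1 = 8 tiles expected
    board.initialize_board([1, 2, 3, 4, 5, 6, 7, 8])
    print(board.tiles)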
| [
"[email protected]"
] | |
285aa96f98f22097cb153d7b2e18d6c25cb6e2a0 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_152/ch28_2020_09_16_11_46_15_586916.py | 09f170029eaaabc036fde28d6663bd5331c376db | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 64 | py | s =0
n = 0
while n < 100:
    s = s + 1 / (2 ** n)
    n = n + 1
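# Worked check: the loop sums the geometric series
# sum_{n=0}^{99} (1/2)**n = 2 - 2**-99, so the value printed below is ~2.0.
assert abs(s - (2 - 2 ** -99)) < 1e-12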
print(s) | [
"[email protected]"
] | |
f37a13bf85a34be9aa47183fa9763dcb850c8bbf | 4fb1239a9fd39b0abecde0f0f0b81e1cb3b8f44a | /isy994/items/devices/device_insteon_contact.py | 3a2c8b43a4fe7d50bf9c0317dd32185ab7f7d5a2 | [
"MIT"
] | permissive | simplextech/ISY994v5 | f361066e7712bc590077bdf2799223b136ac6266 | f8e485452a02113f95b1a0f1a57a4d30075c1070 | refs/heads/master | 2020-07-23T00:26:16.730900 | 2019-09-10T03:16:34 | 2019-09-10T03:16:34 | 207,382,914 | 0 | 0 | MIT | 2019-09-09T18:58:59 | 2019-09-09T18:58:59 | null | UTF-8 | Python | false | false | 932 | py | #! /usr/bin/env python
from .device_contact import Device_Contact
from .device_insteon_base import Device_Insteon_Base
class Device_Insteon_Contact(Device_Contact,Device_Insteon_Base):
def __init__(self, container, device_info):
Device_Contact.__init__(self,container,device_info.name,device_info.address)
Device_Insteon_Base.__init__(self, device_info)
if device_info.property_value:
try:
if int(device_info.property_value) > 0:
self.set_property('contact','open')
else:
self.set_property('contact','closed')
except:
pass
def process_websocket_event(self,event):
if event.control == 'ST':
if int(event.action) > 0:
self.set_property('contact','open')
else:
self.set_property('contact','closed')
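# --- Hedged refactor sketch (not part of the original file) ---
# The open/closed mapping is duplicated in __init__ and the websocket
# handler; a module-level helper would keep both paths in sync:
# def _contact_state(raw):
#     return 'open' if int(raw) > 0 else 'closed'
# ...then: self.set_property('contact', _contact_state(event.action))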
| [
"[email protected]"
] | |
a4412ae9e4d031c23ba431dffde914d8ead12b9e | a55fcbe94032f98f8a858cebf7cfe843410aea76 | /hw4/env/lib/python3.6/config-3.6m-darwin/python-config.py | 6eaf21054fc295f5954d9f15d26efbc54c161360 | [] | no_license | quentintruong/UCLA-ECE239AS-W19 | a3aca9302125ee7b85f8cecf82485782718d8266 | f734aa7a8178b64b309761f4a390ed4689c4caed | refs/heads/master | 2022-12-10T19:01:28.589101 | 2020-02-14T08:00:33 | 2020-02-14T08:00:33 | 166,895,199 | 1 | 0 | null | 2022-12-08T01:43:39 | 2019-01-21T23:34:01 | Jupyter Notebook | UTF-8 | Python | false | false | 80 | py | /Users/quentintruong/anaconda3/lib/python3.6/config-3.6m-darwin/python-config.py | [
"[email protected]"
] | |
ad0204c58539514ac30daab92ddf9c5e97cfa7ad | d49390f27ee954ddf5f7f1a9876320e1adec8ad4 | /tests/test_utils.py | 7e47d12b47398cef00ccd93ae15f60caac575053 | [
"MIT"
] | permissive | ssato/pytest-data-from-files | 431e83827d4a2e14dd029089c8fa020b821b0683 | deca8e3ae50351299f90bfe1561e5a32474812fa | refs/heads/main | 2023-08-19T06:32:59.056406 | 2021-10-13T15:15:07 | 2021-10-13T15:15:07 | 412,952,889 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 690 | py | #
# Copyright (C) 2021 Satoru SATOH <[email protected]>
# SPDX-License-Identifier: MIT
#
# pylint: disable=missing-docstring
"""Test cases for tests.utils."""
import pytest
from . import utils as TT
def test_get_basename():
assert TT.get_basename(__file__) == 'utils'
@pytest.mark.parametrize(
('xss', 'exp'),
(
([[]], []),
((()), []),
([[1, 2, 3], [4, 5]], [1, 2, 3, 4, 5]),
([[1, 2, 3], [4, 5, [6, 7]]], [1, 2, 3, 4, 5, [6, 7]]),
(((1, 2, 3), (4, 5, (6, 7))), [1, 2, 3, 4, 5, (6, 7)]),
(((i, i * 2) for i in range(3)), [0, 0, 1, 2, 2, 4]),
)
)
def test_concat(xss, exp):
assert list(TT.concat(xss)) == exp
# vim:sw=4:ts=4:et:
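# --- Hedged reference sketch (the real tests.utils.concat may differ).
# A one-level flatten consistent with the parametrized cases above:
import itertools
def _concat_reference(xss):
    return itertools.chain.from_iterable(xss)
def test_concat_reference_matches_cases():
    assert list(_concat_reference([[1, 2, 3], [4, 5]])) == [1, 2, 3, 4, 5]
    assert list(_concat_reference([[]])) == []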
| [
"[email protected]"
] | |
f0272683f3edfbcf1f9c2e00b91bf63b2b057c61 | a9e051485379fb7e569a7c8458045e9eb56d4cf8 | /docker/water/sph/tags/pysph/pysph/sph/tests/test_acceleration_eval.py | b17fee871d6139baca63491dc96ca7de83677b09 | [
"LicenseRef-scancode-public-domain",
"Apache-2.0",
"BSD-3-Clause"
] | permissive | liujiamingustc/phd | 7634056500c481d39fa036bf0ed744c1d13b0035 | 4f815a738abad43531d02ac66f5bd0d9a1def52a | refs/heads/master | 2020-05-17T07:02:56.000146 | 2019-04-24T15:04:19 | 2019-04-24T15:04:19 | 183,567,207 | 4 | 0 | null | 2019-04-26T06:04:37 | 2019-04-26T06:04:37 | null | UTF-8 | Python | false | false | 17,647 | py | # Standard library imports.
try:
# This is for Python-2.6.x
import unittest2 as unittest
except ImportError:
import unittest
# Library imports.
import pytest
import numpy as np
# Local imports.
from pysph.base.config import get_config
from pysph.base.utils import get_particle_array
from pysph.sph.equation import Equation, Group
from pysph.sph.acceleration_eval import (AccelerationEval,
check_equation_array_properties)
from pysph.sph.basic_equations import SummationDensity
from pysph.base.kernels import CubicSpline
from pysph.base.nnps import LinkedListNNPS as NNPS
from pysph.sph.sph_compiler import SPHCompiler
from pysph.base.reduce_array import serial_reduce_array
class DummyEquation(Equation):
def initialize(self, d_idx, d_rho, d_V):
d_rho[d_idx] = d_V[d_idx]
def loop(self, d_idx, d_rho, s_idx, s_m, s_u, WIJ):
d_rho[d_idx] += s_m[s_idx]*WIJ
def post_loop(self, d_idx, d_rho, s_idx, s_m, s_V, WIJ):
d_rho[d_idx] += s_m[s_idx]*WIJ
class FindTotalMass(Equation):
def initialize(self, d_idx, d_m, d_total_mass):
# FIXME: This is stupid and should be fixed if we add a separate
# initialize_once function or so.
d_total_mass[0] = 0.0
def post_loop(self, d_idx, d_m, d_total_mass):
d_total_mass[0] += d_m[d_idx]
class TestCheckEquationArrayProps(unittest.TestCase):
def test_should_raise_runtime_error_when_invalid_dest_source(self):
# Given
f = get_particle_array(name='f')
# When
eq = SummationDensity(dest='fluid', sources=['f'])
# Then
self.assertRaises(
RuntimeError,
check_equation_array_properties,
eq, [f]
)
# When
eq = SummationDensity(dest='f', sources=['fluid'])
# Then
self.assertRaises(
RuntimeError,
check_equation_array_properties,
eq, [f]
)
def test_should_pass_when_properties_exist(self):
# Given
f = get_particle_array(name='f')
# When
eq = SummationDensity(dest='f', sources=['f'])
# Then
check_equation_array_properties(eq, [f])
def test_should_fail_when_props_dont_exist(self):
# Given
f = get_particle_array(name='f')
# When
eq = DummyEquation(dest='f', sources=['f'])
# Then
self.assertRaises(RuntimeError,
check_equation_array_properties, eq, [f])
def test_should_fail_when_src_props_dont_exist(self):
# Given
f = get_particle_array(name='f')
f.add_property('V')
s = get_particle_array(name='s')
# When
eq = DummyEquation(dest='f', sources=['f', 's'])
# Then
self.assertRaises(RuntimeError,
check_equation_array_properties, eq, [f, s])
def test_should_pass_when_src_props_exist(self):
# Given
f = get_particle_array(name='f')
f.add_property('V')
s = get_particle_array(name='s')
s.add_property('V')
# When
eq = DummyEquation(dest='f', sources=['f', 's'])
# Then
check_equation_array_properties(eq, [f, s])
def test_should_check_constants(self):
# Given
f = get_particle_array(name='f')
# When
eq = FindTotalMass(dest='f', sources=['f'])
# Then.
self.assertRaises(RuntimeError,
check_equation_array_properties, eq, [f])
# When.
f.add_constant('total_mass', 0.0)
# Then.
check_equation_array_properties(eq, [f])
class SimpleEquation(Equation):
def __init__(self, dest, sources):
super(SimpleEquation, self).__init__(dest, sources)
self.count = 0
def initialize(self, d_idx, d_u, d_au):
d_u[d_idx] = 0.0
d_au[d_idx] = 0.0
def loop(self, d_idx, d_au, s_idx, s_m):
d_au[d_idx] += s_m[s_idx]
def post_loop(self, d_idx, d_u, d_au):
d_u[d_idx] = d_au[d_idx]
def converged(self):
self.count += 1
result = self.count - 1
if result > 0:
# Reset the count for the next loop.
self.count = 0
return result
class MixedTypeEquation(Equation):
def initialize(self, d_idx, d_u, d_au, d_pid, d_tag):
d_u[d_idx] = 0.0 + d_pid[d_idx]
d_au[d_idx] = 0.0 + d_tag[d_idx]
def loop(self, d_idx, d_au, s_idx, s_m, s_pid, s_tag):
d_au[d_idx] += s_m[s_idx] + s_pid[s_idx] + s_tag[s_idx]
def post_loop(self, d_idx, d_u, d_au, d_pid):
d_u[d_idx] = d_au[d_idx] + d_pid[d_idx]
class SimpleReduction(Equation):
def initialize(self, d_idx, d_au):
d_au[d_idx] = 0.0
def reduce(self, dst):
dst.total_mass[0] = serial_reduce_array(dst.m, op='sum')
if dst.gpu is not None:
dst.gpu.push('total_mass')
class TestAccelerationEval1D(unittest.TestCase):
def setUp(self):
self.dim = 1
n = 10
dx = 1.0/(n-1)
x = np.linspace(0, 1, n)
m = np.ones_like(x)
h = np.ones_like(x)*dx*1.05
pa = get_particle_array(name='fluid', x=x, h=h, m=m)
self.pa = pa
def _make_accel_eval(self, equations, cache_nnps=False):
arrays = [self.pa]
kernel = CubicSpline(dim=self.dim)
a_eval = AccelerationEval(
particle_arrays=arrays, equations=equations, kernel=kernel
)
comp = SPHCompiler(a_eval, integrator=None)
comp.compile()
nnps = NNPS(dim=kernel.dim, particles=arrays, cache=cache_nnps)
nnps.update()
a_eval.set_nnps(nnps)
return a_eval
def test_should_support_constants(self):
# Given
pa = self.pa
pa.add_constant('total_mass', 0.0)
equations = [FindTotalMass(dest='fluid', sources=['fluid'])]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
self.assertEqual(pa.total_mass, 10.0)
def test_should_not_iterate_normal_group(self):
# Given
pa = self.pa
equations = [SimpleEquation(dest='fluid', sources=['fluid'])]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.asarray([3., 4., 5., 5., 5., 5., 5., 5., 4., 3.])
self.assertListEqual(list(pa.u), list(expect))
def test_should_work_with_cached_nnps(self):
# Given
pa = self.pa
equations = [SimpleEquation(dest='fluid', sources=['fluid'])]
a_eval = self._make_accel_eval(equations, cache_nnps=True)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.asarray([3., 4., 5., 5., 5., 5., 5., 5., 4., 3.])
self.assertListEqual(list(pa.u), list(expect))
def test_should_iterate_iterated_group(self):
# Given
pa = self.pa
equations = [Group(
equations=[
SimpleEquation(dest='fluid', sources=['fluid']),
SimpleEquation(dest='fluid', sources=['fluid']),
],
iterate=True
)]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.asarray([3., 4., 5., 5., 5., 5., 5., 5., 4., 3.])*2
self.assertListEqual(list(pa.u), list(expect))
def test_should_iterate_nested_groups(self):
pa = self.pa
equations = [Group(
equations=[
Group(
equations=[SimpleEquation(dest='fluid', sources=['fluid'])]
),
Group(
equations=[SimpleEquation(dest='fluid', sources=['fluid'])]
),
],
iterate=True,
)]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.asarray([3., 4., 5., 5., 5., 5., 5., 5., 4., 3.])
self.assertListEqual(list(pa.u), list(expect))
def test_should_run_reduce(self):
# Given.
pa = self.pa
pa.add_constant('total_mass', 0.0)
equations = [SimpleReduction(dest='fluid', sources=['fluid'])]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.sum(pa.m)
self.assertAlmostEqual(pa.total_mass[0], expect, 14)
def test_should_work_with_non_double_arrays(self):
# Given
pa = self.pa
equations = [MixedTypeEquation(dest='fluid', sources=['fluid'])]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.asarray([3., 4., 5., 5., 5., 5., 5., 5., 4., 3.])
self.assertListEqual(list(pa.u), list(expect))
class EqWithTime(Equation):
def initialize(self, d_idx, d_au, t, dt):
d_au[d_idx] = t + dt
def loop(self, d_idx, d_au, s_idx, s_m, t, dt):
d_au[d_idx] += t + dt
class TestAccelerationEval1DGPU(unittest.TestCase):
# Fix this to be a subclass of TestAccelerationEval1D
def setUp(self):
self.dim = 1
n = 10
dx = 1.0/(n-1)
x = np.linspace(0, 1, n)
m = np.ones_like(x)
h = np.ones_like(x)*dx*1.05
pa = get_particle_array(name='fluid', x=x, h=h, m=m)
self.pa = pa
def _make_accel_eval(self, equations, cache_nnps=True):
pytest.importorskip('pysph.base.gpu_nnps')
from pysph.base.gpu_nnps import ZOrderGPUNNPS as GPUNNPS
arrays = [self.pa]
kernel = CubicSpline(dim=self.dim)
a_eval = AccelerationEval(
particle_arrays=arrays, equations=equations, kernel=kernel,
backend='opencl'
)
comp = SPHCompiler(a_eval, integrator=None)
comp.compile()
self.sph_compiler = comp
nnps = GPUNNPS(dim=kernel.dim, particles=arrays, cache=cache_nnps)
nnps.update()
a_eval.set_nnps(nnps)
return a_eval
def test_accel_eval_should_work_on_gpu(self):
# Given
pa = self.pa
equations = [SimpleEquation(dest='fluid', sources=['fluid'])]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.asarray([3., 4., 5., 5., 5., 5., 5., 5., 4., 3.])
pa.gpu.pull('u')
self.assertListEqual(list(pa.u), list(expect))
def test_precomputed_should_work_on_gpu(self):
# Given
pa = self.pa
equations = [SummationDensity(dest='fluid', sources=['fluid'])]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.asarray([7.357, 9.0, 9., 9., 9., 9., 9., 9., 9., 7.357])
pa.gpu.pull('rho')
print(pa.rho, pa.gpu.rho)
self.assertTrue(np.allclose(expect, pa.rho, atol=1e-2))
def test_precomputed_should_work_on_gpu_with_double(self):
orig = get_config().use_double
def _cleanup():
get_config().use_double = orig
get_config().use_double = True
self.addCleanup(_cleanup)
# Given
pa = self.pa
equations = [SummationDensity(dest='fluid', sources=['fluid'])]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.asarray([7.357, 9.0, 9., 9., 9., 9., 9., 9., 9., 7.357])
pa.gpu.pull('rho')
print(pa.rho, pa.gpu.rho)
self.assertTrue(np.allclose(expect, pa.rho, atol=1e-2))
def test_equation_with_time_should_work_on_gpu(self):
# Given
pa = self.pa
equations = [EqWithTime(dest='fluid', sources=['fluid'])]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.2, 0.1)
# Then
expect = np.asarray([4., 5., 6., 6., 6., 6., 6., 6., 5., 4.])*0.3
pa.gpu.pull('au')
print(pa.au, expect)
self.assertTrue(np.allclose(expect, pa.au))
def test_update_nnps_is_called_for_opencl(self):
# Given
equations = [
Group(
equations=[
SummationDensity(dest='fluid', sources=['fluid']),
],
update_nnps=True
),
Group(
equations=[EqWithTime(dest='fluid', sources=['fluid'])]
),
]
# When
a_eval = self._make_accel_eval(equations)
# Then
h = a_eval.c_acceleration_eval.helper
assert len(h.calls) == 5
call = h.calls[0]
assert call['type'] == 'kernel'
assert call['method'].function_name == 'g0_fluid_initialize'
assert call['loop'] is False
call = h.calls[1]
assert call['type'] == 'kernel'
assert call['method'].function_name == 'g0_fluid_on_fluid_loop'
assert call['loop'] is True
call = h.calls[2]
assert call['type'] == 'method'
assert call['method'] == 'update_nnps'
call = h.calls[3]
assert call['type'] == 'kernel'
assert call['method'].function_name == 'g1_fluid_initialize'
assert call['loop'] is False
call = h.calls[4]
assert call['type'] == 'kernel'
assert call['method'].function_name == 'g1_fluid_on_fluid_loop'
assert call['loop'] is True
def test_should_stop_iteration_with_max_iteration_on_gpu(self):
pa = self.pa
class SillyEquation(Equation):
def loop(self, d_idx, d_au, s_idx, s_m):
d_au[d_idx] += s_m[s_idx]
def converged(self):
return 0
equations = [Group(
equations=[
Group(
equations=[SillyEquation(dest='fluid', sources=['fluid'])]
),
Group(
equations=[SillyEquation(dest='fluid', sources=['fluid'])]
),
],
iterate=True, max_iterations=2,
)]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.asarray([3., 4., 5., 5., 5., 5., 5., 5., 4., 3.])*4.0
pa.gpu.pull('au')
self.assertListEqual(list(pa.au), list(expect))
def test_should_stop_iteration_with_converged_on_gpu(self):
pa = self.pa
class SillyEquation1(Equation):
def __init__(self, dest, sources):
super(SillyEquation1, self).__init__(dest, sources)
self.conv = 0
def loop(self, d_idx, d_au, s_idx, s_m):
d_au[d_idx] += s_m[s_idx]
def post_loop(self, d_idx, d_au):
if d_au[d_idx] > 19.0:
self.conv = 1
def converged(self):
return self.conv
equations = [Group(
equations=[
Group(
equations=[SillyEquation1(dest='fluid', sources=['fluid'])]
),
Group(
equations=[SillyEquation1(dest='fluid', sources=['fluid'])]
),
],
iterate=True, max_iterations=10,
)]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.asarray([3., 4., 5., 5., 5., 5., 5., 5., 4., 3.])*6.0
pa.gpu.pull('au')
self.assertListEqual(list(pa.au), list(expect))
def test_should_handle_helper_functions_on_gpu(self):
pa = self.pa
def helper(x=1.0):
return x*1.5
class SillyEquation2(Equation):
def initialize(self, d_idx, d_au, d_m):
d_au[d_idx] += helper(d_m[d_idx])
def _get_helpers_(self):
return [helper]
equations = [SillyEquation2(dest='fluid', sources=['fluid'])]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.ones(10)*1.5
pa.gpu.pull('au')
self.assertListEqual(list(pa.au), list(expect))
def test_should_run_reduce_when_using_gpu(self):
# Given.
pa = self.pa
pa.add_constant('total_mass', 0.0)
equations = [SimpleReduction(dest='fluid', sources=['fluid'])]
a_eval = self._make_accel_eval(equations)
# When
a_eval.compute(0.1, 0.1)
# Then
expect = np.sum(pa.m)
pa.gpu.pull('total_mass')
self.assertAlmostEqual(pa.total_mass[0], expect, 14)
def test_get_equations_with_converged(self):
pytest.importorskip('pysph.base.gpu_nnps')
from pysph.sph.acceleration_eval_opencl_helper import \
get_equations_with_converged
# Given
se = SimpleEquation(dest='fluid', sources=['fluid'])
se1 = SimpleEquation(dest='fluid', sources=['fluid'])
sd = SummationDensity(dest='fluid', sources=['fluid'])
me = MixedTypeEquation(dest='fluid', sources=['fluid'])
eq_t = EqWithTime(dest='fluid', sources=['fluid'])
g = Group(
equations=[
Group(equations=[Group(equations=[se, sd])],
iterate=True, max_iterations=10),
Group(equations=[me, eq_t, se1]),
],
)
# When
eqs = get_equations_with_converged(g)
# Then
assert eqs == [se, se1]
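# --- Hedged end-to-end sketch (not part of the original tests; mirrors
# _make_accel_eval). Evaluates SummationDensity once on a 1D strip of
# particles; the smoothing length 0.117 ~ 1.05*dx matches the fixtures above.
def _summation_density_demo():
    x = np.linspace(0, 1, 10)
    pa = get_particle_array(name='fluid', x=x, m=np.ones_like(x),
                            h=np.ones_like(x) * 0.117)
    kernel = CubicSpline(dim=1)
    a_eval = AccelerationEval(
        particle_arrays=[pa],
        equations=[SummationDensity(dest='fluid', sources=['fluid'])],
        kernel=kernel)
    SPHCompiler(a_eval, integrator=None).compile()
    nnps = NNPS(dim=1, particles=[pa], cache=False)
    nnps.update()
    a_eval.set_nnps(nnps)
    a_eval.compute(0.0, 0.1)
    return pa.rho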
| [
"[email protected]"
] | |
a59342015429a8fe98d84c3688d5f300989c9754 | ef158af9d47fb1f0c974b49405174ba5b34e4721 | /polu/prediction_site/incendie.py | 7d9797880c7e7d88bf9c409c3146b094c9c89755 | [] | no_license | LeGrosLezard/bobo | 1227bcae22d9eb7d9e0423009cae154df5466994 | 7c50de512fb22c8bdf1a1127307fc4fd2f371152 | refs/heads/master | 2020-07-01T17:38:14.145955 | 2019-07-01T21:29:49 | 2019-07-01T21:29:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,284 | py | import os
import cv2
import json
import requests
import datetime
import urllib.request
from bs4 import *
PATH_LYON = 'https://www.lyoncapitale.fr/?s=incendie'
PATH_MARSEILLE = 'https://www.20minutes.fr/search?q=incendie+marseille'
PATH_PARIS = 'https://www.20minutes.fr/search?q=incendie+paris'
def incendie(ville):
date = datetime.datetime.now()
jour = date.day
mois = date.month
année = date.year
dico = {'1':'janvier','2':'fevrier','3':'mars','4':'avril',
'5':'mai','6':'juin','7':'juillet','8':'août',
'9':'septembre','10':'octobre','11':'novembre','12':'decembre'}
for cle, valeur in dico.items():
if str(mois) == cle:
mois = valeur
ville = ville.lower()
if ville == 'lyon':
path = PATH_LYON
r = requests.get(path)
page = r.content
soup = BeautifulSoup(page, "html.parser")
propriete = soup.find_all("div")
liste = []
liste.append(str(propriete))
daate = str(jour) + ' ' + str(mois) + ' ' + str(année)
a = str(liste).find(str(daate))
elif ville == 'paris':
path = PATH_PARIS
elif ville == 'marseille':
path = PATH_MARSEILLE
r = requests.get(path)
page = r.content
soup = BeautifulSoup(page, "html.parser")
propriete = soup.find_all("div")
liste = []
liste.append(str(propriete))
a = str(liste).find('incendie')
liste = liste[0][a-1000:a+1000]
mois_chi = date.month
c = 0
for i in str(mois_chi):
c+=1
if c == 1:
daate1 = str(année) + '-0' + str(mois_chi)+'-'+str(jour)
daate3 = str(jour) + '-0' + str(mois_chi)+'-'+str(année)
else:
daate1 = str(année) + '-' + str(mois_chi)+'-'+str(jour)
daate3 = str(jour) + '-' + str(mois_chi)+'-'+str(année)
daate = str(jour) + ' ' + str(mois) + ' ' + str(année)
b = str(liste).find(daate)
c = str(liste).find(daate1)
d = str(liste).find(daate3)
if b >= 0 or c >= 0 or d >=0:
return 'oui'
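# --- Hedged sketch (not part of the original function). The digit counting
# and manual '0' padding above can be collapsed with zfill; note the original
# never zero-pads the day, so forms like '03-06-2020' are not matched:
# jour2, mois2 = str(jour).zfill(2), str(date.month).zfill(2)
# daate  = f"{jour} {mois} {année}"      # e.g. "3 juin 2020"
# daate1 = f"{année}-{mois2}-{jour2}"    # e.g. "2020-06-03"
# daate3 = f"{jour2}-{mois2}-{année}"    # e.g. "03-06-2020"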
| [
"[email protected]"
] | |
301e49dbb4d7faa407729ff514dd2a4fecdb2b45 | 540dbf1622959cc1b3a6853379f6f1f502bdbc24 | /offers/urls.py | 4ea3e242310521d73bca6bf928f385ece1bf7173 | [
"ISC"
] | permissive | pmaigutyak/mp-shop-offers | a5031af2827cee34e671793164ea786d2c4ee016 | 6f3d0934193c7d727d41481b0ebb164b03e34808 | refs/heads/master | 2023-08-02T23:02:09.498271 | 2023-07-19T11:01:34 | 2023-07-19T11:01:34 | 148,797,797 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py |
from django.urls import path
from offers import views
app_name = 'offers'
urlpatterns = [
path('modal/<int:product_id>/', views.get_price_offer_modal, name='modal'),
path('send/<int:product_id>/', views.send_price_offer, name='send'),
]
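# --- Hedged usage sketch (not part of the original module). With
# app_name = 'offers', the routes are reversed through the namespace; the
# exact prefix depends on how this URLconf is included:
# from django.urls import reverse
# reverse('offers:modal', args=[42])   # e.g. '/offers/modal/42/'
# reverse('offers:send', args=[42])    # e.g. '/offers/send/42/'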
| [
"[email protected]"
] | |
99eb738456b493ec54c68eba2eeb0e72884b2519 | 09a8e7ca229a6a95cdd4a426a1c220e9daf98355 | /RBSP/Tools/IntegrateSpectrum.py | afe1dc5f26134ae4cc762e18d03b76f09fcdcc78 | [
"MIT"
] | permissive | mattkjames7/RBSP | 8547bda29ec6c878483c21dc3281852abb256bd4 | 25c37cbba18b681a2f9a4e7955e49a49fc36d698 | refs/heads/master | 2023-07-20T00:31:43.487076 | 2023-07-14T12:30:04 | 2023-07-14T12:30:04 | 177,828,232 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,344 | py | import numpy as np
from .RelVelocity import RelVelocity
def IntegrateSpectrum(E,PSD,m,Omega,Erange=(0.0,np.inf)):
'''
Integrate the phase space density to get the partial density.
Inputs
======
E : float
Energy in keV.
PSD : float
Phase space density s^3 m^-6. (this includes density)
	m : float
		Mass in kg.
	Omega : float
		Solid angle factor multiplied into the integral (e.g. 4*pi for an
		assumed isotropic distribution).
Erange : tuple
2-element tuple specifying the energy range over which to
integrate.
Returns
=======
	n : float
		Partial number density in cm^-3.
	NOTE: This probably won't work for relativistic particles, so I
	should probably rewrite this in terms of E instead of V
'''
#firstly work out the number of spectra
if len(PSD.shape) == 1:
ns = 1
else:
ns = PSD.shape[0]
#limit E and PSD to within the energy range
if len(E.shape) == 1:
e = np.array([E]*ns)
else:
e = np.array(E)
etmp = np.nanmean(e,axis=0)
use = np.where((etmp >= Erange[0]) & (etmp <= Erange[1]))[0]
e = e[:,use]
if len(PSD.shape) == 1:
p = np.array([PSD[use]]).astype('float64')
else:
p = PSD[:,use].astype('float64')
#convert E to V
v = RelVelocity(e,m).astype('float64')
	#integrate, convert m^-3 to cm^-3
n = np.zeros(ns,dtype='float64')
pv2 = p*v**2
for i in range(0,ns):
use = np.where(p[i] > 0)[0]
if use.size > 1:
n[i] = 1e-6*np.trapz(pv2[i,use],v[i,use])*Omega
else:
n[i] = np.nan
return n
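# --- Hedged usage sketch (not part of the original module; values are
# illustrative only) ---
# import numpy as np
# mp = 1.6726e-27                        # proton mass, kg
# E = np.logspace(0, 3, 50)              # energies, keV
# PSD = 1e-12 * np.exp(-E / 10.0)        # synthetic PSD, s^3 m^-6
# n = IntegrateSpectrum(E, PSD, mp, Omega=4*np.pi, Erange=(1.0, 100.0))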
| [
"[email protected]"
] | |
7f0a5b1b270b7e230361b6f791d4e0774ca6ed98 | dbe7e1d9fe2457c26f83095d941e4392e7d30f8c | /django_dashboard/api/test.py | 81e1e97da6599b88239c60f04be0a535b2b26525 | [
"MIT"
] | permissive | keepexploring/smartbiogas | 51e124735ec04bc6b87a8ac75c66c83de6865001 | ca663435b05666113e3c0cb55e6f087c61497208 | refs/heads/master | 2022-12-12T10:42:37.412038 | 2018-07-18T15:29:04 | 2018-07-18T15:29:04 | 111,402,799 | 0 | 0 | MIT | 2022-12-08T00:56:54 | 2017-11-20T11:39:05 | JavaScript | UTF-8 | Python | false | false | 1,051 | py | # Imports added so this snippet is importable (assumes Django + Tastypie):
from django.db import models
import tastypie.fields
from tastypie.authorization import Authorization
from tastypie.resources import ModelResource
class Container(models.Model):
pass
class ContainerItem(models.Model):
blog = models.ForeignKey('Container', related_name='items')
# For testing purposes only
class ContainerResource(ModelResource):
class Meta:
queryset = Container.objects.all()
authorization = Authorization()
class ContainerItemResource(ModelResource):
blog = tastypie.fields.ForeignKey(ContainerResource, 'blog')
class Meta:
queryset = ContainerItem.objects.all()
authorization = Authorization()
class BiogasPlants(models.Model):
pass
class PendingJobs(models.Model):
blog = models.ForeignKey('BiogasPlants', related_name='items')
# For testing purposes only
class BiogasPlantResource(ModelResource):
class Meta:
queryset = BiogasPlants.objects.all()
authorization = Authorization()
class PendingJobResource(ModelResource):
blog = tastypie.fields.ForeignKey(BiogasPlantResource, 'blog')
class Meta:
        queryset = PendingJobs.objects.all()  # this resource wraps PendingJobs; the ContainerItem queryset was a copy-paste slip
authorization = Authorization() | [
"[email protected]"
] | |
b7243813f8bf290dd27c60441ca3c453a67e225f | 024214bff5cdd43874d30143ee90131a985c5ce3 | /vycontrol/interface/urls.py | 0c3aaab513ffb0106d0732d02cc9e8dd19f611e2 | [
"MIT"
] | permissive | KennethEhmsen/vycontrol | c20e2d793bd03005ddfd314abc62b4ca3fd292e3 | 5dbebfa1b299ad20b60131d95291ee91c3b9df5c | refs/heads/master | 2023-08-10T11:43:36.001119 | 2020-05-23T05:03:47 | 2020-05-23T05:03:47 | 267,380,513 | 1 | 0 | MIT | 2021-09-22T19:40:01 | 2020-05-27T17:13:06 | null | UTF-8 | Python | false | false | 388 | py | from django.urls import path
from . import views
app_name = 'interface'
urlpatterns = [
path('', views.index, name='interface-list'),
path('interface-show/<slug:interface_type>/<slug:interface_name>', views.interfaceshow, name='interface-show'),
path('interface-firewall/<slug:interface_type>/<slug:interface_name>', views.interfacefirewall, name='interface-firewall'),
]
| [
"[email protected]"
] | |
270166ede0ea9aa8ef6cdedfbe9dc16ea08db746 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/RADLAN-GVRP-MIB.py | c9e0c0b6e626f01b902af3235250ea18cac1799f | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 11,809 | py | #
# PySNMP MIB module RADLAN-GVRP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RADLAN-GVRP-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:38:27 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint", "ValueRangeConstraint")
dot1dBasePort, = mibBuilder.importSymbols("BRIDGE-MIB", "dot1dBasePort")
EnabledStatus, = mibBuilder.importSymbols("P-BRIDGE-MIB", "EnabledStatus")
rnd, = mibBuilder.importSymbols("RADLAN-MIB", "rnd")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Integer32, Gauge32, ModuleIdentity, MibIdentifier, iso, IpAddress, ObjectIdentity, TimeTicks, NotificationType, Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, Bits, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "Gauge32", "ModuleIdentity", "MibIdentifier", "iso", "IpAddress", "ObjectIdentity", "TimeTicks", "NotificationType", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "Bits", "Counter32")
TimeInterval, DisplayString, TextualConvention, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "TimeInterval", "DisplayString", "TextualConvention", "TruthValue")
rlGvrp = ModuleIdentity((1, 3, 6, 1, 4, 1, 89, 64))
rlGvrp.setRevisions(('2007-01-02 00:00',))
if mibBuilder.loadTexts: rlGvrp.setLastUpdated('200701020000Z')
if mibBuilder.loadTexts: rlGvrp.setOrganization('Radlan - a MARVELL company. Marvell Semiconductor, Inc.')
rlPortGvrpTimersTable = MibTable((1, 3, 6, 1, 4, 1, 89, 64, 1), )
if mibBuilder.loadTexts: rlPortGvrpTimersTable.setStatus('current')
rlPortGvrpTimersEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 64, 1, 1), ).setIndexNames((0, "BRIDGE-MIB", "dot1dBasePort"))
if mibBuilder.loadTexts: rlPortGvrpTimersEntry.setStatus('current')
rlPortGvrpJoinTime = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 1, 1, 1), TimeInterval().clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlPortGvrpJoinTime.setStatus('current')
rlPortGvrpLeaveTime = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 1, 1, 2), TimeInterval().clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlPortGvrpLeaveTime.setStatus('current')
rlPortGvrpLeaveAllTime = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 1, 1, 3), TimeInterval().clone(1000)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlPortGvrpLeaveAllTime.setStatus('current')
rlPortGvrpOverrideGarp = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 1, 1, 4), EnabledStatus().clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlPortGvrpOverrideGarp.setStatus('current')
rlGvrpSupported = MibScalar((1, 3, 6, 1, 4, 1, 89, 64, 2), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlGvrpSupported.setStatus('current')
rlGvrpMibVersion = MibScalar((1, 3, 6, 1, 4, 1, 89, 64, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlGvrpMibVersion.setStatus('current')
rlPortGvrpStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 89, 64, 4), )
if mibBuilder.loadTexts: rlPortGvrpStatisticsTable.setStatus('current')
rlPortGvrpStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 64, 4, 1), ).setIndexNames((0, "BRIDGE-MIB", "dot1dBasePort"))
if mibBuilder.loadTexts: rlPortGvrpStatisticsEntry.setStatus('current')
rlPortGvrpStatisticsRJE = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpStatisticsRJE.setStatus('current')
rlPortGvrpStatisticsRJIn = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpStatisticsRJIn.setStatus('current')
rlPortGvrpStatisticsREmp = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpStatisticsREmp.setStatus('current')
rlPortGvrpStatisticsRLIn = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpStatisticsRLIn.setStatus('current')
rlPortGvrpStatisticsRLE = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpStatisticsRLE.setStatus('current')
rlPortGvrpStatisticsRLA = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpStatisticsRLA.setStatus('current')
rlPortGvrpStatisticsSJE = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpStatisticsSJE.setStatus('current')
rlPortGvrpStatisticsSJIn = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpStatisticsSJIn.setStatus('current')
rlPortGvrpStatisticsSEmp = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpStatisticsSEmp.setStatus('current')
rlPortGvrpStatisticsSLIn = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpStatisticsSLIn.setStatus('current')
rlPortGvrpStatisticsSLE = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpStatisticsSLE.setStatus('current')
rlPortGvrpStatisticsSLA = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpStatisticsSLA.setStatus('current')
rlPortGvrpStatisticsClear = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 4, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("activate", 1), ("passive", 2))).clone('passive')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlPortGvrpStatisticsClear.setStatus('current')
rlPortGvrpErrorStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 89, 64, 5), )
if mibBuilder.loadTexts: rlPortGvrpErrorStatisticsTable.setStatus('current')
rlPortGvrpErrorStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 64, 5, 1), ).setIndexNames((0, "BRIDGE-MIB", "dot1dBasePort"))
if mibBuilder.loadTexts: rlPortGvrpErrorStatisticsEntry.setStatus('current')
rlPortGvrpErrorStatisticsInvProt = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 5, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpErrorStatisticsInvProt.setStatus('current')
rlPortGvrpErrorStatisticsInvAtyp = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 5, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpErrorStatisticsInvAtyp.setStatus('current')
rlPortGvrpErrorStatisticsInvAval = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 5, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpErrorStatisticsInvAval.setStatus('current')
rlPortGvrpErrorStatisticsInvPlen = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 5, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpErrorStatisticsInvPlen.setStatus('current')
rlPortGvrpErrorStatisticsInvAlen = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 5, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpErrorStatisticsInvAlen.setStatus('current')
rlPortGvrpErrorStatisticsInvEvent = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 5, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rlPortGvrpErrorStatisticsInvEvent.setStatus('current')
rlPortGvrpErrorStatisticsClear = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 5, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("activate", 1), ("passive", 2))).clone('passive')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlPortGvrpErrorStatisticsClear.setStatus('current')
rlPortGvrpApplicantStatusTable = MibTable((1, 3, 6, 1, 4, 1, 89, 64, 6), )
if mibBuilder.loadTexts: rlPortGvrpApplicantStatusTable.setStatus('current')
rlPortGvrpApplicantStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 64, 6, 1), ).setIndexNames((0, "BRIDGE-MIB", "dot1dBasePort"))
if mibBuilder.loadTexts: rlPortGvrpApplicantStatusEntry.setStatus('current')
rlPortGvrpApplicantStatusValue = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 6, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("participant", 1), ("nonParticipant", 2))).clone('participant')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlPortGvrpApplicantStatusValue.setStatus('current')
rlPortGvrpRegistrationModeTable = MibTable((1, 3, 6, 1, 4, 1, 89, 64, 8), )
if mibBuilder.loadTexts: rlPortGvrpRegistrationModeTable.setStatus('current')
rlPortGvrpRegistrationModeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 89, 64, 8, 1), ).setIndexNames((0, "BRIDGE-MIB", "dot1dBasePort"))
if mibBuilder.loadTexts: rlPortGvrpRegistrationModeEntry.setStatus('current')
rlPortGvrpRegistrationModeForbidden = MibTableColumn((1, 3, 6, 1, 4, 1, 89, 64, 8, 1, 1), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rlPortGvrpRegistrationModeForbidden.setStatus('current')
mibBuilder.exportSymbols("RADLAN-GVRP-MIB", PYSNMP_MODULE_ID=rlGvrp, rlPortGvrpStatisticsEntry=rlPortGvrpStatisticsEntry, rlPortGvrpStatisticsSJE=rlPortGvrpStatisticsSJE, rlGvrp=rlGvrp, rlGvrpMibVersion=rlGvrpMibVersion, rlPortGvrpErrorStatisticsInvAlen=rlPortGvrpErrorStatisticsInvAlen, rlPortGvrpTimersTable=rlPortGvrpTimersTable, rlPortGvrpErrorStatisticsEntry=rlPortGvrpErrorStatisticsEntry, rlPortGvrpStatisticsSLA=rlPortGvrpStatisticsSLA, rlGvrpSupported=rlGvrpSupported, rlPortGvrpStatisticsRJE=rlPortGvrpStatisticsRJE, rlPortGvrpErrorStatisticsInvAtyp=rlPortGvrpErrorStatisticsInvAtyp, rlPortGvrpErrorStatisticsClear=rlPortGvrpErrorStatisticsClear, rlPortGvrpStatisticsSJIn=rlPortGvrpStatisticsSJIn, rlPortGvrpLeaveAllTime=rlPortGvrpLeaveAllTime, rlPortGvrpRegistrationModeEntry=rlPortGvrpRegistrationModeEntry, rlPortGvrpStatisticsRJIn=rlPortGvrpStatisticsRJIn, rlPortGvrpOverrideGarp=rlPortGvrpOverrideGarp, rlPortGvrpStatisticsRLA=rlPortGvrpStatisticsRLA, rlPortGvrpApplicantStatusTable=rlPortGvrpApplicantStatusTable, rlPortGvrpStatisticsSEmp=rlPortGvrpStatisticsSEmp, rlPortGvrpStatisticsClear=rlPortGvrpStatisticsClear, rlPortGvrpStatisticsRLE=rlPortGvrpStatisticsRLE, rlPortGvrpStatisticsSLE=rlPortGvrpStatisticsSLE, rlPortGvrpApplicantStatusEntry=rlPortGvrpApplicantStatusEntry, rlPortGvrpStatisticsTable=rlPortGvrpStatisticsTable, rlPortGvrpErrorStatisticsInvEvent=rlPortGvrpErrorStatisticsInvEvent, rlPortGvrpErrorStatisticsInvProt=rlPortGvrpErrorStatisticsInvProt, rlPortGvrpStatisticsRLIn=rlPortGvrpStatisticsRLIn, rlPortGvrpRegistrationModeForbidden=rlPortGvrpRegistrationModeForbidden, rlPortGvrpTimersEntry=rlPortGvrpTimersEntry, rlPortGvrpLeaveTime=rlPortGvrpLeaveTime, rlPortGvrpErrorStatisticsInvAval=rlPortGvrpErrorStatisticsInvAval, rlPortGvrpJoinTime=rlPortGvrpJoinTime, rlPortGvrpStatisticsSLIn=rlPortGvrpStatisticsSLIn, rlPortGvrpApplicantStatusValue=rlPortGvrpApplicantStatusValue, rlPortGvrpStatisticsREmp=rlPortGvrpStatisticsREmp, rlPortGvrpErrorStatisticsTable=rlPortGvrpErrorStatisticsTable, rlPortGvrpRegistrationModeTable=rlPortGvrpRegistrationModeTable, rlPortGvrpErrorStatisticsInvPlen=rlPortGvrpErrorStatisticsInvPlen)
| [
"[email protected]"
] | |
3a9e4f92085e755aa3e85445bf48abb902e2c0a1 | df560dde5ffbae51187041f422c87f7d1544cbe9 | /leetcode/python/841_keys_and_rooms.py | f8f42bb5c92c2b5c43d5bc8fa4770db223f3a8f4 | [
"MIT"
] | permissive | VVKot/coding-competitions | 61c97dbc4fdaeb0a35ff7fa8e55529b579fd1ebb | 7d6e599b223d89a7861929190be715d3b3604fa4 | refs/heads/master | 2021-07-04T17:47:34.246535 | 2020-09-23T20:28:30 | 2020-09-23T20:28:30 | 174,696,391 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 414 | py | class Solution:
def canVisitAllRooms(self, rooms):
stack = [0]
visited = set(stack)
while stack:
curr = stack.pop()
for room in rooms[curr]:
if room not in visited:
stack.append(room)
visited.add(room)
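            # early exit once every room has been reached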
if len(visited) == len(rooms): return True
return len(visited) == len(rooms) | [
"[email protected]"
] | |
98d67adeb2aa151c263faa61a4956cb8f620a667 | fa76cf45d7bf4ed533e5a776ecd52cea15da8c90 | /robocorp-python-ls-core/src/robocorp_ls_core/libs/robocop_lib/robocop/exceptions.py | a5d30bd12b3f4d63a76dca5367602c808aafd756 | [
"Apache-2.0"
] | permissive | martinRenou/robotframework-lsp | 8a5d63b7cc7d320c9fed2372a79c8c6772d6481e | 5f23b7374139e83d0aa1ebd30675e762d7a0db86 | refs/heads/master | 2023-08-18T22:26:01.386975 | 2021-10-25T13:46:11 | 2021-10-25T13:46:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,180 | py | class RobocopFatalError(ValueError):
pass
class ConfigGeneralError(RobocopFatalError):
pass
class DuplicatedRuleError(RobocopFatalError):
def __init__(self, rule_type, rule, checker, checker_prev):
msg = (
f"Fatal error: Message {rule_type} '{rule}' defined in {checker.__class__.__name__} "
f"was already defined in {checker_prev.__class__.__name__}"
)
super().__init__(msg)
class InvalidRuleSeverityError(RobocopFatalError):
def __init__(self, rule, severity_val):
msg = f"Fatal error: Tried to configure message {rule} with invalid severity: {severity_val}"
super().__init__(msg)
class InvalidRuleBodyError(RobocopFatalError):
def __init__(self, rule_id, rule_body):
msg = f"Fatal error: Rule '{rule_id}' has invalid body:\n{rule_body}"
super().__init__(msg)
class InvalidRuleConfigurableError(RobocopFatalError):
def __init__(self, rule_id, rule_body):
msg = f"Fatal error: Rule '{rule_id}' has invalid configurable:\n{rule_body}"
super().__init__(msg)
class InvalidRuleUsageError(RobocopFatalError):
def __init__(self, rule_id, type_error):
msg = f"Fatal error: Rule '{rule_id}' failed to prepare message description with error: {type_error}"
super().__init__(msg)
class InvalidExternalCheckerError(RobocopFatalError):
def __init__(self, path):
msg = f'Fatal error: Failed to load external rules from file "{path}". Verify if the file exists'
super().__init__(msg)
class FileError(RobocopFatalError):
def __init__(self, source):
msg = f'File "{source}" does not exist'
super().__init__(msg)
class ArgumentFileNotFoundError(RobocopFatalError):
def __init__(self, source):
msg = f'Argument file "{source}" does not exist'
super().__init__(msg)
class NestedArgumentFileError(RobocopFatalError):
def __init__(self, source):
msg = f'Nested argument file in "{source}"'
super().__init__(msg)
class InvalidArgumentError(RobocopFatalError):
def __init__(self, msg):
super().__init__(f"Invalid configuration for Robocop:\n{msg}")
| [
"[email protected]"
] | |
4a6c127e11f77fef064cd6c080ef8f541b4ba12b | 10ec999d273b9a75b3cc5c3e353d611ea7f163b6 | /grappa/operators/attributes.py | 7748bfdcbeb82c4d1d8d8cbe054f6887bb88da79 | [
"MIT"
] | permissive | jcassee/grappa | 721bec7ee2d21fc3fc857f77b9ff6d58b941bfd9 | b128da8aef67501c310701c47508e7318241aa8b | refs/heads/master | 2020-05-05T02:24:30.106837 | 2018-10-02T10:29:16 | 2018-10-02T10:29:16 | 179,636,623 | 0 | 0 | MIT | 2019-04-05T07:24:31 | 2019-04-05T07:24:31 | null | UTF-8 | Python | false | false | 690 | py | # -*- coding: utf-8 -*-
from ..decorators import attribute
@attribute(
operators=(
'to', 'has', 'have', 'satisfy', 'that', 'that_is',
'satisfies', 'include', 'do', '_is', 'which', 'which_is'
)
)
def be(ctx):
"""
Semantic attributes providing chainable declarative DSL
for assertions.
"""
ctx.negate = False
@attribute(operators=(
'not_to', 'to_not', 'does_not', 'do_not', '_not', 'not_satisfy',
'not_have', 'not_has', 'have_not', 'has_not', 'dont', 'is_not',
'which_not', 'that_not'
))
def not_be(ctx):
"""
Semantic negation attributes providing chainable declarative DSL
for assertions.
"""
ctx.negate = True
| [
"[email protected]"
] | |
af2208aab51a655605a7d800189d0ced90fe2709 | 34d88082307281333ef4aeeec012a3ff5f8ec06e | /Runood_100/R048.py | f1206ba36ee2dabd3566bd2a2b35e2a30c35bb34 | [] | no_license | JKChang2015/Python | a6f8b56fa3f9943682470ae57e5ad3266feb47a7 | adf3173263418aee5d32f96b9ea3bf416c43cc7b | refs/heads/master | 2022-12-12T12:24:48.682712 | 2021-07-30T22:27:41 | 2021-07-30T22:27:41 | 80,747,432 | 1 | 8 | null | 2022-12-08T04:32:06 | 2017-02-02T17:05:19 | HTML | UTF-8 | Python | false | false | 101 | py | # -*- coding: UTF-8 -*-
# R048
# Created by JKChang
# Fri, 12/05/2017, 11:30
# Tag:
# Description:
| [
"[email protected]"
] | |
2ce6302758b39eeb9593bfed87110360f9bea9b6 | 6d54a7b26d0eb82152a549a6a9dfde656687752c | /scripts/tools/memory/memdf/__init__.py | fa0e1070112ae4544ccf65b7e5fe3cdafc2d8467 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | project-chip/connectedhomeip | 81a123d675cf527773f70047d1ed1c43be5ffe6d | ea3970a7f11cd227ac55917edaa835a2a9bc4fc8 | refs/heads/master | 2023-09-01T11:43:37.546040 | 2023-09-01T08:01:32 | 2023-09-01T08:01:32 | 244,694,174 | 6,409 | 1,789 | Apache-2.0 | 2023-09-14T20:56:31 | 2020-03-03T17:05:10 | C++ | UTF-8 | Python | false | false | 886 | py | #
# Copyright (c) 2021 Project CHIP Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Package for working with memory usage information using Pandas DataFrame."""
from memdf.df import DF, DFs, ExtentDF, SectionDF, SegmentDF, SymbolDF
from memdf.util.config import Config, ConfigDescription
# __all__ entries must be name strings, not the imported objects themselves:
__all__ = ['DF', 'SymbolDF', 'SectionDF', 'SegmentDF', 'ExtentDF', 'DFs',
           'Config', 'ConfigDescription']
| [
"[email protected]"
] | |
dd4e5116fd1f9eecb02b439401c88eaef08b236e | 603a54e14440fbe1c8aaab34c54aedff83489ca5 | /violate_ratio/without_batching/edge_serving_resnet/server.py | 28b9ec0e0aff9025b90bc2aaecb24a34f9b54f54 | [
"MIT"
] | permissive | fkh12345/ICE | 211fdbc62211e6df6c3d90b6ff309bc8c854e01f | f6ed4933c85b541fb971f5e1bfaa814e4e613c32 | refs/heads/main | 2023-04-12T02:05:57.478650 | 2023-03-01T18:08:55 | 2023-03-01T18:08:55 | 500,040,223 | 51 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,093 | py |
import grpc
import time
from concurrent import futures
import inference_pb2 as data_pb2
import inference_pb2_grpc as data_pb2_grpc
from serving import Serving
import io
import torch
import torch.nn as nn
import numpy as np
import torchvision.models as models
import argparse
from dnn_model.lapsrn import Net
from dnn_model.dfcnn import DFCNN
from dnn_model.yolo import Yolov3
#from runtime import change_waiting_queue
import numpy as np
parser = argparse.ArgumentParser()
parser.add_argument('--bs', type=int, default=64)
parser.add_argument('--qos', type=float, default=0.05)
parser.add_argument('--worker', type=int, default=0)
args = parser.parse_args()
model = models.resnet50()
#model = DFCNN(1000, 200)
#model = Yolov3()
model.set_profile(True)
model.set_input([0, 109])
model.set_profile(False)
model_input = model.get_input()
device = 'cuda:{}'.format(args.worker)
no_batching_server = Serving(model, device)
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
_HOST = '0.0.0.0'
_PORT = '808{}'.format(args.worker)
class FormatData(data_pb2_grpc.FormatDataServicer):
def DoFormat(self, request, context):
start1 = time.time()
        payload = request.text  # raw serialized tensor payload; renamed so the builtin str is not shadowed
        start = request.start
        end = request.end
        buffer = io.BytesIO(payload)
        buffer.seek(0)
        input_tensor = torch.load(buffer)  # deserialize the tensor (this was commented out, leaving `input` bound to the builtin)
        index1 = model.push_data(start, payload, [0, 109], "cpu")
        launch = start1
        index = no_batching_server.push_index(input_tensor, start, end, index1)
out, duration = no_batching_server.get_result(index)
#print(out.shape)
out_time = time.time()
return data_pb2.actionresponse(text=out_time - start1, queue=duration)
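# --- Illustrative only (added; not in the original file): a minimal client-side
# sketch. FormatDataStub follows gRPC codegen naming for the FormatData service;
# the request message name `actionrequest` is an assumption mirroring
# `actionresponse` above; adjust to the actual inference.proto definitions.
def _example_client_call(payload, start, end):
    channel = grpc.insecure_channel('localhost:8080')
    stub = data_pb2_grpc.FormatDataStub(channel)
    # text carries the serialized tensor; start/end delimit the model slice
    return stub.DoFormat(data_pb2.actionrequest(text=payload, start=start, end=end))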
def serve():
grpcServer = grpc.server(futures.ThreadPoolExecutor(max_workers=5000))
data_pb2_grpc.add_FormatDataServicer_to_server(FormatData(), grpcServer)
grpcServer.add_insecure_port(_HOST + ':' + _PORT)
grpcServer.start()
try:
no_batching_server()
except KeyboardInterrupt:
grpcServer.stop(0)
if __name__ == '__main__':
serve() | [
"[email protected]"
] | |
647c137facd838ac6488a092188e30f81a6926b1 | 82aace1431e0af949b1294d979a16f8dc18f48c2 | /Python-Web-Basics-Softuni/recipes/venv/Scripts/django-admin.py | a220eda67fd73b94ee6387a63bff85b7b9484757 | [
"MIT"
] | permissive | borisboychev/SoftUni | 6778450417f889f8e89c709897b9e26c7129dbf6 | 22062312f08e29a1d85377a6d41ef74966d37e99 | refs/heads/master | 2023-03-27T13:11:17.378197 | 2021-03-26T09:14:25 | 2021-03-26T09:14:25 | 295,463,442 | 1 | 0 | null | 2020-10-12T12:54:13 | 2020-09-14T15:46:12 | Python | UTF-8 | Python | false | false | 717 | py | #!C:\Users\boris\SoftUni\Python-Web-Basics-Softuni\recipes\venv\Scripts\python.exe
# When the django-admin.py deprecation ends, remove this script.
import warnings
from django.core import management
try:
from django.utils.deprecation import RemovedInDjango40Warning
except ImportError:
raise ImportError(
'django-admin.py was deprecated in Django 3.1 and removed in Django '
'4.0. Please manually remove this script from your virtual environment '
'and use django-admin instead.'
)
if __name__ == "__main__":
warnings.warn(
'django-admin.py is deprecated in favor of django-admin.',
RemovedInDjango40Warning,
)
management.execute_from_command_line()
| [
"[email protected]"
] | |
933bcaa20d4e347f811ae71388ddb4c071ba5196 | a796865c5ff4dcb7c6a0c848364bd6f7cb3d7a29 | /tests/test_newsgroup20.py | 108161cf2027bfb2e2fbd7932f31709c2db1dccc | [
"Apache-2.0"
] | permissive | yk/chazutsu | d625f6f7f682d713910ce59953841e507ad27262 | ecc42c9ff0f8d47632ba4b4c7385a5fdf4386c10 | refs/heads/master | 2020-03-18T19:43:40.016170 | 2018-05-28T14:52:00 | 2018-05-28T14:52:00 | 135,173,517 | 0 | 0 | Apache-2.0 | 2018-05-28T14:41:16 | 2018-05-28T14:41:16 | null | UTF-8 | Python | false | false | 1,716 | py | import os
import sys
import shutil
import unittest
import requests
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
import chazutsu.datasets
DATA_ROOT = os.path.join(os.path.dirname(__file__), "data")
class TestNewsGroup20(unittest.TestCase):
def test_extract(self):
r = chazutsu.datasets.NewsGroup20().download(directory=DATA_ROOT, test_size=0)
try:
with open(r.data_file_path, encoding="utf-8") as f:
for ln in f:
els = ln.split("\t")
if len(els) != 5:
print(els)
print(len(els))
raise Exception("data file is not constructed by label and text.")
except Exception as ex:
if os.path.isfile(r.data_file_path):
os.remove(r.data_file_path)
self.fail(ex)
        self.assertEqual(len(r.data().columns), 5)
if os.path.isfile(r.data_file_path):
os.remove(r.data_file_path)
shutil.rmtree(r.root)
def test_parse(self):
d = chazutsu.datasets.NewsGroup20()
subject, author, text = d.parse(raw_text=sample_text)
self.assertEqual(subject, "Re: Political Atheists?")
self.assertEqual(author, "Keith Allan Schneider")
self.assertTrue(text.startswith("If I"))
sample_text = """
From: [email protected] (Keith Allan Schneider)
Subject: Re: Political Atheists?
[email protected] (Robert Beauchaine) writes:
>>If I kill this person [an innocent person convicted of murder],
>>then a murder would be committed, but I would not be the murderer. At least,
"""
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
a6cdc1e51da55bb4298536a804dbb7eb2605ffe4 | 1cec8afab42c7ed104d414089e30c647b1c79974 | /bubble_sort/main.py | e580cdb2e530946fa2fc46e6f50df54592bcdd83 | [] | no_license | yerassyl94/problems | ea84572aa89178a30fb030a16b2e4e23df3529b9 | c5f3b391c2d94445d96726aaa8572580c4c662bc | refs/heads/master | 2020-08-27T23:57:44.672878 | 2019-11-11T11:35:55 | 2019-11-11T11:35:55 | 217,526,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | py | def bubble_sort_list(_list: list):
length = len(_list) - 1
for x in range(length):
for y in range(length-x):
if _list[y] > _list[y+1]:
_list[y],_list[y+1]=_list[y+1],_list[y]
return _list
list_=[32,5,3,6,7,54,87]
print(bubble_sort_list(list_))
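# Quick sanity check (added for illustration; not in the original script):
assert bubble_sort_list([32, 5, 3, 6, 7, 54, 87]) == sorted([32, 5, 3, 6, 7, 54, 87])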
| [
"[email protected]"
] | |
83d7fc9253ee2cb0396a0315ed154dabcc0928ab | d3efc82dfa61fb82e47c82d52c838b38b076084c | /Autocase_Result/KCB_MM/YW_KCB_HASPRICELIMIT_GPMM_SHSJ_WDZC_129.py | d6747c9ad8bfc225ae04ed5405c0247a043c057c | [] | no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,273 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test//xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test//service")
from ServiceConfig import *
from mainService import *
from QueryStkPriceQty import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test//mysql")
from CaseParmInsertMysql import *
sys.path.append("/home/yhl2/workspace/xtp_test//utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
class YW_KCB_HASPRICELIMIT_GPMM_SHSJ_WDZC_129(xtp_test_case):
def setUp(self):
pass
# YW_KCB_HASPRICELIMIT_GPMM_SHSJ_WDZC_129
def test_YW_KCB_HASPRICELIMIT_GPMM_SHSJ_WDZC_129(self):
        title = 'Trading day, best-five immediate-or-cancel sell (not the last sell): portion with remaining balance under 200 shares'
        # Define the expected values for the current test case
        # Expected status: initial, unfilled, partially filled, fully filled, partial-cancel reported, partially cancelled, reported and pending cancel, cancelled, rejected, cancel-rejected, internally cancelled
        # xtp_ID and cancel_xtpID default to 0 and need not be changed
case_goal = {
'期望状态': '废单',
'errorID': 11010111,
'errorMSG': queryOrderErrorMsg(11010111),
'是否生成报单': '是',
'是否是撤废': '否',
# '是否是新股申购': '',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title)
        # Define the order parameter info ------------------------------------------
        # Parameters: ticker, market, security type, security status, trading status, side (B buy / S sell), expected status, Api
stkparm = QueryStkPriceQty('688000', '1', '4', '2', '0', 'S', case_goal['期望状态'], Api)
        # If fetching the order parameters fails, the test case fails
if stkparm['返回结果'] is False:
rs = {
'用例测试结果': stkparm['返回结果'],
                '测试错误原因': 'Failed to fetch order parameters, ' + stkparm['错误原因'],
}
print(stkparm['错误原因'])
self.assertEqual(rs['用例测试结果'], True)
else:
wt_reqs = {
'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'order_client_id':2,
'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker': stkparm['证券代码'],
'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_CANCEL'],
'price': stkparm['涨停价'],
'quantity': 99,
'position_effect':Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
CaseParmInsertMysql(case_goal, wt_reqs)
rs = serviceTest(Api, case_goal, wt_reqs)
        logger.warning('Execution result: ' + str(rs['用例测试结果']) + ','
+ str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
self.assertEqual(rs['用例测试结果'], True) # 201
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
c82a3e03bf55d608e3c42b98c26484346d260228 | 7f8082a1a88ad6895d1e4b5cf9f3952fe8951ec3 | /miniProject/bookcoversearch/search.py | 1ded489e624618a5a884f2ce23c3e41bfed477d3 | [
"MIT"
] | permissive | matiji66/opencv_study | a33e3fed3e320598d8b3612b387b63658539176d | 9b6354907609c9841915f6300ee5915a9d80906f | refs/heads/master | 2020-03-11T03:34:28.198825 | 2018-01-15T14:04:08 | 2018-01-15T14:04:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,574 | py | from __future__ import print_function
from pyimagesearch.coverdecriptor import CoverDescriptor
from pyimagesearch.covermatcher import CoverMatcher
import argparse
import glob
import csv
import cv2
ap=argparse.ArgumentParser()
ap.add_argument('-d', '--db', required=True,
help="path to the book database")
ap.add_argument('-c', '--covers', required=True,
help="path to the directory that contains the book covers")
ap.add_argument('-q', '--query', required=True,
help='path to the query book cover')
ap.add_argument('-s', '--sift',type=int, default=0,
help="whether or not SIFT should be used")
args=vars(ap.parse_args())
db={}
for l in csv.reader(open(args["db"])):
db[l[0]]=l[1:]
useSIFT=args['sift']>0
useHamming=args['sift']==0
ratio=0.7
minMatches=40
if useSIFT:
minMatches=50
cd=CoverDescriptor(useSIFT=useSIFT)
cv=CoverMatcher(cd, glob.glob(args['covers']+"/*.png"),
    ratio=ratio, minMatches=minMatches, useHamming=useHamming)
queryImage=cv2.imread(args['query'])
gray=cv2.cvtColor(queryImage,cv2.COLOR_BGR2GRAY)
(queryKps, queryDescs)=cd.describe(gray)
results=cv.search(queryKps, queryDescs)
cv2.imshow("Query", queryImage)
if len(results)==0:
print("I could not find a match for that cover")
cv2.waitKey(0)
else:
    for (i, (score, coverPath)) in enumerate(results):
(author, title) =db[coverPath[coverPath.rfind("/")+1:]]
print("{}. {:.2f}% : {} - {}".format(i + 1, score * 100,
author, title))
result = cv2.imread(coverPath)
cv2.imshow("Result", result)
cv2.waitKey(0)
| [
"[email protected]"
] | |
a757418f6ddf8e7675ef47658b0381be8f8ab2c4 | 4a1b61cf551db7843050cc7080cec6fd60c4f8cc | /2020/백준문제/IM대비/14696_딱지놀이.py | b574a887a21deac01c3a3e0c4ea705dd1508157c | [] | no_license | phoenix9373/Algorithm | 4551692027ca60e714437fd3b0c86462f635d8ff | c66fd70e14bb8357318e8b8f386d2e968f0c4d98 | refs/heads/master | 2023-08-24T10:01:20.798430 | 2021-10-15T07:57:36 | 2021-10-15T07:57:36 | 288,092,774 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | # 별4 > 동그라미3 > 네모2 > 세모1 / 무승부
# Rank: star 4 > circle 3 > square 2 > triangle 1; a tie prints D
N = int(input()) # 1~1000
for _ in range(N):
a = list(map(int, input().split()))[1:]
b = list(map(int, input().split()))[1:]
for i in range(4, 0, -1):
a_cnt = a.count(i)
b_cnt = b.count(i)
if a_cnt > b_cnt:
print('A')
break
elif b_cnt > a_cnt:
print('B')
break
else:
print('D') | [
"[email protected]"
] | |
e2f95965a7e29da94724dd80ff83e894d2a77927 | a875495c32524d1f7b05f07337f54df52de2aad0 | /algo/clustering.py | 6bc5b199bbb559d716d424618a3629e094646459 | [
"Apache-2.0"
] | permissive | adrianopls/GRIPy-X | a43188e1ffda37f9f668133f47c3e90589325753 | 21c7fa1f32f8dbb0a5dff93c2bac5acf1f9181ca | refs/heads/master | 2021-07-01T16:57:16.094069 | 2021-03-09T19:29:23 | 2021-03-09T19:29:23 | 218,369,555 | 1 | 1 | Apache-2.0 | 2019-10-30T18:13:12 | 2019-10-29T19:41:01 | Python | UTF-8 | Python | false | false | 2,962 | py | # -*- coding: utf-8 -*-
import numpy as np
from sklearn.cluster import KMeans
from sklearn.mixture import GMM
from sklearn.preprocessing import scale
from sklearn import metrics
def locate_nans(data):
return np.sum(np.isnan(data), axis=1, dtype=bool)
def reorder_clusters(clusters, centers, covars=None):
nc = centers.shape[0]
nf = centers.shape[1]
if covars is None:
covars = np.empty((nc, nf, nf))
for i in range(nc):
covars[i] = np.eye(nf)
d2 = np.empty(nc)
for i in range(nc):
d2[i] = np.dot(np.dot(centers[i], covars[i]), centers[i].T)
argsort = np.argsort(d2)
new_clusters = np.empty_like(clusters)
for i in range(nc):
new_clusters[clusters == argsort[i]] = i
return new_clusters, argsort
def k_means(data, nc, req_info=None):
means = np.mean(data, axis=0)
stds = np.std(data, axis=0)
sdata = (data - means)/stds
km = KMeans(init='k-means++', n_clusters=nc, n_init=10)
km.fit(sdata)
if req_info == 'all':
req_info = ['silhouette', 'inertia', 'centers']
elif req_info is None:
req_info = []
info = {}
if 'silhouette' in req_info:
info['silhouette'] = metrics.silhouette_score(data, km.labels_)
if 'inertia' in req_info:
info['inertia'] = km.inertia_
if 'centers' in req_info:
info['centers'] = km.cluster_centers_*stds + means
return km.labels_, info
def expectation_maximization(data, nc, cv_type='full', req_info=None):
gmm = GMM(n_components=nc, covariance_type=cv_type, thresh=1.0E-4, n_init=10)
gmm.fit(data)
labels = gmm.predict(data)
if req_info == 'all':
req_info = ['aic', 'bic', 'converged', 'weights', 'means', 'covars',
'silhouette', 'proba']
elif req_info is None:
req_info = []
info = {}
if 'aic' in req_info:
info['aic'] = gmm.aic(data)
if 'bic' in req_info:
info['bic'] = gmm.bic(data)
if 'converged' in req_info:
info['converged'] = gmm.converged_
if 'weights' in req_info:
info['weights'] = gmm.weights_
if 'means' in req_info:
info['means'] = gmm.means_
if 'covars' in req_info:
if cv_type == 'full':
info['covars'] = gmm.covars_
elif cv_type == 'tied':
cov = np.empty((nc, gmm.covars_.shape[0], gmm.covars_.shape[1]))
for i in range(nc):
cov[i] = gmm.covars_.copy()
info['covars'] = cov
else:
cov = np.empty((nc, gmm.covars_.shape[0], gmm.covars_.shape[1]))
for i in range(nc):
cov[i] = np.diag(gmm.covars_[i])
info['covars'] = cov
if 'silhouette' in req_info:
info['silhouette'] = metrics.silhouette_score(data, labels)
if 'proba' in req_info:
info['proba'] = gmm.predict_proba(data).T
return labels, info
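# --- Illustrative only (added; not part of the original module): a minimal usage
# sketch on synthetic data; assumes the legacy sklearn APIs imported above
# (KMeans, GMM) are available in the installed scikit-learn version.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    demo = np.vstack([rng.normal(0, 1, (50, 2)), rng.normal(5, 1, (50, 2))])
    labels, info = k_means(demo, 2, req_info=['inertia'])
    print('k-means inertia:', info['inertia'])
    labels, info = expectation_maximization(demo, 2, req_info=['bic'])
    print('EM BIC:', info['bic'])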
| [
"[email protected]"
] | |
12d3fed73e37e5e9e59556cf369f0593ca6170b4 | 5cd4ddfb9a929337fc3c48ee3af6cf973fd6c539 | /eSSP/__init__.py | f8effba76e1a17a61c1ed7cbcdf2a8599d6218e3 | [] | no_license | smontoya/eSSP_P3 | c77724e4c19b5b57ce2e11cc0e0b3a8cb91704be | a1733e1d6637f12354b18af0934bcdea7c56117b | refs/heads/master | 2021-01-13T13:14:52.913414 | 2016-11-03T04:23:58 | 2016-11-03T04:23:58 | 72,707,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22 | py | from eSSP import eSSP
| [
"[email protected]"
] | |
fd7d229621c66ffd6a7ebdf6880af5d34b8f6d9a | bf10231754c95afd40d15b859f31b6157e1d991d | /Conversores a flotante.py | 2372562d67d2de753418adaf194ee28d043af7dd | [] | no_license | smith-sanchez/validadores_en_python | de7849b0f6dde93fff6c8a2db53070f60d101688 | 8dab14429c1e2c5345cddeabff5e10269b6dc6e9 | refs/heads/master | 2020-09-06T22:10:59.153382 | 2019-11-09T01:12:27 | 2019-11-09T01:12:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 828 | py | #Convertir el entero 43 a flotante
x=43
a=float(x)
print(a, type(a))
# Convert the integer 1453 to float
x=1453
b=float(x)
print(b, type(b))
# Convert the integer 4685 to float
x=4685
y=float(x)
print(y, type(y))
# Convert the string "1128" to float
x="1128"
c=float(x)
print(c, type(c))
# Convert the string "-30" to float
x="-30"
d=float(x)
print(d, type(d))
# Convert the boolean 50!=61 to float
x=(50!=61)
e=float(x)
print(e, type(e))
# Convert the boolean 90>91 to float
x=(90>91)
g=float(x)
print(g, type(g))
# Convert the boolean 500>=500 to float
x=(500>=500)
h=float(x)
print(h, type(h))
# Convert the float 346.89 to float
x=346.89
j=float(x)
print(j, type(j))
# Convert the float 0.68 to float
x=0.68
n=float(x)
print(n, type(n))
| [
"[email protected]"
] | |
780352d72181bf550978615068022bcd6a8c8189 | 643c1132ac737dcaa68b887ee77155883a762329 | /static_frame/core/index_auto.py | 034e2af0da3c4bf524906972bdb89548aec3fffb | [
"MIT"
] | permissive | flexatone/static-frame | baa071ce61143baabba9b6cd6bae453757ce861b | b41ff788c651ee9fe7006a2404a615c09034a3b6 | refs/heads/master | 2020-08-28T05:06:46.000715 | 2019-10-24T16:34:16 | 2019-10-24T16:34:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,403 | py |
import typing as tp
# from static_frame.core.index_base import IndexBase
from static_frame.core.index import Index
# from static_frame.core.index_hierarchy import IndexHierarchy
from static_frame.core.index import IndexGO
from static_frame.core.util import IndexConstructor
from static_frame.core.util import DTYPE_INT_DEFAULT
IndexAutoInitializer = int
# could create trivial subclasses for these indices, but the type would not always describe the instance; for example, an IndexAutoGO could grow into a non-contiguous integer index, as loc_is_iloc is reevaluated with each append and can simply go to False.
#
# class IndexAuto(Index):
# pass
# class IndexAutoGO(IndexGO):
# pass
class IndexAutoFactory:
# @classmethod
# def from_is_static(cls,
# initializer: IndexAutoInitializer,
# *,
# is_static: bool,
# ) -> tp.Union[Index, IndexGO]:
# '''
# Args:
# initializer: An integer, or a sizable iterable.
# is_static: Boolean if this should be a static (not grow-only) index.
# '''
# labels = range(initializer)
# constructor = Index if is_static else IndexGO
# return constructor(
# labels=labels,
# loc_is_iloc=True,
# dtype=DTYPE_INT_DEFAULT
# )
# @classmethod
# def from_constructor(cls,
# initializer: IndexAutoInitializer,
# *,
# constructor: IndexConstructor,
# ) -> tp.Union[Index, IndexHierarchy]:
# labels = range(initializer)
# return constructor(labels)
@classmethod
def from_optional_constructor(cls,
initializer: IndexAutoInitializer,
*,
default_constructor: IndexConstructor,
explicit_constructor: tp.Optional[IndexConstructor] = None,
) -> tp.Union[Index, IndexGO]:
labels = range(initializer)
if explicit_constructor:
return explicit_constructor(labels)
else: # get from default constructor
constructor = Index if default_constructor.STATIC else IndexGO
return constructor(
labels=labels,
loc_is_iloc=True,
dtype=DTYPE_INT_DEFAULT
)
IndexAutoFactoryType = tp.Type[IndexAutoFactory]
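# --- Illustrative only (added; not part of the original static-frame source):
# a minimal sketch of the factory in use, defined but never called here.
def _example_index_auto() -> None:
    idx = IndexAutoFactory.from_optional_constructor(5, default_constructor=Index)
    assert list(idx) == [0, 1, 2, 3, 4]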
| [
"[email protected]"
] | |
880d01f91b79c5bae80d1fbcc5492413031f7d76 | 5f86944bdf1b810a84c63adc6ed01bbb48d2c59a | /kubernetes/test/test_v1_service_status.py | 1ce1ef5f3c6c9bd4502827d24bfdea8989ba9849 | [
"Apache-2.0"
] | permissive | m4ttshaw/client-python | 384c721ba57b7ccc824d5eca25834d0288b211e2 | 4eac56a8b65d56eb23d738ceb90d3afb6dbd96c1 | refs/heads/master | 2021-01-13T06:05:51.564765 | 2017-06-21T08:31:03 | 2017-06-21T08:31:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 859 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_service_status import V1ServiceStatus
class TestV1ServiceStatus(unittest.TestCase):
""" V1ServiceStatus unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1ServiceStatus(self):
"""
Test V1ServiceStatus
"""
model = kubernetes.client.models.v1_service_status.V1ServiceStatus()
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
6c6b92be69dae51b2f5d2845518c3defea495abe | 6adc166c33bdada82f7509301e25d1c451852d24 | /log/log_config.py | 3fb9c0d9f12ce15977115f4fc21f827713b9fd2f | [] | no_license | liushiwen555/unified_management_platform_backend | ab0a92f38be7f68bc1d3c4570560ea89bf8fcb07 | ae1ade20044b59de1e29288fcd61ba0b71d92be3 | refs/heads/master | 2023-06-25T22:45:24.669052 | 2021-07-27T01:08:24 | 2021-07-27T01:08:24 | 389,808,950 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,172 | py | from log.log_content import log_config
"""
The structure of LOG_CONFIG_DICT is:
LOG_CONFIG_DICT{
    URL_NAME:{
        HTTP_METHOD: the class that generates the log
}
}
"""
LOG_CONFIG_DICT = log_config.get_config()
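# Illustrative shape only (the route name and generator classes below are hypothetical):
# LOG_CONFIG_DICT = {'user-list': {'GET': UserListLogGenerator,
#                                  'POST': UserCreateLogGenerator}}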
class ModelLog(object):
def __init__(self):
self.conf_dict = LOG_CONFIG_DICT
def log(self, request, request_body, result, response, *args, **kwargs):
try:
# 获取url对应的name
url_name = request.resolver_match.url_name
method = request.method
log_generator = self.conf_dict[url_name][method]
log_generator_instance = log_generator(
request, request_body, result, response, *args, **kwargs)
log_generator_instance.generate_log()
except Exception as e:
# print(e)
pass
"[email protected]"
] | |
2630abbdd53c1fc1ab734fcfcc1a981f0a351e73 | 8bbe2351bbd157a46ccf8530cde4e4cc7b0bd3b7 | /trashed_20170508/evan/Entropy.py | ed75ce66a6a3b227a188a9ff686da63efd021400 | [] | no_license | airuibel/py_code | 8dc98d71e79a4c0f785ad5cf81b2ca2073061ebf | 1da9a9dcd37475dd14bab6ae58bca1e2dff4c251 | refs/heads/master | 2020-06-18T03:47:43.754204 | 2018-03-20T09:31:00 | 2018-03-20T09:31:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,366 | py | from __future__ import division
__author__ = 'Victor Ruiz, [email protected]'
import pandas as pd
import numpy as np
from math import log
import random
def entropy(data_classes, base=2):
'''
Computes the entropy of a set of labels (class instantiations)
:param base: logarithm base for computation
:param data_classes: Series with labels of examples in a dataset
:return: value of entropy
'''
if not isinstance(data_classes, pd.core.series.Series):
raise AttributeError('input array should be a pandas series')
classes = data_classes.unique()
N = len(data_classes)
ent = 0 # initialize entropy
# iterate over classes
for c in classes:
partition = data_classes[data_classes == c] # data with class = c
proportion = len(partition) / N
#update entropy
ent -= proportion * log(proportion, base)
return ent
def cut_point_information_gain(dataset, cut_point, feature_label, class_label):
'''
Return de information gain obtained by splitting a numeric attribute in two according to cut_point
:param dataset: pandas dataframe with a column for attribute values and a column for class
:param cut_point: threshold at which to partition the numeric attribute
:param feature_label: column label of the numeric attribute values in data
:param class_label: column label of the array of instance classes
:return: information gain of partition obtained by threshold cut_point
'''
if not isinstance(dataset, pd.core.frame.DataFrame):
raise AttributeError('input dataset should be a pandas data frame')
entropy_full = entropy(dataset[class_label]) # compute entropy of full dataset (w/o split)
#split data at cut_point
data_left = dataset[dataset[feature_label] <= cut_point]
data_right = dataset[dataset[feature_label] > cut_point]
(N, N_left, N_right) = (len(dataset), len(data_left), len(data_right))
gain = entropy_full - (N_left / N) * entropy(data_left[class_label]) - \
(N_right / N) * entropy(data_right[class_label])
return gain
def dataset_information_gain(dataset, class_label='y'):
'''
Return information gain of attributes
'''
if not isinstance(dataset, pd.core.frame.DataFrame):
raise AttributeError('input dataset should be a pandas data frame')
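# --- Illustrative only (added; not in the original file): a tiny smoke test of
# the completed helpers above on a toy dataset, defined but never run on import.
def _example_information_gain():
    toy = pd.DataFrame({'x': [1.0, 2.0, 3.0, 4.0], 'y': ['a', 'a', 'b', 'b']})
    print(entropy(toy['y']))                              # 1.0 for a 50/50 class split
    print(cut_point_information_gain(toy, 2.5, 'x', 'y')) # 1.0: the cut separates the classes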
| [
"l"
] | l |
f1ed1e674e834203088f1ed3f035050f5a400868 | 6aa7e203f278b9d1fd01244e740d5c944cc7c3d3 | /docs/build | 257c7678eb2f52275843d2e5a3c2f2434f7b1bcb | [
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"Python-2.0"
] | permissive | laserpedro/airflow | 83fc991d91749550b151c81876d9e7864bff3946 | a28afa8172489e41ecf7c381674a0cb91de850ff | refs/heads/master | 2023-01-02T04:55:34.030935 | 2020-10-24T15:55:11 | 2020-10-24T15:55:11 | 285,867,990 | 1 | 0 | Apache-2.0 | 2020-08-07T15:56:49 | 2020-08-07T15:56:49 | null | UTF-8 | Python | false | false | 19,712 | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import ast
import atexit
import os
import re
import shlex
import shutil
import sys
from contextlib import suppress
from functools import total_ordering
from glob import glob
from itertools import chain
from subprocess import run
from tempfile import NamedTemporaryFile
from typing import Iterable, List, NamedTuple, Optional, Set
if __name__ != "__main__":
raise Exception(
"This file is intended to be executed as an executable program. You cannot use it as a module."
"To run this script, run the ./build command"
)
@total_ordering
class DocBuildError(NamedTuple):
file_path: Optional[str]
line_no: Optional[int]
message: str
def __eq__(self, other):
left = (self.file_path, self.line_no, self.message)
right = (other.file_path, other.line_no, other.message)
return left == right
def __ne__(self, other):
return not (self == other)
def __lt__(self, other):
file_path_a = self.file_path or ''
file_path_b = other.file_path or ''
line_no_a = self.line_no or 0
line_no_b = other.line_no or 0
return (file_path_a, line_no_a, self.message) < (file_path_b, line_no_b, other.message)
build_errors: List[DocBuildError] = []
os.chdir(os.path.dirname(os.path.abspath(__file__)))
ROOT_PROJECT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
ROOT_PACKAGE_DIR = os.path.join(ROOT_PROJECT_DIR, "airflow")
def clean_files() -> None:
print("Removing content of the _build and _api folders")
with suppress(FileNotFoundError):
for filename in glob("_build/*"):
shutil.rmtree(f"_build/{filename}")
with suppress(FileNotFoundError):
for filename in glob("_api/*"):
shutil.rmtree(f"_api/{filename}")
print("Removed content of the _build and _api folders")
def prepare_directories() -> None:
if os.path.exists("/.dockerenv"):
# This script can be run both - in container and outside of it.
# Here we are inside the container which means that we should (when the host is Linux)
# fix permissions of the _build and _api folders via sudo.
# Those files are mounted from the host via docs folder and we might not have permissions to
# write to those directories (and remove the _api folder).
# We know we have sudo capabilities inside the container.
print("Creating the _build and _api folders in case they do not exist")
run(["sudo", "mkdir", "-pv", "_build"], check=True)
run(["sudo", "mkdir", "-pv", "_api"], check=True)
print("Created the _build and _api folders in case they do not exist")
def restore_ownership() -> None:
# We are inside the container which means that we should fix back the permissions of the
# _build and _api folder files, so that they can be accessed by the host user
# The _api folder should be deleted by then but just in case we should change the ownership
host_user_id = os.environ["HOST_USER_ID"]
host_group_id = os.environ["HOST_GROUP_ID"]
print(f"Changing ownership of docs/_build folder back to {host_user_id}:{host_group_id}")
run(["sudo", "chown", "-R", f'{host_user_id}:{host_group_id}', "_build"], check=True)
if os.path.exists("_api"):
run(["sudo", "chown", "-R", f'{host_user_id}:{host_group_id}', "_api"], check=True)
print(f"Changed ownership of docs/_build folder back to {host_user_id}:{host_group_id}")
atexit.register(restore_ownership)
else:
# We are outside the container so we simply make sure that the directories exist
print("Creating the _build and _api folders in case they do not exist")
run(["mkdir", "-pv", "_build"], check=True)
run(["mkdir", "-pv", "_api"], check=True)
print("Creating the _build and _api folders in case they do not exist")
def display_errors_summary() -> None:
for warning_no, error in enumerate(sorted(build_errors), 1):
print("=" * 20, f"Error {warning_no:3}", "=" * 20)
print(error.message)
print()
if error.file_path and error.line_no:
print(f"File path: {error.file_path} ({error.line_no})")
print()
print(prepare_code_snippet(error.file_path, error.line_no))
elif error.file_path:
print(f"File path: {error.file_path}")
print("=" * 50)
def find_existing_guide_operator_names():
operator_names = set()
paths = glob("howto/operator/**/*.rst", recursive=True)
for path in paths:
with open(path) as f:
operator_names |= set(re.findall(".. _howto/operator:(.+?):", f.read()))
return operator_names
def extract_ast_class_def_by_name(ast_tree, class_name):
class ClassVisitor(ast.NodeVisitor):
def __init__(self):
self.found_class_node = None
def visit_ClassDef(self, node):
if node.name == class_name:
self.found_class_node = node
visitor = ClassVisitor()
visitor.visit(ast_tree)
return visitor.found_class_node
def check_guide_links_in_operator_descriptions():
def generate_build_error(path, line_no, operator_name):
return DocBuildError(
file_path=path,
line_no=line_no,
message=(
f"Link to the guide is missing in operator's description: {operator_name}.\n"
f"Please add link to the guide to the description in the following form:\n"
f"\n"
f".. seealso::\n"
f" For more information on how to use this operator, take a look at the guide:\n"
f" :ref:`howto/operator:{operator_name}`\n"
)
)
# Extract operators for which there are existing .rst guides
operator_names = find_existing_guide_operator_names()
# Extract all potential python modules that can contain operators
python_module_paths = chain(
glob(f"{ROOT_PACKAGE_DIR}/operators/*.py"),
glob(f"{ROOT_PACKAGE_DIR}/sensors/*.py"),
glob(f"{ROOT_PACKAGE_DIR}/providers/**/operators/*.py", recursive=True),
glob(f"{ROOT_PACKAGE_DIR}/providers/**/sensors/*.py", recursive=True),
glob(f"{ROOT_PACKAGE_DIR}/providers/**/transfers/*.py", recursive=True),
)
for py_module_path in python_module_paths:
with open(py_module_path) as f:
py_content = f.read()
if "This module is deprecated" in py_content:
continue
for existing_operator in operator_names:
if f"class {existing_operator}" not in py_content:
continue
# This is a potential file with necessary class definition.
# To make sure it's a real Python class definition, we build AST tree
ast_tree = ast.parse(py_content)
class_def = extract_ast_class_def_by_name(ast_tree, existing_operator)
if class_def is None:
continue
docstring = ast.get_docstring(class_def)
if "This class is deprecated." in docstring:
continue
if f":ref:`howto/operator:{existing_operator}`" in ast.get_docstring(class_def):
continue
build_errors.append(
generate_build_error(py_module_path, class_def.lineno, existing_operator)
)
def assert_file_not_contains(file_path: str, pattern: str, message: str) -> None:
with open(file_path, "rb", 0) as doc_file:
pattern_compiled = re.compile(pattern)
for num, line in enumerate(doc_file, 1):
line_decode = line.decode()
if re.search(pattern_compiled, line_decode):
build_errors.append(DocBuildError(file_path=file_path, line_no=num, message=message))
def filter_file_list_by_pattern(file_paths: Iterable[str], pattern: str) -> List[str]:
output_paths = []
pattern_compiled = re.compile(pattern)
for file_path in file_paths:
with open(file_path, "rb", 0) as text_file:
text_file_content = text_file.read().decode()
if re.findall(pattern_compiled, text_file_content):
output_paths.append(file_path)
return output_paths
def find_modules(deprecated_only: bool = False) -> Set[str]:
file_paths = glob(f"{ROOT_PACKAGE_DIR}/**/*.py", recursive=True)
# Exclude __init__.py
file_paths = (f for f in file_paths if not f.endswith("__init__.py"))
if deprecated_only:
file_paths = filter_file_list_by_pattern(file_paths, r"This module is deprecated.")
# Make path relative
file_paths = (os.path.relpath(f, ROOT_PROJECT_DIR) for f in file_paths)
# Convert filename to module
modules_names = {file_path.rpartition(".")[0].replace("/", ".") for file_path in file_paths}
return modules_names
def check_class_links_in_operators_and_hooks_ref() -> None:
with open("operators-and-hooks-ref.rst") as ref_file:
content = ref_file.read()
current_modules_in_file = set(re.findall(r":mod:`(.+?)`", content))
airflow_modules = find_modules() - find_modules(deprecated_only=True)
airflow_modules = {
o for o in airflow_modules if any(f".{d}." in o for d in
["operators", "hooks", "sensors", "transfers"])
}
missing_modules = airflow_modules - current_modules_in_file
missing_modules -= {"airflow.providers.google.common.hooks.base_google"}
if missing_modules:
module_text_list = " * " + "\n* ".join(missing_modules)
build_errors.append(
DocBuildError(
file_path="operators-and-hooks-ref.rst",
line_no=0,
message=(
f"New module detected."
f"Please add them to the list of operators and hooks - `operators-and-hooks-ref.rst` "
f"file.\n"
f"\n"
f"New modules:\n"
f"{module_text_list}"
),
)
)
def check_guide_links_in_operators_and_hooks_ref() -> None:
all_guides = glob("howto/operator/**/*.rst", recursive=True)
# Remove extension
all_guides = (
guide.rpartition(".")[0]
for guide in all_guides
if "_partials" not in guide
)
# Remove partials and index
all_guides = (
guide
for guide in all_guides
if "/_partials/" not in guide and not guide.endswith("index")
)
with open("operators-and-hooks-ref.rst") as ref_file:
content = ref_file.read()
missing_guides = [
guide
for guide in all_guides
if guide not in content
]
if missing_guides:
guide_text_list = "\n".join(f":doc:`How to use <{guide}>`" for guide in missing_guides)
build_errors.append(
DocBuildError(
file_path="operators-and-hooks-ref.rst",
line_no=0,
message=(
f"New guide detected. "
f"Please add them to the list of operators and hooks - `operators-and-hooks-ref.rst` "
f"file.\n"
f"You can copy the relevant parts of the link from the section below:\n"
f"\n"
f"{guide_text_list}"
),
)
)
def check_exampleinclude_for_example_dags():
all_docs_files = glob("**/*rst", recursive=True)
for doc_file in all_docs_files:
assert_file_not_contains(
file_path=doc_file,
pattern=r"literalinclude::.+example_dags",
message=(
"literalinclude directive is is prohibited for example DAGs. \n"
"You should use a exampleinclude directive to include example DAGs."
)
)
def check_enforce_code_block():
all_docs_files = glob("**/*rst", recursive=True)
for doc_file in all_docs_files:
assert_file_not_contains(
file_path=doc_file,
pattern=r"^.. code::",
message=(
"We recommend using the code-block directive instead of the code directive. "
"The code-block directive is more feature-full."
)
)
MISSING_GOOGLE_DOC_GUIDES = {
"ads_to_gcs",
'adls_to_gcs',
'bigquery_to_bigquery',
'bigquery_to_gcs',
'bigquery_to_mysql',
'cassandra_to_gcs',
'dataflow',
'dlp',
'gcs_to_bigquery',
'mssql_to_gcs',
'mysql_to_gcs',
'postgres_to_gcs',
's3_to_gcs',
'sql_to_gcs',
'tasks',
}
def check_google_guides():
doc_files = glob(f"{ROOT_PROJECT_DIR}/docs/howto/operator/google/**/*.rst", recursive=True)
doc_names = {f.split("/")[-1].rsplit(".")[0] for f in doc_files}
operators_files = chain(*[
glob(f"{ROOT_PROJECT_DIR}/airflow/providers/google/*/{resource_type}/*.py")
for resource_type in ["operators", "sensors", "transfers"]
])
operators_files = (f for f in operators_files if not f.endswith("__init__.py"))
operator_names = {f.split("/")[-1].rsplit(".")[0] for f in operators_files}
# Detect missing docs:
missing_guide = operator_names - doc_names
    missing_guide -= MISSING_GOOGLE_DOC_GUIDES
if missing_guide:
missing_guide_text = " * " + "\n * ".join(missing_guide)
message = (
"You've added a new operators, but it looks like you haven't added the guide.\n"
f"{missing_guide_text}"
"\n"
"Could you add it?\n"
)
build_errors.append(DocBuildError(file_path=None, line_no=None, message=message))
# Keep update missing missing guide list
    new_guides = set(doc_names).intersection(set(MISSING_GOOGLE_DOC_GUIDES))
if new_guides:
new_guides_text = " * " + "\n * ".join(new_guides)
message = (
"You've added a guide currently listed as missing:\n"
f"{new_guides_text}"
"\n"
"Thank you very much.\n"
"Can you remove it from the list of missing guide, please?"
)
build_errors.append(DocBuildError(file_path=__file__, line_no=None, message=message))
def prepare_code_snippet(file_path: str, line_no: int, context_lines_count: int=5) -> str:
def guess_lexer_for_filename(filename):
from pygments.lexers import get_lexer_for_filename
from pygments.util import ClassNotFound
try:
lexer = get_lexer_for_filename(filename)
except ClassNotFound:
from pygments.lexers.special import TextLexer
lexer = TextLexer()
return lexer
with open(file_path) as text_file:
# Highlight code
code = text_file.read()
with suppress(ImportError):
import pygments
from pygments.formatters.terminal import TerminalFormatter
code = pygments.highlight(
code=code, formatter=TerminalFormatter(), lexer=guess_lexer_for_filename(file_path)
)
code_lines = code.split("\n")
# Prepend line number
code_lines = [f"{line_no:4} | {line}" for line_no, line in enumerate(code_lines, 1)]
# # Cut out the snippet
start_line_no = max(0, line_no - context_lines_count)
end_line_no = line_no + context_lines_count
code_lines = code_lines[start_line_no:end_line_no]
# Join lines
code = "\n".join(code_lines)
return code
def parse_sphinx_warnings(warning_text: str) -> List[DocBuildError]:
sphinx_build_errors = []
for sphinx_warning in warning_text.split("\n"):
if not sphinx_warning:
continue
warning_parts = sphinx_warning.split(":", 2)
if len(warning_parts) == 3:
try:
sphinx_build_errors.append(
DocBuildError(
file_path=warning_parts[0], line_no=int(warning_parts[1]), message=warning_parts[2]
)
)
except Exception: # pylint: disable=broad-except
# If an exception occurred while parsing the warning message, display the raw warning message.
sphinx_build_errors.append(
DocBuildError(
file_path=None, line_no=None, message=sphinx_warning
)
)
else:
sphinx_build_errors.append(DocBuildError(file_path=None, line_no=None, message=sphinx_warning))
return sphinx_build_errors
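# Illustrative only (added; not part of the original script): how one raw sphinx
# warning line maps onto DocBuildError. Defined but never called during a build.
def _example_parse_sphinx_warnings() -> None:
    sample = "howto/operator/google/index.rst:12:undefined label: foo"
    errors = parse_sphinx_warnings(sample)
    assert errors == [DocBuildError(file_path="howto/operator/google/index.rst",
                                    line_no=12, message="undefined label: foo")]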
def build_sphinx_docs() -> None:
with NamedTemporaryFile() as tmp_file:
build_cmd = [
"sphinx-build",
"-b", # builder to use
"html",
"-d", # path for the cached environment and doctree files
"_build/doctrees",
"--color", # do emit colored output
"-w", # turn warnings into errors
tmp_file.name,
".", # path to documentation source files
"_build/html", # path to output directory
]
print("Executing cmd: ", " ".join([shlex.quote(c) for c in build_cmd]))
completed_proc = run(build_cmd)
if completed_proc.returncode != 0:
build_errors.append(
DocBuildError(
file_path=None,
line_no=None,
message=f"Sphinx returned non-zero exit status: {completed_proc.returncode}.",
)
)
tmp_file.seek(0)
warning_text = tmp_file.read().decode()
# Remove 7-bit C1 ANSI escape sequences
warning_text = re.sub(r"\x1B[@-_][0-?]*[ -/]*[@-~]", "", warning_text)
sphinx_build_errors = parse_sphinx_warnings(warning_text)
build_errors.extend(sphinx_build_errors)
print("Current working directory: ", os.getcwd())
prepare_directories()
clean_files()
check_guide_links_in_operator_descriptions()
check_class_links_in_operators_and_hooks_ref()
check_guide_links_in_operators_and_hooks_ref()
check_enforce_code_block()
check_exampleinclude_for_example_dags()
check_google_guides()
CHANNEL_INVITATION = """\
If you need help, write to #documentation channel on Airflow's Slack.
Channel link: https://apache-airflow.slack.com/archives/CJ1LVREHX
Invitation link: https://apache-airflow-slack.herokuapp.com/\
"""
if build_errors:
display_errors_summary()
print()
print("The documentation has errors. Fix them to build documentation.")
print()
print(CHANNEL_INVITATION)
sys.exit(1)
build_sphinx_docs()
if build_errors:
display_errors_summary()
print()
print("The documentation has errors.")
print()
print(CHANNEL_INVITATION)
sys.exit(1)
| [
"[email protected]"
] | ||
b0629394abb4bc1fca15b22b03658e2e3db5dbaf | 89a90707983bdd1ae253f7c59cd4b7543c9eda7e | /programming_python/Dbase/Sql/dumpdb.py | 5f9388d122656adc52dde04932b87d1bff4696fb | [] | no_license | timothyshull/python_reference_code | 692a7c29608cadfd46a6cc409a000023e95b9458 | f3e2205dd070fd3210316f5f470d371950945028 | refs/heads/master | 2021-01-22T20:44:07.018811 | 2017-03-17T19:17:22 | 2017-03-17T19:17:22 | 85,346,735 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,284 | py | """
display table contents as raw tuples, or formatted with field names
command-line usage: dumpdb.py dbname? table? [-] (dash=formatted display)
"""
def showformat(recs, sept=('-' * 40)):
print(len(recs), 'records')
print(sept)
for rec in recs:
maxkey = max(len(key) for key in rec) # max key len
for key in rec: # or: \t align
print('%-*s => %s' % (maxkey, key, rec[key])) # -ljust, *len
print(sept)
def dumpdb(cursor, table, format=True):
if not format:
cursor.execute('select * from ' + table)
while True:
rec = cursor.fetchone()
if not rec: break
print(rec)
else:
from makedicts import makedicts
recs = makedicts(cursor, 'select * from ' + table)
showformat(recs)
if __name__ == '__main__':
import sys
dbname, format, table = 'dbase1', False, 'people'
cmdargs = sys.argv[1:]
if '-' in cmdargs: # format if '-' in cmdline args
format = True # dbname if other cmdline arg
cmdargs.remove('-')
if cmdargs: dbname = cmdargs.pop(0)
if cmdargs: table = cmdargs[0]
from loaddb import login
conn, curs = login(dbname)
dumpdb(curs, table, format)
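# Illustrative output sketch (added; the table and values below are hypothetical):
# with the '-' flag, showformat prints each record roughly like
#
#   2 records
#   ----------------------------------------
#   name => bob
#   job  => dev
#   ----------------------------------------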
| [
"[email protected]"
] | |
af73c60d36aefec8b5af8ad2808fa6cfd39e2542 | 7dcd840d896eccb1bed7868fda88973b1382ed94 | /setup.py | 7556608d1336b1fabc9185e643f28c41ec147e28 | [
"MIT"
] | permissive | Keita1/pycrunchbase | 18a341eca43e9d8a3f62e2c82026050b05fe9701 | d172577d979899ca7cf0f97caa1f079476906a0c | refs/heads/master | 2020-12-25T23:36:20.860263 | 2015-10-21T02:50:59 | 2015-10-21T02:50:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,791 | py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
from __future__ import absolute_import, print_function
import io
import os
import re
from glob import glob
from os.path import basename
from os.path import dirname
from os.path import join
from os.path import relpath
from os.path import splitext
from setuptools import find_packages
from setuptools import setup
def read(*names, **kwargs):
return io.open(
join(dirname(__file__), *names),
encoding=kwargs.get("encoding", "utf8")
).read()
setup(
name="pycrunchbase",
version="0.3.6",
license="MIT",
description="Python bindings to CrunchBase",
long_description="{0}\n{1}".format(read("README.rst"), re.sub(":obj:`~?(.*?)`", r"``\1``", read("CHANGELOG.rst"))),
author="Ng Zhi An",
author_email="[email protected]",
url="https://github.com/ngzhian/pycrunchbase",
packages=find_packages("src"),
package_dir={"": "src"},
py_modules=[splitext(basename(path))[0] for path in glob("src/*.py")],
include_package_data=True,
zip_safe=False,
classifiers=[
# complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: Unix",
"Operating System :: POSIX",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Utilities",
],
keywords=[
"crunchbase"
],
install_requires=[
"requests==2.5.1", "six==1.9.0"
],
)
| [
"[email protected]"
] | |
5d81f00e6ed97d46ea3a1c80d7a52c23666db239 | 91b2fb1fb6df216f2e365c3366bab66a567fc70d | /Week08/每日一题/342. 4的幂.py | 90f118bb6733e9b684bf6a93850ca73ef1b6af84 | [] | no_license | hrz123/algorithm010 | d17aee642f03f607a7984beb099eec18f2de1c8e | 817911d4282d2e226518b3533dff28282a91b3d4 | refs/heads/master | 2022-12-20T14:09:26.365781 | 2020-10-11T04:15:57 | 2020-10-11T04:15:57 | 270,178,423 | 1 | 0 | null | 2020-06-07T03:21:09 | 2020-06-07T03:21:09 | null | UTF-8 | Python | false | false | 2,606 | py | # 342. 4的幂.py
# 1. Brute force, needs no explanation
# 2. Brute force + precomputation
# The input is a 32-bit integer x < 2**31 - 1, so the largest power-of-4 exponent is 15
# That gives 16 possibilities in total, exponents 0 through 15
# 3. Greater than 0 and log2 of it is an even number
# 4. Bit tricks: a power of 4 has its single 1 bit at position 1, 3, 5, ... e.g. 00000001, 00000100, 00010000
# while a number that is only a power of 2 has the bit at position 2, 4, 6, ...
# Positions 1,3,5 in 32-bit hex are 0x55555555 and positions 2,4,6 are 0xaaaaaaaa; AND-ing with one of them yields 0 or a nonzero value accordingly
# 5. Use bit ops to check power of 2; then n % 3 == 1 means a power of 4, n % 3 == 2 means only a power of 2
class Solution:
def __init__(self):
s = 1
self.nums = {1}
for i in range(15):
s *= 4
self.nums.add(s)
def isPowerOfFour(self, num: int) -> bool:
return num in self.nums
class Powers:
def __init__(self):
s = 1
self.nums = {1}
for i in range(15):
s *= 4
self.nums.add(s)
class Solution:
p = Powers()
def isPowerOfFour(self, num: int) -> bool:
return num in self.p.nums
class Solution:
def isPowerOfFour(self, num: int) -> bool:
return num > 0 and not num & (num - 1) and not num & 0xaaaaaaaa
class Solution:
def isPowerOfFour(self, num: int) -> bool:
return num > 0 and not num & (num - 1) and num & 0x55555555
class Solution:
def isPowerOfFour(self, num: int) -> bool:
return num > 0 and (num & 0x55555555) and not num & (num - 1)
class Solution:
def isPowerOfFour(self, num: int) -> bool:
return num > 0 and not num & (num - 1) and bool(num & 0x55555555)
class Solution:
def isPowerOfFour(self, num: int) -> bool:
return num > 0 and not num & (num - 1) and bool(num & 0x55555555)
class Solution:
def isPowerOfFour(self, num: int) -> bool:
return num > 0 and not num & (num - 1) and bool(num & 0x55555555)
class Solution:
def isPowerOfFour(self, num: int) -> bool:
return num > 0 and not num & (num - 1) and bool(num & 0x55555555)
class Solution:
def isPowerOfFour(self, num: int) -> bool:
return num > 0 and num & 0x55555555 and not num & (num - 1)
class Solution:
def isPowerOfFour(self, num: int) -> bool:
return num > 0 and num & 0x55555555 and not num & (num - 1)
class Solution:
def isPowerOfFour(self, num: int) -> bool:
return num > 0 and num & 0x55555555 and not num & (num - 1)
def main():
sol = Solution()
num = 16
res = sol.isPowerOfFour(num)
print(res)
if __name__ == '__main__':
main()
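# --- Illustrative only (added; not in the original notes): a quick check that
# the 0x55555555 mask behaves as described for the first few powers of two.
def _example_masks():
    for k in range(8):
        n = 1 << k                       # 1, 2, 4, 8, ...
        is_pow4 = (n & 0x55555555) != 0  # bit sits at an odd (1st/3rd/...) position
        assert is_pow4 == (k % 2 == 0)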
| [
"[email protected]"
] | |
81a42ccdd857bad5f3dc6eb2f2b2d73362e6ce9e | 09fd456a6552f42c124c148978289fae1af2d5c3 | /Array/1380.py | 1060e1504031b93129f5620c6c68d483b799b102 | [] | no_license | hoang-ng/LeetCode | 60b4e68cbcf54cbe763d1f98a70f52e628ab32fb | 5407c6d858bfa43325363503c31134e560522be3 | refs/heads/master | 2021-04-10T11:34:35.310374 | 2020-07-28T10:22:05 | 2020-07-28T10:22:05 | 248,932,393 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,332 | py | # 1380. Lucky Numbers in a Matrix
# Given a m * n matrix of distinct numbers, return all lucky numbers in the matrix in any order.
# A lucky number is an element of the matrix such that it is the minimum element in its row and maximum in its column.
# Example 1:
# Input: matrix = [[3,7,8],[9,11,13],[15,16,17]]
# Output: [15]
# Explanation: 15 is the only lucky number since it is the minimum in its row and the maximum in its column
# Example 2:
# Input: matrix = [[1,10,4,2],[9,3,8,7],[15,16,17,12]]
# Output: [12]
# Explanation: 12 is the only lucky number since it is the minimum in its row and the maximum in its column.
# Example 3:
# Input: matrix = [[7,8],[1,2]]
# Output: [7]
# Constraints:
# m == mat.length
# n == mat[i].length
# 1 <= n, m <= 50
# 1 <= matrix[i][j] <= 10^5.
# All elements in the matrix are distinct.
class Solution:
def luckyNumbers (self, matrix):
cand = []
res = []
for i in range(len(matrix)):
cand.append(min(matrix[i]))
for j in range(len(matrix[0])):
max_col = matrix[0][j]
for i in range(0, len(matrix)):
if matrix[i][j] > max_col:
max_col = matrix[i][j]
if max_col in cand:
res.append(max_col)
return res | [
"[email protected]"
] | |
a9b57132d2463d5a1544bb28dc24c79e9975f645 | bb33e6be8316f35decbb2b81badf2b6dcf7df515 | /source/res/scripts/client/gui/Scaleform/daapi/view/meta/FalloutTankCarouselMeta.py | 7f535a3045bd53b0a9708717d041352cc1658195 | [] | no_license | StranikS-Scan/WorldOfTanks-Decompiled | 999c9567de38c32c760ab72c21c00ea7bc20990c | d2fe9c195825ececc728e87a02983908b7ea9199 | refs/heads/1.18 | 2023-08-25T17:39:27.718097 | 2022-09-22T06:49:44 | 2022-09-22T06:49:44 | 148,696,315 | 103 | 39 | null | 2022-09-14T17:50:03 | 2018-09-13T20:49:11 | Python | UTF-8 | Python | false | false | 812 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/meta/FalloutTankCarouselMeta.py
from gui.Scaleform.daapi.view.lobby.hangar.carousels.basic.tank_carousel import TankCarousel
class FalloutTankCarouselMeta(TankCarousel):
def changeVehicle(self, id):
self._printOverrideError('changeVehicle')
def clearSlot(self, vehicleId):
self._printOverrideError('clearSlot')
def shiftSlot(self, vehicleId):
self._printOverrideError('shiftSlot')
def as_setMultiselectionInfoS(self, data):
return self.flashObject.as_setMultiselectionInfo(data) if self._isDAAPIInited() else None
def as_getMultiselectionDPS(self):
return self.flashObject.as_getMultiselectionDP() if self._isDAAPIInited() else None
| [
"[email protected]"
] | |
ad39f09b7874e552aa32a908630a17b34bcdcd25 | 9b722ca41671eb2cea19bac5126d0920639261bd | /.history/app_20201130150055.py | a316d9ebab63b4be424a874dca4efbd8ca1abc4d | [] | no_license | thawalk/db_flask_server | 7928fd481f99d30bdccc60d97f02db78324cfdbe | cd55f1c9bf84c734457ee02d9f64a6833e295fad | refs/heads/master | 2023-01-25T02:40:19.097457 | 2020-12-06T07:45:50 | 2020-12-06T07:45:50 | 314,229,480 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,928 | py | import json
import pymongo
from flask import Flask, jsonify, url_for, request, redirect,Response,Request
import pymongo
from bson.json_util import dumps
import mysql.connector
from werkzeug.serving import run_simple
import os
from dotenv import load_dotenv
import datetime
import time
from flask_cors import CORS
import re
import sys
app = Flask(__name__)
CORS(app)
load_dotenv()
test_collection='test_collection'
mongo = pymongo.MongoClient('mongodb://18.209.236.31:27017/?readPreference=primary&appname=MongoDB%20Compass&ssl=false')
# mongo = pymongo.MongoClient('mongodb://{}:27017/?readPreference=primary&appname=MongoDB%20Compass&ssl=false'.format(os.getenv("mongo_url")))
metadata_db = pymongo.database.Database(mongo, 'test')
metadata_col = pymongo.collection.Collection(metadata_db, 'test_collection')
userlogging_db = pymongo.database.Database(mongo,'user_analytics')
userlogging_col = pymongo.collection.Collection(userlogging_db,'logging')
bookdetails_db = pymongo.database.Database(mongo,'extra')
bookdetails_col = pymongo.collection.Collection(bookdetails_db,'book_details_collection')
print(metadata_col.count())
print(userlogging_col.count())
print(bookdetails_col.count())
metadata_db = mysql.connector.connect(
host ='54.163.143.77',
user = 'root',
password = '',
database = 'reviews',
)
# metadata_db = mysql.connector.connect(
# host = os.getenv("host"),
# user = 'root',
# password = '',
# database = 'reviews',
# )
cur = metadata_db.cursor()
# mySQL_sort_query = """SELECT * FROM reviews.kindle_reviews ORDER BY overall ASC LIMIT 10;"""
# cur.execute(mySQL_sort_query)
# result_set = cur.fetchall()
# print(result_set)
def user_logging(userid,timestamp,req,res):
return userlogging_col.insert({"id":userid,"timestamp":timestamp,"request":req,"response":res})
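# Illustrative only (added): a stored log document ends up shaped like
# {"id": 123, "timestamp": "2020-11-30T15:00:55", "request": "GET", "response": 200}.
# Note pymongo's collection.insert() is deprecated; insert_one() is the modern call.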
@app.route('/',methods=["GET"])
def api_root():
data = {
'message': 'Welcome to our website. Where reviews are our number one priority'
}
js = json.dumps(data)
response = Response(js, status=200, mimetype='application/json')
user_logging(123,datetime.datetime.now().isoformat(),"GET",200)
return response
@app.route('/reviews/<ASIN>' ,methods = ['GET'])
def get_review_by_ASIN(ASIN):
try:
        mySQL_search_asin_query = "SELECT * FROM reviews.kindle_reviews WHERE asin = %s"
        print(mySQL_search_asin_query)
        cur.execute(mySQL_search_asin_query, (ASIN,))  # parameterized to avoid SQL injection
result_set = cur.fetchall()
r = [dict((cur.description[i][0], value) \
for i, value in enumerate(row)) for row in result_set]
js = json.dumps(r)
response = Response(js, status=200, mimetype='application/json')
user_logging(123,datetime.datetime.now().isoformat(),"GET",200)
return response
except:
errMsg = "An error occurred. Please try again."
js = json.dumps(errMsg)
user_logging(123,datetime.datetime.now().isoformat(),"GET",400)
response = Response(js, status=400, mimetype='application/json')
return response
@app.route('/categories', methods = ['GET']) #TODO: #returns list of categories
def get_categories():
categories = []
    js = json.dumps(categories)
response = Response(js, status=200, mimetype='application/json')
user_logging(123,datetime.datetime.now().isoformat(),"GET",200)
return response
@app.route('/search', methods=['GET']) #now it only searches for TITLE. the mongo metadata does not have author
def search_book():
try:
title = request.args.get("title")
author = request.args.get("author")
pattern = re.compile(f'({title})', re.I)
result = bookdetails_col.find({"$or":[{"book_title":{'$regex': pattern}},{"author_names":author}]}).limit(10) #{ $text: { $search: title } }
temp_result_array = list(result)
final_result = []
for data in temp_result_array:
asin = data['asin']
a = metadata_col.find({"asin":asin}).limit(10)
a_list = list(a)
# print(a_list[0], file=sys.stderr)
a_list[0]['book_title'] = data['book_title']
a_list[0]['author_names'] = data['author_names']
final_result.append(a_list[0])
result_array = dumps(final_result)
print(final_result, file=sys.stderr)
response = Response(result_array, status=200, mimetype='application/json')
user_logging(123,datetime.datetime.now().isoformat(),"GET",200)
return response
except:
errMsg = "Please include title."
js = json.dumps(errMsg)
user_logging(123,datetime.datetime.now().isoformat(),"GET",400)
response = Response(js, status=400, mimetype='application/json')
return response
# @app.route('/review', methods=['POST'])
# def add_review():
# if not request.json or not request.json['asin'] or type(request.json['asin']) != str or not request.json['overall'] or not request.json['reviewText'] or type(request.json['reviewText']) != str or not request.json['reviewTime'] or type(request.json['reviewTime']) != str or not request.json['reviewerID'] or type(request.json['reviewerID']) != str or not request.json['reviewerName'] or type(request.json['reviewerName']) != str or not request.json['summary'] or type(request.json['summary']) != str or not request.json['unixReviewTime'] or type(request.json['unixReviewTime']) != int :
# return 'invalid request msg', 404
# txt = "INSERT INTO 'kindle_reviews' ('id', 'asin', 'overall', 'reviewText', 'reviewTime', 'reviewerID', 'reviewerName', 'summary', 'unixReviewTime') VALUES (%s)"
# values = (None, request.json['asin'], request.json['overall'], request.json['reviewText'], request.json['reviewTime'], request.json['reviewerID'], request.json['reviewerName'], request.json['summary'], request.json['unixReviewTime'])
# cur.execute(txt, values)
# return 'successfully uploaded new review', 200
@app.route('/addBook',methods= ['POST'])
def add_book():
# if not request.json or not request.json['asin'] or type(request.json['asin']) != str or not request.json['overall'] or not request.json['reviewText'] or type(request.json['reviewText']) != str or not request.json['reviewTime'] or type(request.json['reviewTime']) != str or not request.json['reviewerID'] or type(request.json['reviewerID']) != str or not request.json['reviewerName'] or type(request.json['reviewerName']) != str or not request.json['summary'] or type(request.json['summary']) != str or not request.json['unixReviewTime'] or type(request.json['unixReviewTime']) != int :
# return 'invalid request msg', 404
try:
data = request.json
title = data['title']
asin = data['asin']
description = data['description']
price = data['price']
categories = data['categories']
message = "Book added successfully"
metadata_col.insert({"title":title,"asin":asin,"description":description,"price":price,"categories":categories})
js = json.dumps(message)
response = Response(js, status=201, mimetype='application/json')
user_logging(123,datetime.datetime.now().isoformat(),"POST",201)
return response
except:
errMsg = "Please include title, asin, description, price and categories."
js = json.dumps(errMsg)
response = Response(js, status=400, mimetype='application/json')
user_logging(123,datetime.datetime.now().isoformat(),"POST",400)
return response
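# Illustrative request body for POST /addBook (all five fields are required by
# the handler above; the values are placeholders, not real data):
#   {
#     "title": "Example Book",
#     "asin": "B000000000",
#     "description": "A short description.",
#     "price": 9.99,
#     "categories": ["fiction"]
#   }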
@app.route('/addReview',methods = ['POST']) #TODO: add review INTO sql part
def add_review():
try:
data = request.json
asin = data["asin"]
helpful = [0,0]
overall = data["overall"]
reviewText = data["reviewText"]
reviewTime = data["reviewTime"]
reviewerID = data["reviewerID"]
reviewerName = data["reviewerName"]
summary = data["summary"]
unixReviewTime = int(time.time())
mySQL_insert_query = f"""INSERT INTO reviews.kindle_reviews (asin, helpful, overall, reviewText, reviewTime, reviewerID, reviewerName, summary, unixReviewTime) VALUES ("{asin}","{helpful}",{overall},"{reviewText}","{reviewTime}","{reviewerID}","{reviewerName}","{summary}","{unixReviewTime}");"""
cur.execute(mySQL_insert_query)
metadata_db.commit()
message = "Successfully uploaded review"
js = json.dumps(message)
response = Response(js, status=201, mimetype='application/json')
user_logging(123,datetime.datetime.now().isoformat(),"POST",201)
return response
    except Exception as e:
        print(e, file=sys.stderr)
        errMsg = "An error occurred. Please check if you have all fields."
        # Exceptions are not JSON-serializable; return the error message instead
        js = json.dumps(errMsg)
response = Response(js, status=400, mimetype='application/json')
user_logging(123,datetime.datetime.now().isoformat(),"POST",400)
return response
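# Illustrative request body for POST /addReview (helpful and unixReviewTime are
# filled in server-side by the handler above; all values are placeholders):
#   {
#     "asin": "B000000000",
#     "overall": 5,
#     "reviewText": "Great read.",
#     "reviewTime": "04 3, 2020",
#     "reviewerID": "A1XXXXXXXX",
#     "reviewerName": "Jane Doe",
#     "summary": "Loved it"
#   }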
@app.route('/sortByGenres', methods= ['GET']) #TODO: sort by genres from mongo metadata categories
def sort_by_genres():
pass
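    # One possible sketch for the TODO above, reusing patterns already in this
    # file (the query-parameter name and the `categories` field are assumptions
    # based on the fields written by /addBook):
    #   genre = request.args.get("genre")
    #   result = metadata_col.find({"categories": genre}).limit(10)
    #   return Response(dumps(list(result)), status=200, mimetype='application/json')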
@app.route('/sortByRating' , methods = ['GET'])
def sort_by_ratings():  # sort books by increasing or decreasing average rating
try:
rating_preference = request.args.get("rating_preference")
if(rating_preference == 'increasing'): #means rating 1 will come out first
mySQL_sort_query = """SELECT asin as asin,CAST(AVG(overall) AS CHAR) as rating FROM reviews.kindle_reviews GROUP BY asin ORDER BY AVG(overall) ASC limit 2;"""
else: #means rating 5 will come out first
mySQL_sort_query = """SELECT asin as asin,CAST(AVG(overall) AS CHAR) as rating FROM reviews.kindle_reviews GROUP BY asin ORDER BY AVG(overall) DESC limit 2;"""
cur.execute(mySQL_sort_query)
result_set = cur.fetchall()
r = [dict((cur.description[i][0], value) \
for i, value in enumerate(row)) for row in result_set]
        final_result = []
        for data in r:
            asin = data['asin']
            met = metadata_col.find({"asin": asin}).limit(1)
            metadata = list(met)
            # The SQL rows only carry 'asin' and 'rating', so attach the
            # average rating to the matching metadata document and collect it
            metadata[0]['rating'] = data['rating']
            final_result.append(metadata[0])
        # bson's dumps handles the ObjectId fields that json.dumps cannot
        js = dumps(final_result)
response = Response(js, status=200, mimetype='application/json')
user_logging(123,datetime.datetime.now().isoformat(),"GET",200)
return response
except Exception as e:
print(e)
errMsg = "An error occurred. Please check if you have all fields."
js = json.dumps(errMsg)
response = Response(js, status=400, mimetype='application/json')
user_logging(123,datetime.datetime.now().isoformat(),"GET",400)
return response
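# Illustrative call: GET /sortByRating?rating_preference=increasing returns the
# lowest-rated titles first; any other value returns the highest-rated first.
# The two-row cap comes from the LIMIT 2 in the SQL queries above.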
if __name__ == '__main__':
#app.run(host="0.0.0.0", port=5000) #remember to change this part
app.run(debug=True)
| [
"[email protected]"
] | |
cd8dbe19a67e8ef7bed57e61394fe2abd45dd9cc | c48221dbd1335701178a8b4bfadd22b16fa168fd | /tests/example_app/admin.py | 3bc84c5af51a76dbe4fa83ca91d69cf12bbdce1f | [
"BSD-2-Clause"
] | permissive | prokaktus/django-meta | e92e9c45239993e4d6b350083a84c1d98a01f515 | 21e740f083ea32b150ad012c3bc7941ed920de20 | refs/heads/develop | 2020-04-05T14:35:42.801057 | 2017-05-06T12:26:17 | 2017-05-06T12:26:17 | 94,702,571 | 1 | 0 | null | 2017-06-18T17:31:16 | 2017-06-18T17:31:16 | null | UTF-8 | Python | false | false | 185 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from django.contrib import admin
from .models import Post
admin.site.register(Post)
| [
"[email protected]"
] | |
0fffdfcf4e1ec8477a77939f528d77ad467a4a16 | fd25231975acd147e04dc3ed3627c92cb1a4f86c | /FlaskAPI/vir_env/lib/python3.7/site-packages/scipy/sparse/bsr.py | f86c448e1d216ac11586184af2d1e5370407136f | [] | no_license | sumitkutty/Flight-Price-Prediction | 832a2802a3367e655b46d3b44f073d917abd2320 | d974a8b75fbcbfa42f11703602af3e45a3f08b3c | refs/heads/master | 2022-12-25T07:13:06.375888 | 2020-10-08T18:46:44 | 2020-10-08T18:46:44 | 302,366,725 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | version https://git-lfs.github.com/spec/v1
oid sha256:ef0f9bb9c9a9bd37389ba380aa25a50d381d571eaa83888b4205f2d207020fc5
size 24882
| [
"[email protected]"
] | |
3161a5cd27238b9cfa5bc819abae49d5b6fca114 | 36785c0893ab1e2c81c6a03305f42459776a84e0 | /ambra_sdk/service/entrypoints/order.py | f2e0fe900a88d10a1589a4cb0442c3d0b940b6d2 | [
"Apache-2.0"
] | permissive | dicomgrid/sdk-python | 06589f87f33850bd15e6e99fb683bada6492775f | 2618e682d38339439340d86080e8bc6ee6cf21b5 | refs/heads/master | 2022-08-28T14:50:35.864012 | 2022-08-22T12:36:50 | 2022-08-22T12:36:50 | 253,867,502 | 11 | 6 | Apache-2.0 | 2022-04-13T10:06:38 | 2020-04-07T17:36:56 | HTML | UTF-8 | Python | false | false | 259 | py | from ambra_sdk.service.entrypoints.generated.order import \
AsyncOrder as GAsyncOrder
from ambra_sdk.service.entrypoints.generated.order import Order as GOrder
class Order(GOrder):
"""Order."""
class AsyncOrder(GAsyncOrder):
"""AsyncOrder."""
| [
"[email protected]"
] | |
b866c9d98d721c2e46b53ade178d935ac345b7f0 | 20564b667fe6a9fa7c75e9b20e2f0446ec3440c8 | /venv/bin/pip3 | 27feb6ae67391b30f70bc833820ae9d3ff02bf94 | [] | no_license | prashantpandey9/Covid19-India-tracker | a7e544264df92df7c790e5745ef70b69fc39263a | 03df61342dffd12520f5f4172f879e35e9e6fa85 | refs/heads/master | 2023-08-14T00:36:19.974705 | 2021-02-27T11:16:49 | 2021-02-27T11:16:49 | 258,137,688 | 7 | 3 | null | 2021-09-22T19:02:07 | 2020-04-23T08:14:55 | Python | UTF-8 | Python | false | false | 255 | #!/home/prashant/my_project/covid_19/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
e2b8e855db9448e5dc41f8545f0571ae8baf5419 | 6444622ad4a150993955a0c8fe260bae1af7f8ce | /djangoenv/lib/python2.7/site-packages/django/conf/global_settings.py | 35255ffe5295aaf95f5516e907739800d49c5501 | [] | no_license | jeremyrich/Lesson_RestAPI_jeremy | ca965ef017c53f919c0bf97a4a23841818e246f9 | a44263e45b1cc1ba812059f6984c0f5be25cd234 | refs/heads/master | 2020-04-25T23:13:47.237188 | 2019-03-22T09:26:58 | 2019-03-22T09:26:58 | 173,138,073 | 0 | 0 | null | 2019-03-22T09:26:59 | 2019-02-28T15:34:19 | Python | UTF-8 | Python | false | false | 22,077 | py | # -*- coding: utf-8 -*-
"""
Default Django settings. Override these with settings in the module pointed to
by the DJANGO_SETTINGS_MODULE environment variable.
"""
from __future__ import unicode_literals
# This is defined here as a do-nothing function because we can't import
# django.utils.translation -- that module depends on the settings.
def gettext_noop(s):
return s
####################
# CORE #
####################
DEBUG = False
# Whether the framework should propagate raw exceptions rather than catching
# them. This is useful under some testing situations and should never be used
# on a live site.
DEBUG_PROPAGATE_EXCEPTIONS = False
# Whether to use the "ETag" header. This saves bandwidth but slows down performance.
# Deprecated (RemovedInDjango21Warning) in favor of ConditionalGetMiddleware
# which sets the ETag regardless of this setting.
USE_ETAGS = False
# People who get code error notifications.
# In the format [('Full Name', '[email protected]'), ('Full Name', '[email protected]')]
ADMINS = []
# List of IP addresses, as strings, that:
# * See debug comments, when DEBUG is true
# * Receive x-headers
INTERNAL_IPS = []
# Hosts/domain names that are valid for this site.
# "*" matches anything, ".example.com" matches example.com and all subdomains
ALLOWED_HOSTS = []
# Local time zone for this installation. All choices can be found here:
# https://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
# systems may support all possibilities). When USE_TZ is True, this is
# interpreted as the default user time zone.
TIME_ZONE = "America/Chicago"
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = False
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en-us"
# Languages we provide translations for, out of the box.
LANGUAGES = [
("af", gettext_noop("Afrikaans")),
("ar", gettext_noop("Arabic")),
("ast", gettext_noop("Asturian")),
("az", gettext_noop("Azerbaijani")),
("bg", gettext_noop("Bulgarian")),
("be", gettext_noop("Belarusian")),
("bn", gettext_noop("Bengali")),
("br", gettext_noop("Breton")),
("bs", gettext_noop("Bosnian")),
("ca", gettext_noop("Catalan")),
("cs", gettext_noop("Czech")),
("cy", gettext_noop("Welsh")),
("da", gettext_noop("Danish")),
("de", gettext_noop("German")),
("dsb", gettext_noop("Lower Sorbian")),
("el", gettext_noop("Greek")),
("en", gettext_noop("English")),
("en-au", gettext_noop("Australian English")),
("en-gb", gettext_noop("British English")),
("eo", gettext_noop("Esperanto")),
("es", gettext_noop("Spanish")),
("es-ar", gettext_noop("Argentinian Spanish")),
("es-co", gettext_noop("Colombian Spanish")),
("es-mx", gettext_noop("Mexican Spanish")),
("es-ni", gettext_noop("Nicaraguan Spanish")),
("es-ve", gettext_noop("Venezuelan Spanish")),
("et", gettext_noop("Estonian")),
("eu", gettext_noop("Basque")),
("fa", gettext_noop("Persian")),
("fi", gettext_noop("Finnish")),
("fr", gettext_noop("French")),
("fy", gettext_noop("Frisian")),
("ga", gettext_noop("Irish")),
("gd", gettext_noop("Scottish Gaelic")),
("gl", gettext_noop("Galician")),
("he", gettext_noop("Hebrew")),
("hi", gettext_noop("Hindi")),
("hr", gettext_noop("Croatian")),
("hsb", gettext_noop("Upper Sorbian")),
("hu", gettext_noop("Hungarian")),
("ia", gettext_noop("Interlingua")),
("id", gettext_noop("Indonesian")),
("io", gettext_noop("Ido")),
("is", gettext_noop("Icelandic")),
("it", gettext_noop("Italian")),
("ja", gettext_noop("Japanese")),
("ka", gettext_noop("Georgian")),
("kk", gettext_noop("Kazakh")),
("km", gettext_noop("Khmer")),
("kn", gettext_noop("Kannada")),
("ko", gettext_noop("Korean")),
("lb", gettext_noop("Luxembourgish")),
("lt", gettext_noop("Lithuanian")),
("lv", gettext_noop("Latvian")),
("mk", gettext_noop("Macedonian")),
("ml", gettext_noop("Malayalam")),
("mn", gettext_noop("Mongolian")),
("mr", gettext_noop("Marathi")),
("my", gettext_noop("Burmese")),
("nb", gettext_noop("Norwegian Bokmål")),
("ne", gettext_noop("Nepali")),
("nl", gettext_noop("Dutch")),
("nn", gettext_noop("Norwegian Nynorsk")),
("os", gettext_noop("Ossetic")),
("pa", gettext_noop("Punjabi")),
("pl", gettext_noop("Polish")),
("pt", gettext_noop("Portuguese")),
("pt-br", gettext_noop("Brazilian Portuguese")),
("ro", gettext_noop("Romanian")),
("ru", gettext_noop("Russian")),
("sk", gettext_noop("Slovak")),
("sl", gettext_noop("Slovenian")),
("sq", gettext_noop("Albanian")),
("sr", gettext_noop("Serbian")),
("sr-latn", gettext_noop("Serbian Latin")),
("sv", gettext_noop("Swedish")),
("sw", gettext_noop("Swahili")),
("ta", gettext_noop("Tamil")),
("te", gettext_noop("Telugu")),
("th", gettext_noop("Thai")),
("tr", gettext_noop("Turkish")),
("tt", gettext_noop("Tatar")),
("udm", gettext_noop("Udmurt")),
("uk", gettext_noop("Ukrainian")),
("ur", gettext_noop("Urdu")),
("vi", gettext_noop("Vietnamese")),
("zh-hans", gettext_noop("Simplified Chinese")),
("zh-hant", gettext_noop("Traditional Chinese")),
]
# Languages using BiDi (right-to-left) layout
LANGUAGES_BIDI = ["he", "ar", "fa", "ur"]
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
LOCALE_PATHS = []
# Settings for language cookie
LANGUAGE_COOKIE_NAME = "django_language"
LANGUAGE_COOKIE_AGE = None
LANGUAGE_COOKIE_DOMAIN = None
LANGUAGE_COOKIE_PATH = "/"
# If you set this to True, Django will format dates, numbers and calendars
# according to user current locale.
USE_L10N = False
# Not-necessarily-technical managers of the site. They get broken link
# notifications and other various emails.
MANAGERS = ADMINS
# Default content type and charset to use for all HttpResponse objects, if a
# MIME type isn't manually specified. These are used to construct the
# Content-Type header.
DEFAULT_CONTENT_TYPE = "text/html"
DEFAULT_CHARSET = "utf-8"
# Encoding of files read from disk (template and initial SQL files).
FILE_CHARSET = "utf-8"
# Email address that error messages come from.
SERVER_EMAIL = "root@localhost"
# Database connection info. If left empty, will default to the dummy backend.
DATABASES = {}
# Classes used to implement DB routing behavior.
DATABASE_ROUTERS = []
# The email backend to use. For possible shortcuts see django.core.mail.
# The default is to use the SMTP backend.
# Third-party backends can be specified by providing a Python path
# to a module that defines an EmailBackend class.
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
# Host for sending email.
EMAIL_HOST = "localhost"
# Port for sending email.
EMAIL_PORT = 25
# Whether to send SMTP 'Date' header in the local time zone or in UTC.
EMAIL_USE_LOCALTIME = False
# Optional SMTP authentication information for EMAIL_HOST.
EMAIL_HOST_USER = ""
EMAIL_HOST_PASSWORD = ""
EMAIL_USE_TLS = False
EMAIL_USE_SSL = False
EMAIL_SSL_CERTFILE = None
EMAIL_SSL_KEYFILE = None
EMAIL_TIMEOUT = None
# List of strings representing installed apps.
INSTALLED_APPS = []
TEMPLATES = []
# Default form rendering class.
FORM_RENDERER = "django.forms.renderers.DjangoTemplates"
# Default email address to use for various automated correspondence from
# the site managers.
DEFAULT_FROM_EMAIL = "webmaster@localhost"
# Subject-line prefix for email messages send with django.core.mail.mail_admins
# or ...mail_managers. Make sure to include the trailing space.
EMAIL_SUBJECT_PREFIX = "[Django] "
# Whether to append trailing slashes to URLs.
APPEND_SLASH = True
# Whether to prepend the "www." subdomain to URLs that don't have it.
PREPEND_WWW = False
# Override the server-derived value of SCRIPT_NAME
FORCE_SCRIPT_NAME = None
# List of compiled regular expression objects representing User-Agent strings
# that are not allowed to visit any page, systemwide. Use this for bad
# robots/crawlers. Here are a few examples:
# import re
# DISALLOWED_USER_AGENTS = [
# re.compile(r'^NaverBot.*'),
# re.compile(r'^EmailSiphon.*'),
# re.compile(r'^SiteSucker.*'),
# re.compile(r'^sohu-search')
# ]
DISALLOWED_USER_AGENTS = []
ABSOLUTE_URL_OVERRIDES = {}
# List of compiled regular expression objects representing URLs that need not
# be reported by BrokenLinkEmailsMiddleware. Here are a few examples:
# import re
# IGNORABLE_404_URLS = [
# re.compile(r'^/apple-touch-icon.*\.png$'),
#     re.compile(r'^/favicon\.ico$'),
#     re.compile(r'^/robots\.txt$'),
#     re.compile(r'^/phpmyadmin/'),
# re.compile(r'\.(cgi|php|pl)$'),
# ]
IGNORABLE_404_URLS = []
# A secret key for this particular Django installation. Used in secret-key
# hashing algorithms. Set this in your settings, or Django will complain
# loudly.
SECRET_KEY = ""
# Default file storage mechanism that holds media.
DEFAULT_FILE_STORAGE = "django.core.files.storage.FileSystemStorage"
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ""
# URL that handles the media served from MEDIA_ROOT.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ""
# Absolute path to the directory static files should be collected to.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = None
# URL that handles the static files served from STATIC_ROOT.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = None
# List of upload handler classes to be applied in order.
FILE_UPLOAD_HANDLERS = [
"django.core.files.uploadhandler.MemoryFileUploadHandler",
"django.core.files.uploadhandler.TemporaryFileUploadHandler",
]
# Maximum size, in bytes, of a request before it will be streamed to the
# file system instead of into memory.
FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Maximum size in bytes of request data (excluding file uploads) that will be
# read before a SuspiciousOperation (RequestDataTooBig) is raised.
DATA_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Maximum number of GET/POST parameters that will be read before a
# SuspiciousOperation (TooManyFieldsSent) is raised.
DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000
# Directory in which upload streamed files will be temporarily saved. A value of
# `None` will make Django use the operating system's default temporary directory
# (i.e. "/tmp" on *nix systems).
FILE_UPLOAD_TEMP_DIR = None
# The numeric mode to set newly-uploaded files to. The value should be a mode
# you'd pass directly to os.chmod; see https://docs.python.org/3/library/os.html#files-and-directories.
FILE_UPLOAD_PERMISSIONS = None
# The numeric mode to assign to newly-created directories, when uploading files.
# The value should be a mode as you'd pass to os.chmod;
# see https://docs.python.org/3/library/os.html#files-and-directories.
FILE_UPLOAD_DIRECTORY_PERMISSIONS = None
# Python module path where user will place custom format definition.
# The directory where this setting is pointing should contain subdirectories
# named as the locales, containing a formats.py file
# (i.e. "myproject.locale" for myproject/locale/en/formats.py etc. use)
FORMAT_MODULE_PATH = None
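# Example, matching the layout described above:
# FORMAT_MODULE_PATH = 'myproject.locale'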
# Default formatting for date objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = "N j, Y"
# Default formatting for datetime objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATETIME_FORMAT = "N j, Y, P"
# Default formatting for time objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
TIME_FORMAT = "P"
# Default formatting for date objects when only the year and month are relevant.
# See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
YEAR_MONTH_FORMAT = "F Y"
# Default formatting for date objects when only the month and day are relevant.
# See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
MONTH_DAY_FORMAT = "F j"
# Default short formatting for date objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATE_FORMAT = "m/d/Y"
# Default short formatting for datetime objects.
# See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATETIME_FORMAT = "m/d/Y P"
# Default formats to be used when parsing dates from input boxes, in order
# See all available format string here:
# http://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
DATE_INPUT_FORMATS = [
"%Y-%m-%d",
"%m/%d/%Y",
"%m/%d/%y", # '2006-10-25', '10/25/2006', '10/25/06'
"%b %d %Y",
"%b %d, %Y", # 'Oct 25 2006', 'Oct 25, 2006'
"%d %b %Y",
"%d %b, %Y", # '25 Oct 2006', '25 Oct, 2006'
"%B %d %Y",
"%B %d, %Y", # 'October 25 2006', 'October 25, 2006'
"%d %B %Y",
"%d %B, %Y", # '25 October 2006', '25 October, 2006'
]
# Default formats to be used when parsing times from input boxes, in order
# See all available format string here:
# http://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
TIME_INPUT_FORMATS = [
"%H:%M:%S", # '14:30:59'
"%H:%M:%S.%f", # '14:30:59.000200'
"%H:%M", # '14:30'
]
# Default formats to be used when parsing dates and times from input boxes,
# in order
# See all available format string here:
# http://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
DATETIME_INPUT_FORMATS = [
"%Y-%m-%d %H:%M:%S", # '2006-10-25 14:30:59'
"%Y-%m-%d %H:%M:%S.%f", # '2006-10-25 14:30:59.000200'
"%Y-%m-%d %H:%M", # '2006-10-25 14:30'
"%Y-%m-%d", # '2006-10-25'
"%m/%d/%Y %H:%M:%S", # '10/25/2006 14:30:59'
"%m/%d/%Y %H:%M:%S.%f", # '10/25/2006 14:30:59.000200'
"%m/%d/%Y %H:%M", # '10/25/2006 14:30'
"%m/%d/%Y", # '10/25/2006'
"%m/%d/%y %H:%M:%S", # '10/25/06 14:30:59'
"%m/%d/%y %H:%M:%S.%f", # '10/25/06 14:30:59.000200'
"%m/%d/%y %H:%M", # '10/25/06 14:30'
"%m/%d/%y", # '10/25/06'
]
# First day of week, to be used on calendars
# 0 means Sunday, 1 means Monday...
FIRST_DAY_OF_WEEK = 0
# Decimal separator symbol
DECIMAL_SEPARATOR = "."
# Boolean that sets whether to add thousand separator when formatting numbers
USE_THOUSAND_SEPARATOR = False
# Number of digits that will be together, when splitting them by
# THOUSAND_SEPARATOR. 0 means no grouping, 3 means splitting by thousands...
NUMBER_GROUPING = 0
# Thousand separator symbol
THOUSAND_SEPARATOR = ","
# The tablespaces to use for each model when not specified otherwise.
DEFAULT_TABLESPACE = ""
DEFAULT_INDEX_TABLESPACE = ""
# Default X-Frame-Options header value
X_FRAME_OPTIONS = "SAMEORIGIN"
USE_X_FORWARDED_HOST = False
USE_X_FORWARDED_PORT = False
# The Python dotted path to the WSGI application that Django's internal server
# (runserver) will use. If `None`, the return value of
# 'django.core.wsgi.get_wsgi_application' is used, thus preserving the same
# behavior as previous versions of Django. Otherwise this should point to an
# actual WSGI application object.
WSGI_APPLICATION = None
# If your Django app is behind a proxy that sets a header to specify secure
# connections, AND that proxy ensures that user-submitted headers with the
# same name are ignored (so that people can't spoof it), set this value to
# a tuple of (header_name, header_value). For any requests that come in with
# that header/value, request.is_secure() will return True.
# WARNING! Only set this if you fully understand what you're doing. Otherwise,
# you may be opening yourself up to a security risk.
SECURE_PROXY_SSL_HEADER = None
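# Example, for a proxy that sets X-Forwarded-Proto on secure requests:
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')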
##############
# MIDDLEWARE #
##############
# List of middleware to use. Order is important; in the request phase, these
# middleware will be applied in the order given, and in the response
# phase the middleware will be applied in reverse order.
MIDDLEWARE_CLASSES = [
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
]
MIDDLEWARE = None
############
# SESSIONS #
############
# Cache to store session data if using the cache session backend.
SESSION_CACHE_ALIAS = "default"
# Cookie name. This can be whatever you want.
SESSION_COOKIE_NAME = "sessionid"
# Age of cookie, in seconds (default: 2 weeks).
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2
# A string like ".example.com", or None for standard domain cookie.
SESSION_COOKIE_DOMAIN = None
# Whether the session cookie should be secure (https:// only).
SESSION_COOKIE_SECURE = False
# The path of the session cookie.
SESSION_COOKIE_PATH = "/"
# Whether to use the non-RFC standard httpOnly flag (IE, FF3+, others)
SESSION_COOKIE_HTTPONLY = True
# Whether to save the session data on every request.
SESSION_SAVE_EVERY_REQUEST = False
# Whether a user's session cookie expires when the Web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
# The module to store session data
SESSION_ENGINE = "django.contrib.sessions.backends.db"
# Directory to store session files if using the file session module. If None,
# the backend will use a sensible default.
SESSION_FILE_PATH = None
# class to serialize session data
SESSION_SERIALIZER = "django.contrib.sessions.serializers.JSONSerializer"
#########
# CACHE #
#########
# The cache backends to use.
CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}}
CACHE_MIDDLEWARE_KEY_PREFIX = ""
CACHE_MIDDLEWARE_SECONDS = 600
CACHE_MIDDLEWARE_ALIAS = "default"
##################
# AUTHENTICATION #
##################
AUTH_USER_MODEL = "auth.User"
AUTHENTICATION_BACKENDS = ["django.contrib.auth.backends.ModelBackend"]
LOGIN_URL = "/accounts/login/"
LOGIN_REDIRECT_URL = "/accounts/profile/"
LOGOUT_REDIRECT_URL = None
# The number of days a password reset link is valid for
PASSWORD_RESET_TIMEOUT_DAYS = 3
# the first hasher in this list is the preferred algorithm. any
# password using different algorithms will be converted automatically
# upon login
PASSWORD_HASHERS = [
"django.contrib.auth.hashers.PBKDF2PasswordHasher",
"django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
"django.contrib.auth.hashers.Argon2PasswordHasher",
"django.contrib.auth.hashers.BCryptSHA256PasswordHasher",
"django.contrib.auth.hashers.BCryptPasswordHasher",
]
AUTH_PASSWORD_VALIDATORS = []
###########
# SIGNING #
###########
SIGNING_BACKEND = "django.core.signing.TimestampSigner"
########
# CSRF #
########
# Dotted path to callable to be used as view when a request is
# rejected by the CSRF middleware.
CSRF_FAILURE_VIEW = "django.views.csrf.csrf_failure"
# Settings for CSRF cookie.
CSRF_COOKIE_NAME = "csrftoken"
CSRF_COOKIE_AGE = 60 * 60 * 24 * 7 * 52
CSRF_COOKIE_DOMAIN = None
CSRF_COOKIE_PATH = "/"
CSRF_COOKIE_SECURE = False
CSRF_COOKIE_HTTPONLY = False
CSRF_HEADER_NAME = "HTTP_X_CSRFTOKEN"
CSRF_TRUSTED_ORIGINS = []
CSRF_USE_SESSIONS = False
############
# MESSAGES #
############
# Class to use as messages backend
MESSAGE_STORAGE = "django.contrib.messages.storage.fallback.FallbackStorage"
# Default values of MESSAGE_LEVEL and MESSAGE_TAGS are defined within
# django.contrib.messages to avoid imports in this settings file.
###########
# LOGGING #
###########
# The callable to use to configure logging
LOGGING_CONFIG = "logging.config.dictConfig"
# Custom logging configuration.
LOGGING = {}
# Default exception reporter filter class used in case none has been
# specifically assigned to the HttpRequest instance.
DEFAULT_EXCEPTION_REPORTER_FILTER = "django.views.debug.SafeExceptionReporterFilter"
###########
# TESTING #
###########
# The name of the class to use to run the test suite
TEST_RUNNER = "django.test.runner.DiscoverRunner"
# Apps that don't need to be serialized at test database creation time
# (only apps with migrations are to start with)
TEST_NON_SERIALIZED_APPS = []
############
# FIXTURES #
############
# The list of directories to search for fixtures
FIXTURE_DIRS = []
###############
# STATICFILES #
###############
# A list of locations of additional static files
STATICFILES_DIRS = []
# The default file storage backend used during the build process
STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage"
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = [
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
]
##############
# MIGRATIONS #
##############
# Migration module overrides for apps, by app label.
MIGRATION_MODULES = {}
#################
# SYSTEM CHECKS #
#################
# List of all issues generated by system checks that should be silenced. Light
# issues like warnings, infos or debugs will not generate a message. Silencing
# serious issues like errors and criticals does not result in hiding the
# message, but Django will not stop you from e.g. running server.
SILENCED_SYSTEM_CHECKS = []
#######################
# SECURITY MIDDLEWARE #
#######################
SECURE_BROWSER_XSS_FILTER = False
SECURE_CONTENT_TYPE_NOSNIFF = False
SECURE_HSTS_INCLUDE_SUBDOMAINS = False
SECURE_HSTS_PRELOAD = False
SECURE_HSTS_SECONDS = 0
SECURE_REDIRECT_EXEMPT = []
SECURE_SSL_HOST = None
SECURE_SSL_REDIRECT = False
| [
"[email protected]"
] | |
54f4a480d8c01ce5d7b1aa5e4b8ab7f31dfd0da8 | ad5494244bb4d0d92df8178d96b99b949f9ee04c | /hashing/models.py | 10ed2e0c8732c4953087615021d57659be2234d1 | [] | no_license | razyesh/Hash-gen-SHA256 | 14dbf86fab593bccec9997ec087535ee02995a6c | 5cf91114962e048893c910832460f6984d787a38 | refs/heads/master | 2021-09-26T11:38:28.819734 | 2020-04-03T19:00:57 | 2020-04-03T19:00:57 | 252,815,271 | 0 | 0 | null | 2021-09-22T18:49:48 | 2020-04-03T18:56:29 | Python | UTF-8 | Python | false | false | 156 | py | from django.db import models
# Create your models here.
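# A minimal sketch (not part of the original app) of how the 64-character
# SHA-256 hex digest stored below might be produced, using the standard
# library's hashlib:
#   import hashlib
#   digest = hashlib.sha256(text.encode('utf-8')).hexdigest()
#   Hash.objects.create(text=text, hash=digest)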
class Hash(models.Model):
text = models.TextField()
    hash = models.CharField(max_length=64)
| [
"[email protected]"
] |