blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2ba56d300f998d10473103a90bf6e35b36c49fec | a37b756e34fc39c1237fc68997dbef77df9fa6fc | /keras/keras56-61/keras59_3_save_npy.py | 610895f0e6684a04202781a5817cb6ccdfb0c08d | []
| no_license | jvd2n/ai-study | e20e38493ad295940a3201fc0cc8061ca9052607 | a82f7c6d89db532f881c76b553b5ab3eea0bdd59 | refs/heads/main | 2023-08-06T03:24:39.182686 | 2021-10-06T14:41:01 | 2021-10-06T14:41:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,917 | py | import numpy as np
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# Augmentation pipeline applied to the training images only.
train_datagen = ImageDataGenerator(
    rescale=1./255,
    horizontal_flip=True,
    vertical_flip=True,
    width_shift_range=0.1,
    height_shift_range=0.1,
    rotation_range=5,
    zoom_range=1.2,
    shear_range=0.7,
    fill_mode='nearest'
)
# Test images are only rescaled, never augmented.
test_datagen = ImageDataGenerator(rescale=1./255)
xy_train = train_datagen.flow_from_directory(
    '../data/brain/train',
    target_size=(150, 150),
    batch_size=200,  # each xy_train[i] yields a batch of up to this many images
    class_mode='binary'
)
# Found 160 images belonging to 2 classes.
xy_test = test_datagen.flow_from_directory(
    '../data/brain/test',
    target_size=(150, 150),
    batch_size=200,  # each xy_test[i] yields a batch of up to this many images
    class_mode='binary',
    shuffle=True
)
# Found 120 images belonging to 2 classes.
print(xy_train)
# <tensorflow.python.keras.preprocessing.image.DirectoryIterator object at 0x000002C3A9DB9780>
print(xy_train[0][0])  # x value
print(xy_train[0][1])  # y value
# print(xy_train[0][2])  # None
print(xy_train[0][0].shape, xy_train[0][1].shape)  # (160, 150, 150, 3) (160,)
print(xy_test[0][0].shape, xy_test[0][1].shape)  # (120, 150, 150, 3) (120,)
# print(xy_train[31][1])  # y of the last batch; 32 batches * batch size = 160 images total
# print(xy_train[32][1])  # None
# print(type(xy_train))  # <class 'tensorflow.python.keras.preprocessing.image.DirectoryIterator'>
# print(type(xy_train[0]))  # <class 'tuple'>
# print(type(xy_train[0][1]))  # <class 'numpy.ndarray'>
# Persist the first (whole-set, since batch_size > dataset size) batch as .npy
# so later experiments can skip the image decoding step.
np.save('./_save/_npy/k59_3_train_x.npy', arr=xy_train[0][0])
np.save('./_save/_npy/k59_3_train_y.npy', arr=xy_train[0][1])
np.save('./_save/_npy/k59_3_test_x.npy', arr=xy_test[0][0])
np.save('./_save/_npy/k59_3_test_y.npy', arr=xy_test[0][1])
"[email protected]"
]
| |
8ba2cbaceeb6ecd1f79a0aaa8ad6322d5c9d3954 | c489a910d1533f0e03a86f3cc483fdba352dc481 | /tests/platform_tests/cli/test_show_chassis_module.py | a16ca7d6baf021ba84013997b905255f0fcb61c9 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
]
| permissive | chenkelly/sonic-mgmt | 1b6dab6e34dac2ac8cb475c4ded1329e53ad31d4 | 7bf848d84af017b0275f75c3a383b6fc63f0ab43 | refs/heads/master | 2023-03-08T11:14:22.071818 | 2023-03-02T02:26:05 | 2023-03-02T02:26:05 | 212,235,644 | 0 | 1 | NOASSERTION | 2019-10-02T01:53:59 | 2019-10-02T01:53:58 | null | UTF-8 | Python | false | false | 3,664 | py | import logging
import pytest
from tests.common.helpers.assertions import pytest_assert
from util import get_field_range, get_fields, get_skip_mod_list
logger = logging.getLogger('__name__')
pytestmark = [
pytest.mark.topology('t2')
]
CMD_SHOW_CHASSIS_MODULE = "show chassis modules"
def parse_chassis_module(output, expected_headers):
    """Parse fixed-width CLI table output into {module_name: {header: value}}.

    ``output`` is the command's stdout as a list of lines: row 0 is the header
    row, row 1 the dashed separator (used to derive column widths), and the
    remaining rows are data. Fails the test if any of ``expected_headers`` is
    missing from the header row.
    """
    # Need at least header + separator + one data row.
    assert len(output) > 2
    # Column boundaries are inferred from the '----' separator line.
    f_ranges = get_field_range(output[1])
    headers = get_fields(output[0], f_ranges)
    for header_v in expected_headers:
        pytest_assert(header_v in headers, "Missing header {}".format(header_v))
    result = {}
    for a_line in output[2:]:
        field_val = get_fields(a_line, f_ranges)
        # First column (module name/slot) keys the row.
        mod_idx = field_val[0]
        result[mod_idx] = {}
        cur_field = 1
        for a_header in headers[1:]:
            result[mod_idx][a_header] = field_val[cur_field]
            cur_field += 1
    return result
def test_show_chassis_module_status(duthosts, enum_rand_one_per_hwsku_hostname):
    """Verify `show chassis modules status` reports every module as Online,
    except modules on the skip list, which must be reported Empty."""
    cmd = " ".join([CMD_SHOW_CHASSIS_MODULE, "status"])
    logger.info("verifying output of cli command {}".format(cmd))
    duthost = duthosts[enum_rand_one_per_hwsku_hostname]
    exp_headers = ["Name", "Description", "Physical-Slot", "Oper-Status", "Admin-Status"]
    skip_mod_list = get_skip_mod_list(duthost)
    output = duthost.command(cmd)
    res = parse_chassis_module(output['stdout_lines'], exp_headers)
    # By default all modules are expected Online except those in skip_mod_list.
    for mod_idx in res.keys():
        if mod_idx in skip_mod_list:
            pytest_assert(res[mod_idx]['Oper-Status'] == 'Empty',
                          "Oper-status for slot {} should be Empty but it is {}".format(
                              mod_idx, res[mod_idx]['Oper-Status']))
        else:
            pytest_assert(res[mod_idx]['Oper-Status'] == 'Online',
                          "Oper-status for slot {} should be Online but it is {}".format(
                              mod_idx, res[mod_idx]['Oper-Status']))
def test_show_chassis_module_midplane_status(duthosts, enum_rand_one_per_hwsku_hostname):
    """
    @summary: Verify output of `show chassis-module midplane-status`
    """
    cmd = " ".join([CMD_SHOW_CHASSIS_MODULE, "midplane-status"])
    logger.info("verifying output of cli command {}".format(cmd))
    expected_headers = ["Name", "IP-Address", "Reachability"]
    duthost = duthosts[enum_rand_one_per_hwsku_hostname]
    output = duthost.command(cmd)
    res_mid_status = parse_chassis_module(output['stdout_lines'], expected_headers)
    # Only line cards and the supervisor have a midplane connection.
    mod_key = ['line-cards', 'supervisor']
    skip_mod_list = get_skip_mod_list(duthost, mod_key)
    for mod_idx in res_mid_status:
        mod_mid_status = res_mid_status[mod_idx]['Reachability']
        if mod_idx not in skip_mod_list:
            pytest_assert(mod_mid_status == "True",
                          "midplane reachability of line card {} expected true but is {}".format(mod_idx,
                                                                                                 mod_mid_status))
        else:
            # There are cases where the chassis is logically divided where some LCs belong to
            # another chassis and need to be skipped; for those we should not assume that
            # "skipped" implies the card is offline.
            if "LINE-CARD" in mod_idx:
                logger.info("skip checking midplane status for {} since it is on skip_mod_list".format(mod_idx))
            else:
                pytest_assert(mod_mid_status == "False",
                              "reachability of {} expected false but is {}".format(mod_idx, mod_mid_status))
| [
"[email protected]"
]
| |
3b8dab3caf5ea5bc26f73ce0a695743fc54d1ebd | 697af415566ba649502bd18751a6521ac526892c | /2022_VERSIONS/rename_er2_hiwrap_ratio_files_with_dates.py | 103b3076f5e91476a83cc6f59c7e187709d01c5d | []
| no_license | srbrodzik/impacts-scripts | df44c8f34746499b8397b5b1a4ad09859b4cc8d4 | 263c7545bbb912bbcea563a21d0619e5112b1788 | refs/heads/master | 2023-05-31T05:01:09.558641 | 2023-05-22T23:24:52 | 2023-05-22T23:24:52 | 215,638,568 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,026 | py | #!/usr/bin/python3
import os
import sys
import shutil
from datetime import datetime
from datetime import timedelta
import datetime as dt
import pytz
# Rename HIWRAP dual-wavelength-ratio files for the given flight date to the
# catalog convention aircraft.NASA_ER2.<YYYYMMDDHHMM>.HIWRAP_DWR<ext>,
# optionally converting the embedded Eastern timestamp to UTC (+5 h).
if len(sys.argv) != 2:
    print('Usage: {} [YYYYMMDD]'.format(sys.argv[0]))
    sys.exit()
else:
    date = sys.argv[1]

indir = '/home/disk/bob/impacts/radar/er2/postFlight/realtime/hiwrap_ratio'+'/'+date
prefix = 'aircraft.NASA_ER2'
suffix = 'HIWRAP_DWR'
convertEasternToUTC = True

os.chdir(indir)
for file in os.listdir(indir):
    print(file)
    (base, ext) = os.path.splitext(file)
    # Filenames look like <radar>_<ratio>_<datetime+seconds>; keep minutes only.
    (radar, ratio, dateTime) = base.split('_')
    dateTimeStr = dateTime[:-2]
    if convertEasternToUTC:
        dateTimeObj = datetime.strptime(dateTimeStr, "%Y%m%d%H%M")
        dateTimeObjUTC = dateTimeObj + timedelta(hours=5)
        dateTimeStrUTC = dateTimeObjUTC.strftime("%Y%m%d%H%M")
    else:
        # BUGFIX: was `datetimeStr` (undefined name) -> NameError whenever
        # convertEasternToUTC is False.
        dateTimeStrUTC = dateTimeStr
    catName = prefix+'.'+dateTimeStrUTC+'.'+suffix+ext
    shutil.move(file, catName)
| [
"[email protected]"
]
| |
85cd6a7fc3fd30414c7549565e1cf56245d15e74 | 29841982e9d3a70d24faa6bed2397d07419fb409 | /aula_5/dashboard/routes/auth.py | 572c567ffdc274cd6a81933ea061133c5086aa66 | []
| no_license | HiroEu/python-521 | 40d6950b19b6c5a9850177739b3e72be0c0e0ae7 | d279cb5dac771e11681cdfa91bfe363a2fbaa356 | refs/heads/master | 2022-02-24T15:43:49.085686 | 2022-02-08T19:08:58 | 2022-02-08T19:08:58 | 202,603,500 | 0 | 0 | null | 2021-06-02T00:20:44 | 2019-08-15T19:58:07 | Python | UTF-8 | Python | false | false | 1,593 | py |
import flask
import ldap3
import logging
blueprint = flask.Blueprint('auth', __name__)


@blueprint.route('/sign-in', methods=['GET', 'POST'])
def sign_in():
    """Render the sign-in page and validate submitted credentials.

    GET  -> renders the form.
    POST -> compares the posted email/password against the hard-coded admin
            pair and logs the outcome; the page is re-rendered either way.
    """
    context = {
        'title': 'Python | Sysadmin',
    }
    # NOTE(review): hard-coded placeholder credentials -- presumably meant to
    # be replaced by the commented-out LDAP flow below; do not ship as-is.
    EMAIL = 'admin@admin'
    PASSWORD = 'admin'
    if flask.request.method == 'POST':
        email = flask.request.form.get('email')
        password = flask.request.form.get('password')
        if email == EMAIL and password == PASSWORD:
            logging.info('Usuário logado')
        else:
            logging.warning('Falha na autenticação' + email)
    # Earlier LDAP-based authentication flow, kept for reference:
    # if flask.request.method == 'POST':
    #     form = flask.request.form
    #     email = form.get('email')
    #     password = form.get('password')
    #     conn = ldap3.Connection(
    #         ldap3.Server('ldap://127.0.0.1'),
    #         'cn=admin,dc=dexter,dc=com,dc=br',
    #         '4linux'
    #     )
    #     conn.bind()
    #     conn.search(
    #         'uid={},dc=dexter,dc=com,dc=br'.format(email),
    #         '(objectClass=person)',
    #         attributes=[
    #             'sn',
    #             'userPassword'
    #         ]
    #     )
    #     user = None
    #     try:
    #         user = conn.entries[0]
    #     except IndexError:
    #         return flask.redirect('/sign-in')
    #     saved_password = user.userPassword[0].decode()
    #     if saved_password == password:
    #         flask.session['is-logged'] = True
    #         return flask.redirect('/docker')
    return flask.render_template('sign-in.html', context=context)
"[email protected]"
]
| |
5078f94c2f41d96ba025aa001d430690b98d6220 | 840415d8cfd668d408d3191056a01db62ee87e59 | /platformio/commands/debug/helpers.py | 657e8c48ea1caa6ebfdc73611019d196a00bdfeb | [
"Apache-2.0"
]
| permissive | MyTeam888/platformio-core | a1a397e38ecca5a0b61f39dcfb4273c74a4e1b35 | 65297c24d4ffbc5713a7303b6a38a4cbc7f290e7 | refs/heads/master | 2022-12-13T12:30:40.667596 | 2020-09-10T14:46:56 | 2020-09-10T14:46:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,416 | py | # Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import time
from fnmatch import fnmatch
from hashlib import sha1
from io import BytesIO
from os.path import isfile
from platformio import fs, util
from platformio.commands import PlatformioCLI
from platformio.commands.debug.exception import DebugInvalidOptionsError
from platformio.commands.platform import platform_install as cmd_platform_install
from platformio.commands.run.command import cli as cmd_run
from platformio.compat import is_bytes
from platformio.platform.exception import UnknownPlatform
from platformio.platform.factory import PlatformFactory
from platformio.project.config import ProjectConfig
from platformio.project.options import ProjectOptions
class GDBMIConsoleStream(BytesIO):  # pylint: disable=too-few-public-methods
    """File-like sink that forwards writes to stdout as GDB/MI "~" console records."""

    # Captured once so redirection of sys.stdout later does not affect us.
    STDOUT = sys.stdout

    def write(self, text):
        # Escape the payload as a GDB/MI console stream record before emitting.
        self.STDOUT.write(escape_gdbmi_stream("~", text))
        self.STDOUT.flush()
def is_gdbmi_mode():
    """Return True when the debugger was launched with GDB/MI (`--interpreter`) flags."""
    return "--interpreter" in " ".join(PlatformioCLI.leftover_args)
def escape_gdbmi_stream(prefix, stream):
    """Wrap *stream* as a GDB/MI stream record: prefix + C-string-style quoting.

    Accepts str or bytes and returns the same type it received. A trailing
    newline on the input is preserved outside the quoted record.
    """
    bytes_stream = False
    if is_bytes(stream):
        bytes_stream = True
        stream = stream.decode()
    if not stream:
        return b"" if bytes_stream else ""
    ends_nl = stream.endswith("\n")
    # NOTE(review): any run of backslashes collapses to exactly two here --
    # looks intentional for MI quoting, but confirm for inputs containing "\\\\".
    stream = re.sub(r"\\+", "\\\\\\\\", stream)
    stream = stream.replace('"', '\\"')
    stream = stream.replace("\n", "\\n")
    stream = '%s"%s"' % (prefix, stream)
    if ends_nl:
        stream += "\n"
    return stream.encode() if bytes_stream else stream
def get_default_debug_env(config):
    """Pick the environment to debug.

    Preference order: first default env built with build_type=debug, then the
    first debug-typed env anywhere in the project, then the first default env,
    and finally the first env at all.
    """
    defaults = config.default_envs()
    everything = config.envs()
    # Default envs are examined first; scanning them again in the full list
    # is harmless because the first match wins.
    for candidate in list(defaults) + list(everything):
        if config.get("env:" + candidate, "build_type") == "debug":
            return candidate
    return (defaults or everything)[0]
def predebug_project(ctx, project_dir, env_name, preload, verbose):
    """Build (and optionally upload) the project before a debug session starts.

    When *preload* is true the firmware is also flashed and we pause briefly
    so the target can finish rebooting before the debugger attaches.
    """
    ctx.invoke(
        cmd_run,
        project_dir=project_dir,
        environment=[env_name],
        target=["debug"] + (["upload"] if preload else []),
        verbose=verbose,
    )
    if preload:
        # Give the freshly-flashed board time to come back up.
        time.sleep(5)
def validate_debug_options(cmd_ctx, env_options):
    """Resolve and normalize all debug-related options for one environment.

    Installs the platform (and debug-server package) on demand, merges
    board/tool defaults with user overrides from ``env_options``, and returns
    a dict with keys: tool, upload_protocol, load_cmds, load_mode, init_break,
    init_cmds, extra_cmds, require_debug_port, port, server.
    """
    def _cleanup_cmds(items):
        # Split multi-line option values and upgrade the legacy $LOAD_CMD
        # placeholder to $LOAD_CMDS.
        items = ProjectConfig.parse_multi_values(items)
        return ["$LOAD_CMDS" if item == "$LOAD_CMD" else item for item in items]

    try:
        platform = PlatformFactory.new(env_options["platform"])
    except UnknownPlatform:
        # Platform not installed yet -- install it, then retry.
        cmd_ctx.invoke(
            cmd_platform_install,
            platforms=[env_options["platform"]],
            skip_default_package=True,
        )
        platform = PlatformFactory.new(env_options["platform"])

    board_config = platform.board_config(env_options["board"])
    tool_name = board_config.get_debug_tool_name(env_options.get("debug_tool"))
    tool_settings = board_config.get("debug", {}).get("tools", {}).get(tool_name, {})
    server_options = None

    # A tool may declare one server config per operating system; pick the one
    # matching the current system (falls back to the last entry).
    if isinstance(tool_settings.get("server", {}), list):
        for item in tool_settings["server"][:]:
            tool_settings["server"] = item
            if util.get_systype() in item.get("system", []):
                break

    # An explicit debug_server option fully overrides the tool's server.
    if env_options.get("debug_server"):
        server_options = {
            "cwd": None,
            "executable": None,
            "arguments": env_options.get("debug_server"),
        }
        server_options["executable"] = server_options["arguments"][0]
        server_options["arguments"] = server_options["arguments"][1:]
    elif "server" in tool_settings:
        server_options = tool_settings["server"]
        server_package = server_options.get("package")
        server_package_dir = (
            platform.get_package_dir(server_package) if server_package else None
        )
        # Install the debug-server package on demand.
        if server_package and not server_package_dir:
            platform.install_packages(
                with_packages=[server_package], skip_default_package=True, silent=True
            )
            server_package_dir = platform.get_package_dir(server_package)
        server_options.update(
            dict(
                cwd=server_package_dir if server_package else None,
                executable=server_options.get("executable"),
                arguments=[
                    a.replace("$PACKAGE_DIR", server_package_dir)
                    if server_package_dir
                    else a
                    for a in server_options.get("arguments", [])
                ],
            )
        )

    extra_cmds = _cleanup_cmds(env_options.get("debug_extra_cmds"))
    extra_cmds.extend(_cleanup_cmds(tool_settings.get("extra_cmds")))
    # For each option: user value wins, then tool settings, then project default.
    result = dict(
        tool=tool_name,
        upload_protocol=env_options.get(
            "upload_protocol", board_config.get("upload", {}).get("protocol")
        ),
        load_cmds=_cleanup_cmds(
            env_options.get(
                "debug_load_cmds",
                tool_settings.get(
                    "load_cmds",
                    tool_settings.get(
                        "load_cmd", ProjectOptions["env.debug_load_cmds"].default
                    ),
                ),
            )
        ),
        load_mode=env_options.get(
            "debug_load_mode",
            tool_settings.get(
                "load_mode", ProjectOptions["env.debug_load_mode"].default
            ),
        ),
        init_break=env_options.get(
            "debug_init_break",
            tool_settings.get(
                "init_break", ProjectOptions["env.debug_init_break"].default
            ),
        ),
        init_cmds=_cleanup_cmds(
            env_options.get("debug_init_cmds", tool_settings.get("init_cmds"))
        ),
        extra_cmds=extra_cmds,
        require_debug_port=tool_settings.get("require_debug_port", False),
        port=reveal_debug_port(
            env_options.get("debug_port", tool_settings.get("port")),
            tool_name,
            tool_settings,
        ),
        server=server_options,
    )
    return result
def configure_esp32_load_cmds(debug_options, configuration):
    """Return ESP32-specific `monitor program_esp32` flash commands.

    Falls back to the configured ``load_cmds`` untouched unless all of the
    following hold: the user kept the default ["load"], the toolchain is
    xtensa-esp32, extra flash images are declared, and every image exists
    on disk.
    """
    flash_images = configuration.get("flash_extra_images") or []
    # BUGFIX: the original built an eagerly-evaluated list of conditions, so a
    # missing/None "flash_extra_images" raised TypeError while iterating it
    # before the guard could short-circuit. Use a short-circuiting expression.
    if (
        debug_options["load_cmds"] != ["load"]
        or "xtensa-esp32" not in configuration.get("cc_path", "")
        or not flash_images
        or not all(isfile(item["path"]) for item in flash_images)
    ):
        return debug_options["load_cmds"]
    mon_cmds = [
        'monitor program_esp32 "{{{path}}}" {offset} verify'.format(
            path=fs.to_unix_path(item["path"]), offset=item["offset"]
        )
        for item in flash_images
    ]
    # The application image itself is always flashed at 0x10000.
    mon_cmds.append(
        'monitor program_esp32 "{%s.bin}" 0x10000 verify'
        % fs.to_unix_path(configuration["prog_path"][:-4])
    )
    return mon_cmds
def has_debug_symbols(prog_path):
    """Return True if the program at *prog_path* looks like a debug build.

    The file is scanned in 1 KiB chunks for a fixed set of byte markers
    (DWARF section names, -Og/-g flags, and PlatformIO's debug define);
    every marker must appear. Missing files report False.
    """
    if not isfile(prog_path):
        return False
    markers = (
        b".debug_info",
        b".debug_abbrev",
        b" -Og",
        b" -g",
        b"__PLATFORMIO_BUILD_DEBUG__",
    )
    found = {marker: False for marker in markers}
    with open(prog_path, "rb") as stream:
        previous = b""
        while True:
            chunk = stream.read(1024)
            if not chunk:
                break
            # Search the previous chunk too, so markers that straddle a
            # chunk boundary are still detected.
            window = previous + chunk
            for marker in markers:
                if not found[marker] and marker in window:
                    found[marker] = True
            previous = chunk
    return all(found.values())
def is_prog_obsolete(prog_path):
    """Return True when *prog_path* changed since the last recorded digest.

    The SHA-1 of the file is compared with the one stored in a sidecar
    "<prog_path>.sha1" file; on any difference the sidecar is refreshed and
    True is returned. A missing program file is always considered obsolete.
    """
    digest_path = prog_path + ".sha1"
    if not isfile(prog_path):
        return True
    hasher = sha1()
    with open(prog_path, "rb") as stream:
        for chunk in iter(lambda: stream.read(1024), b""):
            hasher.update(chunk)
    current = hasher.hexdigest()
    previous = None
    if isfile(digest_path):
        with open(digest_path) as stream:
            previous = stream.read()
    if current == previous:
        return False
    # Remember the new digest for the next comparison.
    with open(digest_path, "w") as stream:
        stream.write(current)
    return True
def reveal_debug_port(env_debug_port, tool_name, tool_settings):
    """Resolve the serial port to use for debugging.

    ``env_debug_port`` may be an exact port or a glob pattern. Exact values
    are returned as-is; otherwise, if the tool requires a port, the connected
    serial devices are scanned for a match by pattern, Black Magic GDB port,
    or the tool's known HWIDs. Raises DebugInvalidOptionsError when a port is
    required but none is found.
    """
    def _get_pattern():
        # Return env_debug_port only when it contains glob metacharacters.
        if not env_debug_port:
            return None
        if set(["*", "?", "[", "]"]) & set(env_debug_port):
            return env_debug_port
        return None

    def _is_match_pattern(port):
        pattern = _get_pattern()
        if not pattern:
            return True
        return fnmatch(port, pattern)

    def _look_for_serial_port(hwids):
        for item in util.get_serialports(filter_hwid=True):
            if not _is_match_pattern(item["port"]):
                continue
            port = item["port"]
            if tool_name.startswith("blackmagic"):
                # Windows COM ports above COM9 need the \\.\ device prefix.
                if (
                    "windows" in util.get_systype()
                    and port.startswith("COM")
                    and len(port) > 4
                ):
                    port = "\\\\.\\%s" % port
                if "GDB" in item["description"]:
                    return port
            # Match by USB vendor:product id against the tool's known HWIDs.
            for hwid in hwids:
                hwid_str = ("%s:%s" % (hwid[0], hwid[1])).replace("0x", "")
                if hwid_str in item["hwid"]:
                    return port
        return None

    # Non-pattern explicit port: trust the user verbatim.
    if env_debug_port and not _get_pattern():
        return env_debug_port
    if not tool_settings.get("require_debug_port"):
        return None
    debug_port = _look_for_serial_port(tool_settings.get("hwids", []))
    if not debug_port:
        raise DebugInvalidOptionsError("Please specify `debug_port` for environment")
    return debug_port
| [
"[email protected]"
]
| |
3e5f56f3436373330a569dad1bb1f6b35fe1cfe8 | 3d613577d8e5a0b8f128666047043ac672f975af | /market/admin.py | 89dcecee2190326fd074e1a813638735e613a34f | []
| no_license | danimaribeiro/bitcoin_market | 8431773795114706bf482d3b7961ef7e527ead5e | 6d9256059ed7f35a8a412cb78d3a71a7498d90f9 | refs/heads/master | 2016-09-05T12:13:17.741558 | 2014-02-18T13:03:29 | 2014-02-18T13:03:29 | 16,620,172 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,992 | py | from django.contrib import admin
from django.contrib.admin import DateFieldListFilter
from django.contrib.auth.models import User
# Register your models here.
from market.models import Order, Trade, Market, MarketConfiguration, Settings
class TradeAdmin(admin.ModelAdmin):
    """Read-only admin for Trade records: both add and delete are disabled,
    so trades can only be inspected/filtered here."""
    fields = ['tid', 'date', 'amount', 'price', 'type', 'coin']
    list_display = ['tid', 'date', 'amount', 'price', 'type', 'coin']
    list_filter = ['type', ('date', DateFieldListFilter), 'coin']
    search_fields = ['date']

    def has_add_permission(self, request):
        # Trades cannot be created manually through the admin.
        return False

    def has_delete_permission(self, request, obj=None):
        # Trades cannot be deleted through the admin.
        return False
class MarketConfigurationAdmin(admin.ModelAdmin):
    """Admin for per-market API credentials; ownership is stamped on save."""
    fields = ['market', 'access_key', 'access_sign']
    list_display = ['market', 'access_key', 'access_sign', 'belongs_to']

    def save_model(self, request, obj, form, change):
        # Always attribute the configuration to the logged-in admin user.
        instance = form.save(commit=False)
        instance.belongs_to = request.user
        instance.save()
        form.save_m2m()
        return instance
class OrderAdmin(admin.ModelAdmin):
    """Admin for Orders: status/sync fields are always read-only, and the
    economic fields become read-only once the order exists."""
    fields = ['price', 'amount', 'type', 'market', 'status', 'sincronized']
    readonly_fields = ['status', 'sincronized']
    list_display = ['tid', 'price', 'amount', 'type', 'market', 'status', 'sincronized', 'belongs_to']

    def get_readonly_fields(self, request, obj=None):
        if obj:  # editing an existing object: lock the trade parameters too
            return self.readonly_fields + ['market', 'price', 'amount', 'type']
        return self.readonly_fields

    def save_model(self, request, obj, form, change):
        # Always attribute the order to the logged-in admin user.
        instance = form.save(commit=False)
        instance.belongs_to = request.user
        instance.save()
        form.save_m2m()
        return instance
# Wire the models to the Django admin site (custom ModelAdmins where defined).
admin.site.register(Order, OrderAdmin)
admin.site.register(Market)
admin.site.register(MarketConfiguration, MarketConfigurationAdmin)
admin.site.register(Settings)
admin.site.register(Trade, TradeAdmin)
| [
"[email protected]"
]
| |
fb95b5674453874d8218e5070d12976ce7cde15a | c369443df5ff98eccc0eee7f63bb8947f2943605 | /shop/admin.py | 950e8733d62ba6d0df356ee67145a865be1b988e | []
| no_license | erllan/shop-test | d2934f484b25d141a60caa5aca31a61eec48f055 | 1f77de177192ce6a1f8c5ccf1d7ca93ec026acf5 | refs/heads/master | 2023-03-06T01:04:38.785383 | 2021-02-27T18:02:07 | 2021-02-27T18:02:07 | 341,929,117 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 141 | py | from django.contrib import admin
from .models import *
# Expose the shop models in the Django admin with default ModelAdmins.
admin.site.register(User)
admin.site.register(Product)
admin.site.register(Category)
| [
"[email protected]"
]
| |
1e082c416f419960cc8822abb7b30e306623c4e7 | 0e7aed5eef2e1d132a7e75dd8f439ae76c87639c | /python/523_Continuous_Subarray_Sum.py | 1c395d7edb3f718bfdf9111d3fd63749240be388 | [
"MIT"
]
class Solution:
    def checkSubarraySum(self, nums: List[int], k: int) -> bool:
        """Return True if nums contains a contiguous subarray of length >= 2
        whose sum is a multiple of k (for k == 0, whose sum is exactly 0)."""
        # Earliest index at which each prefix-sum remainder was seen.
        # The virtual remainder 0 at index -1 covers subarrays that start
        # at the beginning of the list (e.g. [0, 0] with k = 0).
        first_seen = {0: -1}
        running = 0
        for idx, value in enumerate(nums):
            running += value
            if k != 0:
                running %= k  # group prefix sums by remainder modulo k
            earlier = first_seen.get(running)
            if earlier is None:
                first_seen[running] = idx
            elif idx - earlier > 1:
                # Two equal remainders more than one position apart bound a
                # qualifying subarray of length >= 2.
                return True
        return False
| [
"[email protected]"
]
| |
44baddc4298db9a76065f76381120299048faae9 | ccbfc7818c0b75929a1dfae41dc061d5e0b78519 | /aliyun-openapi-python-sdk-master/aliyun-python-sdk-baas/aliyunsdkbaas/request/v20180731/DescribeBlockchainInfoRequest.py | 916c4becfd7dd1214ee5a70eab17421fd385f744 | [
"Apache-2.0"
]
| permissive | P79N6A/dysms_python | 44b634ffb2856b81d5f79f65889bfd5232a9b546 | f44877b35817e103eed469a637813efffa1be3e4 | refs/heads/master | 2020-04-28T15:25:00.368913 | 2019-03-13T07:52:34 | 2019-03-13T07:52:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,119 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeBlockchainInfoRequest(RpcRequest):
    """RPC request wrapper for the Baas (2018-07-31) DescribeBlockchainInfo API."""

    def __init__(self):
        RpcRequest.__init__(self, 'Baas', '2018-07-31', 'DescribeBlockchainInfo')

    def get_Bizid(self):
        # Business/blockchain id carried in the request body.
        return self.get_body_params().get('Bizid')

    def set_Bizid(self, Bizid):
        self.add_body_params('Bizid', Bizid)
"[email protected]"
]
| |
7bd4c978ab4d3fea367ef7e57e7109b7f73253c8 | 5cb9dccbcccb8a2137368dd0615fe3e3c7761707 | /simulations/kinova/build/moveit_ros_control_interface/catkin_generated/pkg.installspace.context.pc.py | 40c68e29ded735aa8a303a0fd6910cc281b2e6ca | []
| no_license | Simon-Steinmann/sim2real-modular-RL-project | b2467a393014e106043f6128a026f5eac934a83d | 4027590ac94de2d5c914731c09efcf2f318b9ca3 | refs/heads/master | 2020-07-29T01:30:56.450919 | 2019-10-12T09:33:00 | 2019-10-12T09:33:00 | 209,605,548 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,301 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# NOTE: auto-generated by catkin from pkg.context.pc.in -- do not edit by hand;
# values are baked in at configure time for this workspace.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/acis/sim2real/simulations/kinova/install/include;/usr/include".split(';') if "/home/acis/sim2real/simulations/kinova/install/include;/usr/include" != "" else []
PROJECT_CATKIN_DEPENDS = "moveit_core;controller_manager_msgs;trajectory_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lmoveit_ros_control_interface_plugin;-lmoveit_ros_control_interface_trajectory_plugin;/usr/lib/x86_64-linux-gnu/libboost_system.so;/usr/lib/x86_64-linux-gnu/libboost_thread.so;/usr/lib/x86_64-linux-gnu/libboost_chrono.so;/usr/lib/x86_64-linux-gnu/libboost_date_time.so;/usr/lib/x86_64-linux-gnu/libboost_atomic.so;/usr/lib/x86_64-linux-gnu/libpthread.so".split(';') if "-lmoveit_ros_control_interface_plugin;-lmoveit_ros_control_interface_trajectory_plugin;/usr/lib/x86_64-linux-gnu/libboost_system.so;/usr/lib/x86_64-linux-gnu/libboost_thread.so;/usr/lib/x86_64-linux-gnu/libboost_chrono.so;/usr/lib/x86_64-linux-gnu/libboost_date_time.so;/usr/lib/x86_64-linux-gnu/libboost_atomic.so;/usr/lib/x86_64-linux-gnu/libpthread.so" != "" else []
PROJECT_NAME = "moveit_ros_control_interface"
PROJECT_SPACE_DIR = "/home/acis/sim2real/simulations/kinova/install"
PROJECT_VERSION = "1.0.1"
| [
"[email protected]"
]
| |
e3405de232c07e7515801124b11d7fee65e18b3f | 76af281542524f4bb2c80f928b6b5f0786ca1557 | /code kata/hunterbcs.py | c05f27f4aa57fea8b70cbd59253c6abba2faf7c4 | []
class Node:
    """A binary-search-tree node holding one integer value."""

    def __init__(self, d):
        self.ddata = d   # node value
        self.left = None   # subtree with values <= ddata
        self.right = None  # subtree with values > ddata
def insert(root, ins):
    """Insert node *ins* below *root* using BST ordering.

    Larger values go right; equal or smaller values go left. Recurses until
    an empty child slot is found.
    """
    if ins.ddata > root.ddata:
        if root.right is not None:
            insert(root.right, ins)
        else:
            root.right = ins
    else:
        if root.left is not None:
            insert(root.left, ins)
        else:
            root.left = ins
def inorder(root):
    """Print the tree's values in sorted (in-order) sequence on one line."""
    if root is None:
        return
    else:
        inorder(root.left)
        print(root.ddata, end=" ")
        inorder(root.right)
def lcafinder(root, nn1, nn2):
    """Return the value of the lowest common ancestor of nn1 and nn2 in a BST.

    Walks down iteratively: while both targets lie on the same side of the
    current node, descend that way; the first split point is the LCA.
    Returns None for an empty tree.
    """
    node = root
    while node is not None:
        if nn1 < node.ddata and nn2 < node.ddata:
            node = node.left
        elif nn1 > node.ddata and nn2 > node.ddata:
            node = node.right
        else:
            return node.ddata
    return None
# Driver: read n, the n node values, and the two query values; build the BST
# and print the lowest common ancestor of the two queried values.
nn = int(input())
az = list(map(int, input().split()))
l, rr1 = map(int, input().split())
rr = Node(az[0])
for i in range(1, nn):
    # BUGFIX: was Node(aa[i]) / Node(aa[0]) -- `aa` was never defined
    # (the values list is named `az`), so this always raised NameError.
    nnn = Node(az[i])
    insert(rr, nnn)
# inorder(rr)
print(lcafinder(rr, l, rr1))
| [
"[email protected]"
]
| |
0ee27ac5c2bab74b50ad2464577cd9e7a785b147 | 2eef8688d9f928de1c9fa4de4a045fa0dae97eaa | /authentication/urls.py | a1547debf84ea68191354dcc0eae804b158ddae7 | []
| no_license | teefats/GlotiExpenses | 3a98096642ef6df9008489c5db8c822703e43ab7 | 12aa1c9a37c5cf0148a7f55b114402a3044f4e60 | refs/heads/master | 2023-06-22T13:42:37.362537 | 2021-07-26T19:06:31 | 2021-07-26T19:06:31 | 389,741,004 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | from .views import RegistrationView,LoginView
from django.urls import path
# Authentication routes: user registration and login endpoints.
urlpatterns = [
    path('register', RegistrationView.as_view(), name='register'),
    path('login', LoginView.as_view(), name='login')
]
"[email protected]"
]
| |
1aa13187b581668799052bae234fe73c77b9b0d3 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /JEt4kwPtY6CGPsT9t_6.py | 184ee79d9dfae190511e21dc5f73d8aed48cb2d4 | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 954 | py | """
Create a function that takes a mathematical expression as a string, list of
numbers on which the mathematical expression is to be calculated and return
the result as a list of string.
### Examples
mathematical("f(y)=y+1",[1,2]) ➞ ["f(1)=2","f(2)=3"]
mathematical("f(y)=y^2",[1,2,3]) ➞ ["f(1)=1","f(2)=4","f(3)=9"]
mathematical("f(y)=yx3",[1,2,3]) ➞ ["f(1)=3","f(2)=6","f(3)=9"]
### Notes
* List of numbers are positive integers.
* In the algebraic expression x = `*`
"""
def mathematical(exp, numbers):
    """Evaluate a templated expression like "f(y)=y^2" for each number.

    Every 'y' in *exp* is substituted by the number, 'x' means multiply and
    '^' means power; returns strings such as "f(2)=4", one per input number.
    """
    results = []
    for value in numbers:
        substituted = exp.replace('y', str(value))
        parts = substituted.split('=')
        call, formula = parts[0], parts[1]
        # Translate the algebraic notation into Python operators; replace()
        # is a no-op when the character is absent.
        formula = formula.replace('x', '*').replace('^', '**')
        # NOTE: evaluates the templated right-hand side, as in the original.
        results.append('{0}={1:.0f}'.format(call, eval(formula)))
    return results
| [
"[email protected]"
]
| |
b420a3ada2d2d1084035ded8fedf94fab11c7246 | 6f05f7d5a67b6bb87956a22b988067ec772ba966 | /data/test/python/c6ac49828b465d81fdafb56e8a05c0177a7ec6c2HelloZigguratApiView.py | c6ac49828b465d81fdafb56e8a05c0177a7ec6c2 | [
"MIT"
]
| permissive | harshp8l/deep-learning-lang-detection | 93b6d24a38081597c610ecf9b1f3b92c7d669be5 | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | refs/heads/master | 2020-04-07T18:07:00.697994 | 2018-11-29T23:21:23 | 2018-11-29T23:21:23 | 158,597,498 | 0 | 0 | MIT | 2018-11-21T19:36:42 | 2018-11-21T19:36:41 | null | UTF-8 | Python | false | false | 786 | py | # HelloZigguratApiView.py
# (C)2013
# Scott Ernst
from ziggurat.view.api.ApiRouterView import ApiRouterView
#___________________________________________________________________________________________________ HelloZigguratApiView
class HelloZigguratApiView(ApiRouterView):
    """Minimal Ziggurat API view: inherits all routing behavior from
    ApiRouterView and adds no endpoints of its own."""

#===================================================================================================
#                                                                                       C L A S S

#___________________________________________________________________________________________________ __init__
    def __init__(self, request, **kwargs):
        """Creates a new instance of HelloZigguratApiView for *request*,
        forwarding any keyword arguments to ApiRouterView."""
        super(HelloZigguratApiView, self).__init__(request, **kwargs)
| [
"[email protected]"
]
| |
4756f1ab9f395d38c2dc002023bc87b08d00c0ce | fffda6e06cb979e83db15e9142db7c9994400e2f | /language/bert_extraction/steal_bert_qa/utils/evaluate_squad.py | ea9eef5d7871c98ee1bf95b0138c2334ed1dfee8 | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
]
| permissive | ZachT1711/language | b48e551555a41bf269cc20f22871a9e4c47aacc9 | de84080fc8a239a7271aad1d447fcb38a895790b | refs/heads/master | 2023-01-05T21:36:59.194404 | 2020-04-14T17:19:25 | 2020-04-14T17:30:20 | 250,185,870 | 1 | 0 | Apache-2.0 | 2022-12-23T20:28:38 | 2020-03-26T07:09:01 | Python | UTF-8 | Python | false | false | 6,410 | py | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Official evaluation script for version 1.1 of the SQuAD dataset."""
from __future__ import print_function
import collections as cll
import json
import re
import string
import tensorflow.compat.v1 as tf
# Short aliases into the TF compat namespace used throughout this script.
app = tf.compat.v1.app
flags = tf.flags
gfile = tf.gfile
logging = tf.logging
# The evaluation mode is chosen in main() from which pair of these flags
# is supplied: two datasets, two prediction files, or one of each.
flags.DEFINE_string('dataset_file', None, 'Dataset file')
flags.DEFINE_string('dataset_file2', None, 'Dataset file #2')
flags.DEFINE_string('prediction_file', None, 'Prediction file')
flags.DEFINE_string('prediction_file2', None, 'Prediction file #2')
FLAGS = flags.FLAGS
def normalize_answer(s):
  """Lower text and remove punctuation, articles and extra whitespace."""
  lowered = s.lower()
  no_punct = ''.join(ch for ch in lowered if ch not in string.punctuation)
  no_articles = re.sub(r'\b(a|an|the)\b', ' ', no_punct)
  # split()/join collapses runs of whitespace and trims the ends.
  return ' '.join(no_articles.split())
def f1_score(prediction, ground_truth):
  """Token-overlap F1 between a prediction and one ground truth."""
  pred_tokens = normalize_answer(prediction).split()
  truth_tokens = normalize_answer(ground_truth).split()
  # Two empty answers are defined as a perfect match.
  if not pred_tokens and not truth_tokens:
    return 1.0
  overlap = cll.Counter(pred_tokens) & cll.Counter(truth_tokens)
  num_same = sum(overlap.values())
  if num_same == 0:
    return 0
  # The 1.0 factor keeps the divisions floating-point under Python 2 too.
  precision = 1.0 * num_same / len(pred_tokens)
  recall = 1.0 * num_same / len(truth_tokens)
  return (2 * precision * recall) / (precision + recall)
def f1_score_multiple(predictions):
  """Pairwise token F1 over every unordered pair of predictions."""
  return [
      f1_score(first, second)
      for i, first in enumerate(predictions[:-1])
      for second in predictions[i + 1:]
  ]
def exact_match_score(prediction, ground_truth):
  """True when both answers are identical after normalization."""
  normalized_prediction = normalize_answer(prediction)
  normalized_truth = normalize_answer(ground_truth)
  return normalized_prediction == normalized_truth
def metric_max_over_ground_truths(metric_fn, prediction, ground_truths):
  """Best metric_fn(prediction, truth) score over all ground truths."""
  return max(metric_fn(prediction, ground_truth)
             for ground_truth in ground_truths)
def evaluate_preds_preds(preds1, preds2):
  """Score one prediction file against another (preds1 as reference).

  Returns percentages for exact match, token F1, and 'any_match'
  (pairs with a non-zero F1).
  """
  exact_match = 0
  f1 = 0
  any_match = 0
  total = 0
  for qa_id, reference in preds1.items():
    total += 1
    candidate = preds2[qa_id]
    exact_match += metric_max_over_ground_truths(
        exact_match_score, candidate, [reference])
    pair_f1 = metric_max_over_ground_truths(f1_score, candidate, [reference])
    f1 += pair_f1
    if pair_f1 > 0:
      any_match += 1
  return {
      'exact_match': 100.0 * exact_match / total,
      'f1': 100.0 * f1 / total,
      'any_match': 100.0 * any_match / total,
  }
def evaluate_dataset_preds(dataset, predictions):
  """Official SQuAD-style EM/F1 of a prediction dict against a dataset."""
  exact_match = 0
  f1 = 0
  total = 0
  for article in dataset:
    for paragraph in article['paragraphs']:
      for qa in paragraph['qas']:
        total += 1
        if qa['id'] not in predictions:
          # Missing predictions contribute 0 to both metrics.
          print('Unanswered question ' + qa['id'] +
                ' will receive score 0.')
          continue
        ground_truths = [answer['text'] for answer in qa['answers']]
        prediction = predictions[qa['id']]
        exact_match += metric_max_over_ground_truths(
            exact_match_score, prediction, ground_truths)
        f1 += metric_max_over_ground_truths(
            f1_score, prediction, ground_truths)
  return {'exact_match': 100.0 * exact_match / total,
          'f1': 100.0 * f1 / total}
def evaluate_dataset_dataset(dataset, dataset2):
  """EM/F1 between two parallel datasets; the second supplies predictions.

  Asserts that both datasets are aligned (same contexts, same number of
  questions per paragraph); the first answer of dataset2 is treated as
  the prediction.
  """
  exact_match = 0
  f1 = 0
  total = 0
  for article1, article2 in zip(dataset, dataset2):
    paragraph_pairs = zip(article1['paragraphs'], article2['paragraphs'])
    for para1, para2 in paragraph_pairs:
      assert para1['context'].strip() == para2['context'].strip()
      assert len(para1['qas']) == len(para2['qas'])
      for qa1, qa2 in zip(para1['qas'], para2['qas']):
        total += 1
        ground_truths = [answer['text'] for answer in qa1['answers']]
        prediction = qa2['answers'][0]['text']
        exact_match += metric_max_over_ground_truths(
            exact_match_score, prediction, ground_truths)
        f1 += metric_max_over_ground_truths(
            f1_score, prediction, ground_truths)
  return {'exact_match': 100.0 * exact_match / total,
          'f1': 100.0 * f1 / total}
def main(_):
  """Pick an evaluation mode from the supplied flags and print the result."""

  def load_dataset(path):
    # Dataset files wrap the articles under a top-level 'data' key.
    with gfile.Open(path) as f:
      return json.load(f)['data']

  def load_preds(path):
    with gfile.Open(path) as f:
      return json.load(f)

  if FLAGS.dataset_file and FLAGS.dataset_file2:
    result = evaluate_dataset_dataset(load_dataset(FLAGS.dataset_file),
                                      load_dataset(FLAGS.dataset_file2))
  elif FLAGS.prediction_file and FLAGS.prediction_file2:
    result = evaluate_preds_preds(load_preds(FLAGS.prediction_file),
                                  load_preds(FLAGS.prediction_file2))
  else:
    result = evaluate_dataset_preds(load_dataset(FLAGS.dataset_file),
                                    load_preds(FLAGS.prediction_file))
  print(json.dumps(result))
if __name__ == '__main__':
  # app.run parses the command-line flags and then invokes main(argv).
  app.run(main)
| [
"[email protected]"
]
| |
d4cd42d3ae31fd16daad07315d0e65f6e0c9b818 | bf73ff4441577074dee2225ac937fbbbf4e85fef | /pplbench/ppls/jags/inference.py | a09062be1785a08b5530f7b00494eb586446b37a | [
"MIT"
]
| permissive | rambam613/pplbench | 632878a359945fe64cf24489aa7669040727c672 | d69c652fc882ba50f56eb0cfaa3097d3ede295f9 | refs/heads/master | 2023-07-07T02:16:19.384357 | 2021-08-13T08:01:55 | 2021-08-13T08:03:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,413 | py | # Copyright(C) Facebook, Inc. and its affiliates. All Rights Reserved.
from typing import Dict, Type, cast
import numpy as np
import pyjags
import xarray as xr
from ..base_ppl_impl import BasePPLImplementation
from ..base_ppl_inference import BasePPLInference
from .base_jags_impl import BaseJagsImplementation
class MCMC(BasePPLInference):
    """Single-chain MCMC inference backed by JAGS (via pyjags)."""

    def __init__(
        self, impl_class: Type[BasePPLImplementation], model_attrs: Dict
    ) -> None:
        """Instantiate the JAGS model implementation with its attributes."""
        # We always expect a BaseJagsImplementation here
        self.impl_class = cast(Type[BaseJagsImplementation], impl_class)
        self.impl = self.impl_class(**model_attrs)

    def compile(self, seed: int, **compile_args):
        # JAGS doesn't have a separate compile step.
        # The model construction requires the actual data,
        # so everything has to be done under inference.
        pass

    def infer(  # type: ignore
        self,
        data: xr.Dataset,
        iterations: int,
        num_warmup: int,
        seed: int,
        RNG_name: str = "base::Mersenne-Twister",
    ) -> xr.Dataset:
        """
        See https://phoenixnap.dl.sourceforge.net/project/mcmc-jags/Manuals/4.x/jags_user_manual.pdf
        for JAGS documentation.

        :param data: PPLBench dataset
        :param iterations: total number of draws (warmup included)
        :param num_warmup: number of adaptive (warmup) steps given to JAGS
        :param seed: seed for random number generator
        :param RNG_name: the name of the random number generator
        :returns: samples dataset whose first num_warmup draws are NaN padding
        """
        model = pyjags.Model(
            code=self.impl.get_code(),
            data=self.impl.format_data_to_jags(data),
            chains=1,
            adapt=num_warmup,
            init={".RNG.seed": seed, ".RNG.name": RNG_name},
        )
        # JAGS only returns post-warmup draws, hence iterations - num_warmup.
        samples = model.sample(iterations - num_warmup, vars=self.impl.get_vars())
        # squeeze out the chain dimension from the samples (single chain)
        for varname in samples.keys():
            samples[varname] = samples[varname].squeeze(-1)
        samples = self.impl.extract_data_from_jags(samples)
        # because jags does not return warm up samples, we need to shift the coordinates
        # of the actual samples by num_warmup by padding with NaN
        samples = samples.assign_coords(draw=samples.draw + num_warmup)
        padding = xr.Dataset(coords={"draw": np.arange(num_warmup)})
        return padding.merge(samples)
| [
"[email protected]"
]
| |
1126769601d5e7319ee39abc278621ac96e499fd | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R1/benchmark/startCirq357.py | 5a0f49a2276e3598ae1db658dd2b3b7f1d4f5362 | [
"BSD-3-Clause"
]
| permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,236 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=3
# total number=64
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
    """Assemble the auto-generated benchmark circuit.

    :param n: qubit count requested by the caller. NOTE(review): not used by
        the gate list below, which only touches input_qubit[0..2] — confirm.
    :param input_qubit: sequence of cirq qubits the gates act on.
    :returns: a cirq.Circuit ending with a measurement keyed 'result'.
    """
    c = cirq.Circuit() # circuit begin

    c.append(cirq.H.on(input_qubit[0])) # number=1
    c.append(cirq.rx(-0.09738937226128368).on(input_qubit[2])) # number=2
    c.append(cirq.H.on(input_qubit[1])) # number=33
    c.append(cirq.Y.on(input_qubit[2])) # number=56
    c.append(cirq.CZ.on(input_qubit[2],input_qubit[1])) # number=34
    c.append(cirq.H.on(input_qubit[1])) # number=35
    c.append(cirq.H.on(input_qubit[1])) # number=3
    c.append(cirq.H.on(input_qubit[0])) # number=45
    c.append(cirq.CNOT.on(input_qubit[2],input_qubit[1])) # number=60
    c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=46
    c.append(cirq.H.on(input_qubit[0])) # number=47
    c.append(cirq.Y.on(input_qubit[1])) # number=15
    c.append(cirq.H.on(input_qubit[0])) # number=61
    c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=62
    c.append(cirq.H.on(input_qubit[0])) # number=63
    c.append(cirq.H.on(input_qubit[1])) # number=19
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[1])) # number=20
    c.append(cirq.rx(-0.6000441968356504).on(input_qubit[1])) # number=28
    c.append(cirq.H.on(input_qubit[1])) # number=21
    c.append(cirq.H.on(input_qubit[1])) # number=30
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[1])) # number=31
    c.append(cirq.H.on(input_qubit[1])) # number=32
    c.append(cirq.H.on(input_qubit[1])) # number=57
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[1])) # number=58
    c.append(cirq.H.on(input_qubit[1])) # number=59
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=51
    c.append(cirq.X.on(input_qubit[1])) # number=52
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=53
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=50
    c.append(cirq.H.on(input_qubit[2])) # number=29
    c.append(cirq.H.on(input_qubit[1])) # number=36
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[1])) # number=37
    c.append(cirq.Y.on(input_qubit[2])) # number=44
    c.append(cirq.H.on(input_qubit[1])) # number=38
    c.append(cirq.Z.on(input_qubit[1])) # number=55
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=18
    c.append(cirq.Z.on(input_qubit[1])) # number=11
    c.append(cirq.rx(-1.1780972450961724).on(input_qubit[2])) # number=54
    c.append(cirq.H.on(input_qubit[1])) # number=42
    c.append(cirq.H.on(input_qubit[0])) # number=39
    c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=40
    c.append(cirq.H.on(input_qubit[0])) # number=41
    c.append(cirq.CNOT.on(input_qubit[2],input_qubit[1])) # number=26
    c.append(cirq.Y.on(input_qubit[1])) # number=14
    c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=5
    c.append(cirq.X.on(input_qubit[1])) # number=6
    c.append(cirq.Z.on(input_qubit[1])) # number=8
    c.append(cirq.X.on(input_qubit[1])) # number=7
    c.append(cirq.H.on(input_qubit[2])) # number=43
    c.append(cirq.rx(-2.42845112122491).on(input_qubit[1])) # number=25
    # circuit end

    c.append(cirq.measure(*input_qubit, key='result'))

    return c
def bitstring(bits):
    """Render a sequence of bit-like values as a string of digits."""
    return ''.join(map(str, map(int, bits)))
if __name__ == '__main__':
    # NOTE(review): the file header says "qubit number=3" but four qubits
    # are allocated and measured here — confirm this is intentional.
    qubit_count = 4

    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
    circuit = make_circuit(qubit_count,input_qubits)
    # Rewrite the circuit into Sycamore-compatible gates.
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')

    circuit_sample_count =2000
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)

    # Histogram of measured bitstrings, keyed by the 'result' measurement.
    frequencies = result.histogram(key='result', fold_func=bitstring)
    writefile = open("../data/startCirq357.csv","w+")
    print(format(frequencies),file=writefile)
    print("results end", file=writefile)

    print(circuit.__len__(), file=writefile)
    print(circuit,file=writefile)

    writefile.close()
"[email protected]"
]
| |
aeddc1d7fbf8ea26f225c60088dd41b3447c6fbe | e00186e71a1f52b394315a0cbc27162254cfffb9 | /durga/full_durga/without_restm2/without_restm2/asgi.py | 40491539a6ce4db3dc21ec67f0020d7ead8ce036 | []
| no_license | anilkumar0470/git_practice | cf132eb7970c40d0d032520d43e6d4a1aca90742 | 588e7f654f158e974f9893e5018d3367a0d88eeb | refs/heads/master | 2023-04-27T04:50:14.688534 | 2023-04-22T05:54:21 | 2023-04-22T05:54:21 | 100,364,712 | 0 | 1 | null | 2021-12-08T19:44:58 | 2017-08-15T10:02:33 | Python | UTF-8 | Python | false | false | 405 | py | """
ASGI config for without_restm2 project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at this project's settings before building the application.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'without_restm2.settings')

# Module-level ASGI callable imported by ASGI servers (daphne/uvicorn).
application = get_asgi_application()
| [
"[email protected]"
]
| |
40d5a9ec148e9f1f27701264107fbae72bb213c7 | 025b1a25eedf5b03d091f683b0bd07c20eac953d | /telethon_generator/tl_generator.py | f0a14fbc1571f1f511c55a284b6179c304b792c3 | [
"MIT"
]
| permissive | phuonglm/Telethon | 64a97259afbf3a9cb09683094f6d8f0b3f1145c1 | 1f1e040af972e6948538d47fab5563303c2243eb | refs/heads/master | 2021-05-08T01:43:42.736010 | 2017-10-22T11:57:02 | 2017-10-22T11:57:02 | 107,896,646 | 0 | 0 | null | 2017-10-22T19:36:14 | 2017-10-22T19:36:14 | null | UTF-8 | Python | false | false | 27,624 | py | import os
import re
import shutil
import struct
from zlib import crc32
from collections import defaultdict
from .parser import SourceBuilder, TLParser, TLObject
# Module docstring stamped at the top of every generated .py file.
AUTO_GEN_NOTICE = \
    '"""File generated by TLObjects\' generator. All changes will be ERASED"""'
class TLGenerator:
    def __init__(self, output_dir):
        # Root directory that will hold the generated tl/functions and
        # tl/types packages plus the all_tlobjects.py index.
        self.output_dir = output_dir
    def _get_file(self, *paths):
        """Joins *paths* under the generator's output directory."""
        return os.path.join(self.output_dir, *paths)
def _rm_if_exists(self, filename):
file = self._get_file(filename)
if os.path.exists(file):
if os.path.isdir(file):
shutil.rmtree(file)
else:
os.remove(file)
def tlobjects_exist(self):
"""Determines whether the TLObjects were previously
generated (hence exist) or not
"""
return os.path.isfile(self._get_file('all_tlobjects.py'))
def clean_tlobjects(self):
"""Cleans the automatically generated TLObjects from disk"""
for name in ('functions', 'types', 'all_tlobjects.py'):
self._rm_if_exists(name)
    def generate_tlobjects(self, scheme_file, import_depth):
        """Generates all the TLObjects from scheme.tl to
        tl/functions and tl/types.

        :param scheme_file: path to the scheme.tl definition file.
        :param import_depth: how many packages deep the generated modules
            live, used to build the relative ('.' * depth) imports.
        """
        # First ensure that the required parent directories exist
        os.makedirs(self._get_file('functions'), exist_ok=True)
        os.makedirs(self._get_file('types'), exist_ok=True)
        # Step 0: Cache the parsed file on a tuple
        tlobjects = tuple(TLParser.parse_file(scheme_file, ignore_core=True))
        # Step 1: Group everything by {namespace: [tlobjects]} so we can
        # easily generate __init__.py files with all the TLObjects on them.
        namespace_functions = defaultdict(list)
        namespace_types = defaultdict(list)
        # Make use of this iteration to also store 'Type: [Constructors]',
        # used when generating the documentation for the classes.
        type_constructors = defaultdict(list)
        for tlobject in tlobjects:
            if tlobject.is_function:
                namespace_functions[tlobject.namespace].append(tlobject)
            else:
                namespace_types[tlobject.namespace].append(tlobject)
                type_constructors[tlobject.result].append(tlobject)
        # Step 2: Generate the actual code
        self._write_init_py(
            self._get_file('functions'), import_depth,
            namespace_functions, type_constructors
        )
        self._write_init_py(
            self._get_file('types'), import_depth,
            namespace_types, type_constructors
        )
        # Step 3: Once all the objects have been generated,
        # we can now group them in a single file
        filename = os.path.join(self._get_file('all_tlobjects.py'))
        with open(filename, 'w', encoding='utf-8') as file:
            with SourceBuilder(file) as builder:
                builder.writeln(AUTO_GEN_NOTICE)
                builder.writeln()
                builder.writeln('from . import types, functions')
                builder.writeln()
                # Create a constant variable to indicate which layer this is
                builder.writeln('LAYER = {}'.format(
                    TLParser.find_layer(scheme_file))
                )
                builder.writeln()
                # Then create the dictionary containing constructor_id: class
                builder.writeln('tlobjects = {')
                builder.current_indent += 1
                # Fill the dictionary (0x1a2b3c4f: tl.full.type.path.Class)
                for tlobject in tlobjects:
                    constructor = hex(tlobject.id)
                    if len(constructor) != 10:
                        # Make it a nice length 10 so it fits well
                        constructor = '0x' + constructor[2:].zfill(8)
                    builder.write('{}: '.format(constructor))
                    builder.write(
                        'functions' if tlobject.is_function else 'types')
                    if tlobject.namespace:
                        builder.write('.' + tlobject.namespace)
                    builder.writeln('.{},'.format(tlobject.class_name()))
                builder.current_indent -= 1
                builder.writeln('}')
    @staticmethod
    def _write_init_py(out_dir, depth, namespace_tlobjects, type_constructors):
        """Writes one generated module per namespace under out_dir.

        The empty namespace goes into __init__.py; every other namespace
        'ns' goes into ns.py. Each module gets its imports followed by
        one generated class per TLObject.
        """
        # namespace_tlobjects: {'namespace', [TLObject]}
        os.makedirs(out_dir, exist_ok=True)
        for ns, tlobjects in namespace_tlobjects.items():
            file = os.path.join(out_dir, ns + '.py' if ns else '__init__.py')
            with open(file, 'w', encoding='utf-8') as f, \
                    SourceBuilder(f) as builder:
                builder.writeln(AUTO_GEN_NOTICE)
                # Both types and functions inherit from the TLObject class
                # so they all can be serialized and sent, however, only the
                # functions are "content_related".
                builder.writeln(
                    'from {}.tl.tlobject import TLObject'.format('.' * depth)
                )
                if ns:
                    # Only import the parent types if we're not in such file
                    builder.writeln(
                        'from {}.tl import types'.format('.' * depth)
                    )
                # Add the relative imports to the namespaces,
                # unless we already are in a namespace.
                if not ns:
                    builder.writeln('from . import {}'.format(', '.join(
                        x for x in namespace_tlobjects.keys() if x
                    )))
                # Import 'get_input_*' utils
                # TODO Support them on types too
                if 'functions' in out_dir:
                    builder.writeln(
                        'from {}.utils import get_input_peer, '
                        'get_input_channel, get_input_user, '
                        'get_input_media, get_input_photo'.format('.' * depth)
                    )
                # Import 'os' for those needing access to 'os.urandom()'
                # Currently only 'random_id' needs 'os' to be imported,
                # for all those TLObjects with arg.can_be_inferred.
                builder.writeln('import os')
                # Import struct for the .__bytes__(self) serialization
                builder.writeln('import struct')
                # Generate the class for every TLObject
                for t in sorted(tlobjects, key=lambda x: x.name):
                    TLGenerator._write_source_code(
                        t, builder, depth, type_constructors
                    )
                    # _write_source_code leaves the last block open, so
                    # reset the indentation before the next class.
                    builder.current_indent = 0
@staticmethod
def _write_source_code(tlobject, builder, depth, type_constructors):
"""Writes the source code corresponding to the given TLObject
by making use of the 'builder' SourceBuilder.
Additional information such as file path depth and
the Type: [Constructors] must be given for proper
importing and documentation strings.
"""
builder.writeln()
builder.writeln()
builder.writeln('class {}(TLObject):'.format(tlobject.class_name()))
# Class-level variable to store its Telegram's constructor ID
builder.writeln('CONSTRUCTOR_ID = {}'.format(hex(tlobject.id)))
builder.writeln('SUBCLASS_OF_ID = {}'.format(
hex(crc32(tlobject.result.encode('ascii'))))
)
builder.writeln()
# Flag arguments must go last
args = [
a for a in tlobject.sorted_args()
if not a.flag_indicator and not a.generic_definition
]
# Convert the args to string parameters, flags having =None
args = [
(a.name if not a.is_flag and not a.can_be_inferred
else '{}=None'.format(a.name))
for a in args
]
# Write the __init__ function
if args:
builder.writeln(
'def __init__(self, {}):'.format(', '.join(args))
)
else:
builder.writeln('def __init__(self):')
# Now update args to have the TLObject arguments, _except_
# those which are calculated on send or ignored, this is
# flag indicator and generic definitions.
#
# We don't need the generic definitions in Python
# because arguments can be any type
args = [arg for arg in tlobject.args
if not arg.flag_indicator and
not arg.generic_definition]
if args:
# Write the docstring, to know the type of the args
builder.writeln('"""')
for arg in args:
if not arg.flag_indicator:
builder.writeln(':param {} {}:'.format(
arg.type_hint(), arg.name
))
builder.current_indent -= 1 # It will auto-indent (':')
# We also want to know what type this request returns
# or to which type this constructor belongs to
builder.writeln()
if tlobject.is_function:
builder.write(':returns {}: '.format(tlobject.result))
else:
builder.write('Constructor for {}: '.format(tlobject.result))
constructors = type_constructors[tlobject.result]
if not constructors:
builder.writeln('This type has no constructors.')
elif len(constructors) == 1:
builder.writeln('Instance of {}.'.format(
constructors[0].class_name()
))
else:
builder.writeln('Instance of either {}.'.format(
', '.join(c.class_name() for c in constructors)
))
builder.writeln('"""')
builder.writeln('super().__init__()')
# Functions have a result object and are confirmed by default
if tlobject.is_function:
builder.writeln('self.result = None')
builder.writeln(
'self.content_related = True')
# Set the arguments
if args:
# Leave an empty line if there are any args
builder.writeln()
for arg in args:
TLGenerator._write_self_assigns(builder, tlobject, arg, args)
builder.end_block()
# Write the to_dict(self) method
builder.writeln('def to_dict(self, recursive=True):')
if args:
builder.writeln('return {')
else:
builder.write('return {')
builder.current_indent += 1
base_types = ('string', 'bytes', 'int', 'long', 'int128',
'int256', 'double', 'Bool', 'true', 'date')
for arg in args:
builder.write("'{}': ".format(arg.name))
if arg.type in base_types:
if arg.is_vector:
builder.write('[] if self.{0} is None else self.{0}[:]'
.format(arg.name))
else:
builder.write('self.{}'.format(arg.name))
else:
if arg.is_vector:
builder.write(
'([] if self.{0} is None else [None'
' if x is None else x.to_dict() for x in self.{0}]'
') if recursive else self.{0}'.format(arg.name)
)
else:
builder.write(
'(None if self.{0} is None else self.{0}.to_dict())'
' if recursive else self.{0}'.format(arg.name)
)
builder.writeln(',')
builder.current_indent -= 1
builder.writeln("}")
builder.end_block()
# Write the .__bytes__() function
builder.writeln('def __bytes__(self):')
# Some objects require more than one flag parameter to be set
# at the same time. In this case, add an assertion.
repeated_args = defaultdict(list)
for arg in tlobject.args:
if arg.is_flag:
repeated_args[arg.flag_index].append(arg)
for ra in repeated_args.values():
if len(ra) > 1:
cnd1 = ('self.{}'.format(a.name) for a in ra)
cnd2 = ('not self.{}'.format(a.name) for a in ra)
builder.writeln(
"assert ({}) or ({}), '{} parameters must all "
"be False-y (like None) or all me True-y'".format(
' and '.join(cnd1), ' and '.join(cnd2),
', '.join(a.name for a in ra)
)
)
builder.writeln("return b''.join((")
builder.current_indent += 1
# First constructor code, we already know its bytes
builder.writeln('{},'.format(repr(struct.pack('<I', tlobject.id))))
for arg in tlobject.args:
if TLGenerator.write_to_bytes(builder, arg, tlobject.args):
builder.writeln(',')
builder.current_indent -= 1
builder.writeln('))')
builder.end_block()
# Write the static from_reader(reader) function
builder.writeln('@staticmethod')
builder.writeln('def from_reader(reader):')
for arg in tlobject.args:
TLGenerator.write_read_code(
builder, arg, tlobject.args, name='_' + arg.name
)
builder.writeln('return {}({})'.format(
tlobject.class_name(), ', '.join(
'{0}=_{0}'.format(a.name) for a in tlobject.sorted_args()
if not a.flag_indicator and not a.generic_definition
)
))
builder.end_block()
# Only requests can have a different response that's not their
# serialized body, that is, we'll be setting their .result.
if tlobject.is_function:
builder.writeln('def on_response(self, reader):')
TLGenerator.write_request_result_code(builder, tlobject)
builder.end_block()
# Write the __str__(self) and stringify(self) functions
builder.writeln('def __str__(self):')
builder.writeln('return TLObject.pretty_format(self)')
builder.end_block()
builder.writeln('def stringify(self):')
builder.writeln('return TLObject.pretty_format(self, indent=0)')
# builder.end_block() # No need to end the last block
    @staticmethod
    def _write_self_assigns(builder, tlobject, arg, args):
        """Writes the 'self.<arg> = ...' line of the generated __init__,
        inferring values (random_id) or auto-casting well-known input
        types through the get_input_* helpers where applicable.
        """
        if arg.can_be_inferred:
            # Currently the only argument that can be
            # inferred are those called 'random_id'
            if arg.name == 'random_id':
                # Endianness doesn't really matter, and 'big' is shorter
                code = "int.from_bytes(os.urandom({}), 'big', signed=True)"\
                    .format(8 if arg.type == 'long' else 4)
                if arg.is_vector:
                    # Currently for the case of "messages.forwardMessages"
                    # Ensure we can infer the length from id:Vector<>
                    if not next(a for a in args if a.name == 'id').is_vector:
                        raise ValueError(
                            'Cannot infer list of random ids for ', tlobject
                        )
                    code = '[{} for _ in range(len(id))]'.format(code)
                builder.writeln(
                    "self.random_id = random_id if random_id "
                    "is not None else {}".format(code)
                )
            else:
                raise ValueError('Cannot infer a value for ', arg)
        # Well-known cases, auto-cast it to the right type
        elif arg.type == 'InputPeer' and tlobject.is_function:
            TLGenerator.write_get_input(builder, arg, 'get_input_peer')
        elif arg.type == 'InputChannel' and tlobject.is_function:
            TLGenerator.write_get_input(builder, arg, 'get_input_channel')
        elif arg.type == 'InputUser' and tlobject.is_function:
            TLGenerator.write_get_input(builder, arg, 'get_input_user')
        elif arg.type == 'InputMedia' and tlobject.is_function:
            TLGenerator.write_get_input(builder, arg, 'get_input_media')
        elif arg.type == 'InputPhoto' and tlobject.is_function:
            TLGenerator.write_get_input(builder, arg, 'get_input_photo')
        else:
            builder.writeln('self.{0} = {0}'.format(arg.name))
    @staticmethod
    def write_get_input(builder, arg, get_input_code):
        """Writes the self-assignment that wraps *arg* in the given
        get_input_* helper (e.g. 'get_input_peer'), mapping over vector
        arguments and guarding optional flag arguments with
        '... if <arg> else None'. Returns None.
        """
        if arg.is_vector:
            builder.write('self.{0} = [{1}(_x) for _x in {0}]'
                          .format(arg.name, get_input_code))
        else:
            builder.write('self.{0} = {1}({0})'
                          .format(arg.name, get_input_code))
        builder.writeln(
            ' if {} else None'.format(arg.name) if arg.is_flag else ''
        )
@staticmethod
def get_file_name(tlobject, add_extension=False):
"""Gets the file name in file_name_format.py for the given TLObject"""
# Courtesy of http://stackoverflow.com/a/1176023/4759433
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', tlobject.name)
result = re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
if add_extension:
return result + '.py'
else:
return result
    @staticmethod
    def write_to_bytes(builder, arg, args, name=None):
        """
        Writes the .__bytes__() code for the given argument
        :param builder: The source code builder
        :param arg: The argument to write
        :param args: All the other arguments in TLObject same __bytes__.
                     This is required to determine the flags value
        :param name: The name of the argument. Defaults to "self.argname"
                     This argument is an option because it's required when
                     writing Vectors<>
        :returns: True if something was written, None for generic
                  definitions and true-flags (nothing serialized).
        """
        if arg.generic_definition:
            return  # Do nothing, this only specifies a later type
        if name is None:
            name = 'self.{}'.format(arg.name)
        # The argument may be a flag, only write if it's not None AND
        # if it's not a True type.
        # True types are not actually sent, but instead only used to
        # determine the flags.
        if arg.is_flag:
            if arg.type == 'true':
                return  # Exit, since True type is never written
            elif arg.is_vector:
                # Vector flags are special since they consist of 3 values,
                # so we need an extra join here. Note that empty vector flags
                # should NOT be sent either!
                builder.write("b'' if not {} else b''.join((".format(name))
            else:
                builder.write("b'' if not {} else (".format(name))
        if arg.is_vector:
            if arg.use_vector_id:
                # vector code, unsigned 0x1cb5c415 as little endian
                builder.write(r"b'\x15\xc4\xb5\x1c',")
            builder.write("struct.pack('<i', len({})),".format(name))
            # Cannot unpack the values for the outer tuple through *[(
            # since that's a Python >3.5 feature, so add another join.
            builder.write("b''.join(")
            # Temporarily disable .is_vector so the recursive call emits
            # the per-element serialization instead of recursing forever.
            # Also disable .is_flag since it's not needed per element.
            old_flag = arg.is_flag
            arg.is_vector = arg.is_flag = False
            TLGenerator.write_to_bytes(builder, arg, args, name='x')
            arg.is_vector = True
            arg.is_flag = old_flag
            builder.write(' for x in {})'.format(name))
        elif arg.flag_indicator:
            # Calculate the flags with those items which are not None
            builder.write("struct.pack('<I', {})".format(
                ' | '.join('({} if {} else 0)'.format(
                    1 << flag.flag_index, 'self.{}'.format(flag.name)
                ) for flag in args if flag.is_flag)
            ))
        elif 'int' == arg.type:
            # struct.pack is around 4 times faster than int.to_bytes
            builder.write("struct.pack('<i', {})".format(name))
        elif 'long' == arg.type:
            builder.write("struct.pack('<q', {})".format(name))
        elif 'int128' == arg.type:
            builder.write("{}.to_bytes(16, 'little', signed=True)".format(name))
        elif 'int256' == arg.type:
            builder.write("{}.to_bytes(32, 'little', signed=True)".format(name))
        elif 'double' == arg.type:
            builder.write("struct.pack('<d', {})".format(name))
        elif 'string' == arg.type:
            builder.write('TLObject.serialize_bytes({})'.format(name))
        elif 'Bool' == arg.type:
            # 0x997275b5 if boolean else 0xbc799737
            builder.write(
                r"b'\xb5ur\x99' if {} else b'7\x97y\xbc'".format(name)
            )
        elif 'true' == arg.type:
            pass  # These are actually NOT written! Only used for flags
        elif 'bytes' == arg.type:
            builder.write('TLObject.serialize_bytes({})'.format(name))
        elif 'date' == arg.type:  # Custom format
            # 0 if datetime is None else int(datetime.timestamp())
            builder.write(
                r"b'\0\0\0\0' if {0} is None else "
                r"struct.pack('<I', int({0}.timestamp()))".format(name)
            )
        else:
            # Else it may be a custom type
            builder.write('bytes({})'.format(name))
        if arg.is_flag:
            builder.write(')')
            if arg.is_vector:
                builder.write(')')  # We were using a tuple
        return True  # Something was written
    @staticmethod
    def write_read_code(builder, arg, args, name):
        """
        Writes the read code for the given argument, setting the
        arg.name variable to its read value.

        :param builder: The source code builder
        :param arg: The argument to write
        :param args: All the other arguments in TLObject same on_send.
                     This is required to determine the flags value
        :param name: The name of the argument. Defaults to "self.argname"
                     This argument is an option because it's required when
                     writing Vectors<>
        """
        if arg.generic_definition:
            return  # Do nothing, this only specifies a later type
        # The argument may be a flag, only write that flag was given!
        was_flag = False
        if arg.is_flag:
            # Treat 'true' flags as a special case, since they're true if
            # they're set, and nothing else needs to actually be read.
            if 'true' == arg.type:
                builder.writeln(
                    '{} = bool(flags & {})'.format(name, 1 << arg.flag_index)
                )
                return
            was_flag = True
            builder.writeln('if flags & {}:'.format(
                1 << arg.flag_index
            ))
            # Temporary disable .is_flag not to enter this if
            # again when calling the method recursively
            arg.is_flag = False
        if arg.is_vector:
            if arg.use_vector_id:
                # We have to read the vector's constructor ID
                builder.writeln("reader.read_int()")
            builder.writeln('{} = []'.format(name))
            builder.writeln('for _ in range(reader.read_int()):')
            # Temporary disable .is_vector, not to enter this if again
            arg.is_vector = False
            # Recurse to emit the per-element read into the local '_x'.
            TLGenerator.write_read_code(builder, arg, args, name='_x')
            builder.writeln('{}.append(_x)'.format(name))
            arg.is_vector = True
        elif arg.flag_indicator:
            # Read the flags, which will indicate what items we should read next
            builder.writeln('flags = reader.read_int()')
            builder.writeln()
        elif 'int' == arg.type:
            builder.writeln('{} = reader.read_int()'.format(name))
        elif 'long' == arg.type:
            builder.writeln('{} = reader.read_long()'.format(name))
        elif 'int128' == arg.type:
            builder.writeln(
                '{} = reader.read_large_int(bits=128)'.format(name)
            )
        elif 'int256' == arg.type:
            builder.writeln(
                '{} = reader.read_large_int(bits=256)'.format(name)
            )
        elif 'double' == arg.type:
            builder.writeln('{} = reader.read_double()'.format(name))
        elif 'string' == arg.type:
            builder.writeln('{} = reader.tgread_string()'.format(name))
        elif 'Bool' == arg.type:
            builder.writeln('{} = reader.tgread_bool()'.format(name))
        elif 'true' == arg.type:
            # Arbitrary not-None value, don't actually read "true" flags
            builder.writeln('{} = True'.format(name))
        elif 'bytes' == arg.type:
            builder.writeln('{} = reader.tgread_bytes()'.format(name))
        elif 'date' == arg.type:  # Custom format
            builder.writeln('{} = reader.tgread_date()'.format(name))
        else:
            # Else it may be a custom type
            if not arg.skip_constructor_id:
                builder.writeln('{} = reader.tgread_object()'.format(name))
            else:
                # Bare type: read with the concrete class, no constructor ID.
                builder.writeln('{} = types.{}.from_reader(reader)'.format(
                    name, TLObject.class_name_for(arg.type)))
        # End vector and flag blocks if required (if we opened them before)
        if arg.is_vector:
            builder.end_block()
        if was_flag:
            builder.current_indent -= 1
            builder.writeln('else:')
            builder.writeln('{} = None'.format(name))
            builder.current_indent -= 1
            # Restore .is_flag
            arg.is_flag = True
@staticmethod
def write_request_result_code(builder, tlobject):
"""
Writes the receive code for the given function
:param builder: The source code builder
:param tlobject: The TLObject for which the 'self.result = '
will be written
"""
if tlobject.result.startswith('Vector<'):
# Vector results are a bit special since they can also be composed
# of integer values and such; however, the result of requests is
# not parsed as arguments are and it's a bit harder to tell which
# is which.
if tlobject.result == 'Vector<int>':
builder.writeln('reader.read_int() # Vector id')
builder.writeln('count = reader.read_int()')
builder.writeln(
'self.result = [reader.read_int() for _ in range(count)]'
)
elif tlobject.result == 'Vector<long>':
builder.writeln('reader.read_int() # Vector id')
builder.writeln('count = reader.read_long()')
builder.writeln(
'self.result = [reader.read_long() for _ in range(count)]'
)
else:
builder.writeln('self.result = reader.tgread_vector()')
else:
builder.writeln('self.result = reader.tgread_object()')
| [
"[email protected]"
]
| |
8def5ea4fa1b536a7d27e5ee746a0d7eef26180f | 17a655d21d7ddaf8cf60e23055e107cb602bd9bc | /project/bookmarker/migrations/0001_initial.py | 7739e40db601749d17b8b704a596aa005e8e6a15 | []
| no_license | geofferyj/YouTubeVideoBookmarker | fedb6913a8c5118c0a51f011244233630cf6f58c | fbf10230c5184cd1479dddafbcfd3609d5ac98f1 | refs/heads/master | 2023-08-04T22:30:37.636957 | 2021-03-01T08:09:46 | 2021-03-01T08:09:46 | 278,203,783 | 0 | 0 | null | 2021-09-22T19:46:09 | 2020-07-08T22:05:00 | JavaScript | UTF-8 | Python | false | false | 4,185 | py | # Generated by Django 3.0.8 on 2020-08-09 19:13
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for the bookmarker app: creates the
    # Video model and its supporting per-user bookkeeping tables.
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        # Core model: a YouTube video keyed by its 11-character video id,
        # with serialized timestamp bookmarks and edit-locking metadata.
        migrations.CreateModel(
            name='Video',
            fields=[
                ('vid', models.CharField(max_length=11, primary_key=True, serialize=False)),
                ('timestamps', models.TextField(default='')),
                ('cost', models.PositiveIntegerField(default=0)),
                ('locked', models.BooleanField(default=False)),
                ('last_editor', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Per-user on/off flag for the voice-pause feature.
        migrations.CreateModel(
            name='VoicePause',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('has', models.BooleanField(default=False)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='voice_pause', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # One row per (user, video) view; uniqueness enforced by the
        # AddConstraint at the bottom of this migration.
        migrations.CreateModel(
            name='VideoViews',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='views', to=settings.AUTH_USER_MODEL)),
                ('video', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='views', to='bookmarker.Video')),
            ],
        ),
        # Association table linking users to the videos they have saved.
        migrations.CreateModel(
            name='UserVideo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='videos', to=settings.AUTH_USER_MODEL)),
                ('video', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='users', to='bookmarker.Video')),
            ],
        ),
        # Per-user token balance.
        migrations.CreateModel(
            name='Token',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('amount', models.PositiveIntegerField(default=0)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='tokens', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # PayPal-backed subscription record, one per user.
        migrations.CreateModel(
            name='Subscription',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('paid_until', models.DateTimeField(blank=True, null=True)),
                ('date_paid', models.DateTimeField(auto_now_add=True)),
                ('paypal_subscription_id', models.CharField(blank=True, max_length=64, null=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='subscription', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Per-video view counter that can be reset independently of VideoViews.
        migrations.CreateModel(
            name='ResetableViews',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('count', models.PositiveIntegerField(default=0)),
                ('video', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='rviews', to='bookmarker.Video')),
            ],
        ),
        # Enforce at most one VideoViews / UserVideo row per (user, video) pair.
        migrations.AddConstraint(
            model_name='videoviews',
            constraint=models.UniqueConstraint(fields=('user', 'video'), name='video_views_constraint'),
        ),
        migrations.AddConstraint(
            model_name='uservideo',
            constraint=models.UniqueConstraint(fields=('user', 'video'), name='user_video_constraint'),
        ),
    ]
| [
"[email protected]"
]
| |
6bc8cd61d8967b5a2a2913543f445ebafb4b8904 | 4237d975945a3e8fc427bc2aca6c4df80b668d62 | /Functions/calc/02-Calc.py | bd7491222cfc5a00386914d235508ca25ac8c16b | []
| no_license | ravi4all/Python_JuneMorningRegular | 36af0302af382b1a94cc9efc6af2fa1a099565fa | 5bd36a4be7579e65fbc862521c01042ca841e3cd | refs/heads/master | 2020-03-20T04:10:32.641007 | 2018-07-03T06:38:41 | 2018-07-03T06:38:41 | 137,173,202 | 0 | 4 | null | null | null | null | UTF-8 | Python | false | false | 600 | py | def add(x,y):
result = x + y
print("Addition is",result)
def sub(x, y):
    """Print the difference x - y."""
    print("Subtraction is", x - y)
def mul(x, y):
    """Print the product of x and y."""
    print("Multiplication is", x * y)
def div(x, y):
    """Print the quotient x / y.

    Guards against y == 0 so the calculator prints a message instead of
    crashing with an unhandled ZeroDivisionError.
    """
    if y == 0:
        print("Division is undefined (cannot divide by zero)")
        return
    print("Division is", x / y)
print("""
1. Add
2. Sub
3. Mul
4. Div
""")
user_choice = input("Enter your choice : ")
num_1 = int(input("Enter first number : "))
num_2 = int(input("Enter second number : "))

# Dispatch table mapping the menu choice to the matching operation.
todo = {
    '1' : add,
    '2' : sub,
    '3' : mul,
    '4' : div
}

# Bug fix: dict.get() returns None for an unknown choice, and calling
# None crashed with a TypeError. Validate the choice explicitly instead.
operation = todo.get(user_choice)
if operation is None:
    print("Invalid choice:", user_choice)
else:
    operation(num_1, num_2)
| [
"[email protected]"
]
| |
f09937e2a6f27c882a55d618d69bc747f10d2e4c | 11bcf60200aaf63704191205d27b52442a08212b | /demo/test_brower.py | 9deee2f98d69b84e498458899d9c8ffe08a66867 | []
| no_license | Timothyea/uri_pycharm | bf893748cd32a045cbaec34dae3f8dfa3a2605ff | a1dbe860ba3bcce460da4dd87ec9aebc43cf3499 | refs/heads/master | 2020-09-09T08:47:31.943627 | 2019-11-14T10:04:07 | 2019-11-14T10:04:07 | 221,404,034 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 452 | py | #!/user/bin/env python
# -*- conding:utf-8 -*-
from time import sleep
from selenium import webdriver
# Smoke test: open the browser and exercise basic navigation controls.
def test_brower(driver):
    """Visit two pages, then exercise back/forward/refresh on the driver.

    :param driver: an already-constructed selenium WebDriver instance
    """
    driver.get("http://www.baidu.com")
    sleep(1)
    driver.get("http://www.jd.com")
    sleep(1)
    driver.back()
    sleep(1)
    driver.forward()
    sleep(1)
    driver.refresh()
    sleep(1)
    # Close the current browser window without ending the driver session:
    # driver.close()
    # Close all windows and end the driver session:
    # driver.quit()
"[email protected]"
]
| |
5094b9b691ae257e044c3742035823ecc3da2227 | 28ef7c65a5cb1291916c768a0c2468a91770bc12 | /configs/body/2d_kpt_sview_rgb_img/topdown_heatmap/crowdpose/res152_crowdpose_384x288.py | 44a5db23c7771256902673f3b8449cd9471b3de7 | [
"Apache-2.0"
]
| permissive | bit-scientist/mmpose | 57464aae1ca87faf5a4669991ae1ea4347e41900 | 9671a12caf63ae5d15a9bebc66a9a2e7a3ce617e | refs/heads/master | 2023-08-03T17:18:27.413286 | 2021-09-29T03:48:37 | 2021-09-29T03:48:37 | 411,549,076 | 0 | 0 | Apache-2.0 | 2021-09-29T06:01:27 | 2021-09-29T06:01:26 | null | UTF-8 | Python | false | false | 4,087 | py | _base_ = ['../../../../_base_/datasets/crowdpose.py']
log_level = 'INFO'
load_from = None
resume_from = None
dist_params = dict(backend='nccl')
workflow = [('train', 1)]
checkpoint_config = dict(interval=10)
evaluation = dict(interval=10, metric='mAP')
optimizer = dict(
type='Adam',
lr=5e-4,
)
optimizer_config = dict(grad_clip=None)
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
step=[170, 200])
total_epochs = 210
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
channel_cfg = dict(
num_output_channels=14,
dataset_joints=14,
dataset_channel=[
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13],
],
inference_channel=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13])
# model settings
model = dict(
type='TopDown',
pretrained='torchvision://resnet152',
backbone=dict(type='ResNet', depth=152),
keypoint_head=dict(
type='TopdownHeatmapSimpleHead',
in_channels=2048,
out_channels=channel_cfg['num_output_channels'],
loss_keypoint=dict(type='JointsMSELoss', use_target_weight=True)),
train_cfg=dict(),
test_cfg=dict(
flip_test=True,
post_process='default',
shift_heatmap=True,
modulate_kernel=11))
data_cfg = dict(
image_size=[288, 384],
heatmap_size=[72, 96],
num_output_channels=channel_cfg['num_output_channels'],
num_joints=channel_cfg['dataset_joints'],
dataset_channel=channel_cfg['dataset_channel'],
inference_channel=channel_cfg['inference_channel'],
crowd_matching=False,
soft_nms=False,
nms_thr=1.0,
oks_thr=0.9,
vis_thr=0.2,
use_gt_bbox=False,
det_bbox_thr=0.0,
bbox_file='data/crowdpose/annotations/'
'det_for_crowd_test_0.1_0.5.json',
)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='TopDownRandomFlip', flip_prob=0.5),
dict(
type='TopDownHalfBodyTransform',
num_joints_half_body=6,
prob_half_body=0.3),
dict(
type='TopDownGetRandomScaleRotation', rot_factor=40, scale_factor=0.5),
dict(type='TopDownAffine'),
dict(type='ToTensor'),
dict(
type='NormalizeTensor',
mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225]),
dict(type='TopDownGenerateTarget', sigma=3),
dict(
type='Collect',
keys=['img', 'target', 'target_weight'],
meta_keys=[
'image_file', 'joints_3d', 'joints_3d_visible', 'center', 'scale',
'rotation', 'bbox_score', 'flip_pairs'
]),
]
val_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='TopDownAffine'),
dict(type='ToTensor'),
dict(
type='NormalizeTensor',
mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225]),
dict(
type='Collect',
keys=['img'],
meta_keys=[
'image_file', 'center', 'scale', 'rotation', 'bbox_score',
'flip_pairs'
]),
]
test_pipeline = val_pipeline
data_root = 'data/crowdpose'
data = dict(
samples_per_gpu=64,
workers_per_gpu=2,
val_dataloader=dict(samples_per_gpu=32),
test_dataloader=dict(samples_per_gpu=32),
train=dict(
type='TopDownCrowdPoseDataset',
ann_file=f'{data_root}/annotations/mmpose_crowdpose_trainval.json',
img_prefix=f'{data_root}/images/',
data_cfg=data_cfg,
pipeline=train_pipeline,
dataset_info={{_base_.dataset_info}}),
val=dict(
type='TopDownCrowdPoseDataset',
ann_file=f'{data_root}/annotations/mmpose_crowdpose_test.json',
img_prefix=f'{data_root}/images/',
data_cfg=data_cfg,
pipeline=val_pipeline,
dataset_info={{_base_.dataset_info}}),
test=dict(
type='TopDownCrowdPoseDataset',
ann_file=f'{data_root}/annotations/mmpose_crowdpose_test.json',
img_prefix=f'{data_root}/images/',
data_cfg=data_cfg,
pipeline=test_pipeline))
| [
"[email protected]"
]
| |
f4937a200488f1a80a92b5d267bdd363eb26490d | 5aec9b30005a8a5cc39da3c46ce65aa3e6710cfe | /tools/delete_queues.py | d499f5f82945a5b4422e2f65b1c1c2283cca3a81 | [
"MIT"
]
| permissive | cloudworkstation/cloudworkstation-api | 90d0726e712cd403fdbcd7c2b39ec9f1ee1890ad | 661500aaaa304db13e99d8365428520c2c77f5dd | refs/heads/main | 2023-04-13T07:54:47.717843 | 2021-04-09T20:09:57 | 2021-04-09T20:09:57 | 338,401,801 | 0 | 0 | null | 2021-04-06T09:05:39 | 2021-02-12T18:35:52 | Python | UTF-8 | Python | false | false | 393 | py | import boto3
sqs = boto3.client("sqs")
def find_and_remove():
    """Find every SQS queue whose name starts with ``ec2_`` and delete it.

    Follows the ``NextToken`` pagination marker returned by ``list_queues``
    so that more than one page (100 queues) of matches is handled, instead
    of silently stopping after the first page.
    """
    kwargs = {
        'QueueNamePrefix': 'ec2_',
        'MaxResults': 100,
    }
    while True:
        response = sqs.list_queues(**kwargs)
        # 'QueueUrls' is absent entirely when no queue matches the prefix.
        for qurl in response.get('QueueUrls', []):
            print(f"Going to delete queue @ {qurl} ...")
            sqs.delete_queue(
                QueueUrl=qurl
            )
            print("...Deleted.")
        next_token = response.get('NextToken')
        if not next_token:
            break
        kwargs['NextToken'] = next_token
if __name__ == "__main__":
    # Run the queue cleanup when executed as a script.
    find_and_remove()
"[email protected]"
]
| |
a0bf6e814b457e88e091de90c86fc61a779dc2b4 | ff4ca069f16041fd49f7c87216b9fdb5c0a5f658 | /UITest/Android/DxYcUiTest/Scripts/Clue/ClueCommentJumpTest.py | f6c06ce66942e809729d9f6555dc04e9073ee951 | []
| no_license | ban666/UITestForDx | cbaf86cca957d40151ae28e2dc81a3016cf778d4 | 5a3ccdc68651e648e7c838fc58b5a9d052a19f6b | refs/heads/master | 2021-01-12T01:36:06.206477 | 2017-02-17T09:09:27 | 2017-02-17T09:09:36 | 78,408,551 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,137 | py | # -*- coding: utf-8 -*-
__author__ = 'liaoben'
import sys
from appium import webdriver
from time import sleep
import unittest
from random import randint
sys.path.append('../../Lib')
import time
import os
from appium_lib import *
from dx_action import *
from ui_comment import *
from ChnlRequest import ChnlRequest
from DbLib import DbLib
from config import *
from loglib import log
from elements_id import *
from configrw import get_case
from TestlinkHandler import TestlinkHandler
from ui_clue import *
from BaoliaoRequest import BaoliaoRequest
class SubTypeTest(unittest.TestCase):
    """UI tests (Python 2 / Appium) for the comment button on clue lists."""
    def setUp(self):
        # Per-test fixture: create a reviewed clue via the API, then start
        # the Appium session and navigate the app to its index screen.
        #self.testcases = conf.readcfg(__file__)
        self.desired_caps = desired_caps
        print 'Test Start...................................'
        self.result = 'f'
        self.msg = ''
        self.tsl = TestlinkHandler()
        self.mode = MODE
        self.db = DbLib()
        self.clue = BaoliaoRequest()
        subtype_id = self.clue.get_clue_type()[0]['subtype']
        self.clue.send_clue_and_review(u'报料测试'+str(randint(1,100)),1,subtype_id)
        self.api = ChnlRequest(MODE)
        self.driver = webdriver.Remote(APPIUM_URL, self.desired_caps)
        start_to_index(self.driver,self.mode)
    def tearDown(self):
        # Report the test status back to TestLink and tear down the driver.
        print 'Test End...................................'
        try:
            self.tsl.set_tc_status(self.case_id,self.result,self.msg)
            self.driver.quit()
        except Exception as e:
            print u'测试失败,失败环节:tear down',e
    def common_check(self):
        # Case: a clue WITHOUT comments — tapping the comment button should
        # open the clue detail page (description element present).
        step = 1
        sleep(WAIT_TIME)
        go_to_clue(self.driver)
        #subtype_id = self.clue.get_clue_type()[0]['subtype'] # fetch the clue type id of the first section
        # current_time = time.strftime( ISOTIMEFORMAT, time.localtime() )
        # clue_content = 'test for comment button '+str(current_time)
        # self.clue.send_clue(clue_content,subtype_id) # send a clue through the API
        # clue_id = self.db.get_clueid_with_content_by_db(clue_content)
        comment_count = get_clue_comment_count(self.driver)
        assert int(comment_count) == 0
        # Tap the comment button to switch to the clue detail view
        self.driver.find_element_by_id(CLUE_LIST_COMMENT_BUTTON).click()
        assert self.driver.current_activity == ACTIVITY.get('clue')
        assert element_exsist(self.driver,'id',CLUE_LIST_DESC)
        print u'Step %s:报料无评论时,点击列表中评论按钮进入报料正文页:OK' % (str(step))
        step+=1
        return True
    def comment_jump_check(self):
        # Case: a clue WITH comments — tapping the comment button should
        # jump straight to the comment anchor (description not visible).
        step = 1
        sleep(WAIT_TIME)
        subtype_id = self.clue.get_clue_type()[0]['subtype']
        clue_id = self.clue.get_clue_list(subtype_id)[0]['cid']
        for i in range(50):
            self.api.send_comment(clue_id,'test for comment anchor')
        go_to_clue(self.driver)
        slide_down(self.driver)
        comment_count = get_clue_comment_count(self.driver)
        assert int(comment_count) != 0
        # Tap the comment button to jump to the comment anchor
        self.driver.find_element_by_id(CLUE_LIST_COMMENT_BUTTON).click()
        assert self.driver.current_activity == ACTIVITY.get('clue')
        assert element_exsist(self.driver,'id',CLUE_LIST_DESC) == False
        print u'Step %s:报料有评论时,点击列表中报料的评论按钮,跳转到评论锚点:OK' % (str(step))
        step+=1
        return True
    #excute TestCase
    def test(self):
        self.case_id = get_case(__file__)
        self.result = self.common_check()
    def testCommentJump(self):
        self.case_id = get_case(__file__)
        self.result = self.comment_jump_check()
if __name__ == '__main__':
    # Run this module's tests directly and write an HTML report.
    # Bug fixes: the suite referenced the undefined name 'TestComment'
    # (the class in this file is SubTypeTest), and the runner code sat
    # OUTSIDE the __main__ guard, so it executed on every import.
    import HTMLTestRunner
    t = unittest.TestSuite()
    t.addTest(unittest.makeSuite(SubTypeTest))
    #unittest.TextTestRunner.run(t)
    filename = 'F:\\dx_comment.html'
    fp = file(filename,'wb')
    runner = HTMLTestRunner.HTMLTestRunner(
                stream = fp,
                title ='Dx_Test',
                description = 'Report_discription')
    runner.run(t)
    fp.close()
| [
"[email protected]"
]
| |
677a627128ff0a1f1a70ef8820e3a44a8fcb4eb8 | 0c8e4931426819fbd631fccca93b0159c55c8b9d | /backend/manage.py | e3f233fb08b1de2ea65130d9fba875f0ab26ace7 | [
"Apache-2.0"
]
| permissive | franklingu/dota2_explorer | c750d144e19239b1aa0d8ad8c47e781312fcfc05 | d5c0cd03b77bb4158084f4f81cfc8073977e13a8 | refs/heads/master | 2020-03-22T09:08:40.338681 | 2018-07-12T03:53:34 | 2018-07-12T03:53:34 | 139,818,331 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 541 | py | #!/usr/bin/env python
import os
import sys
def main():
    """Run Django's command-line utility with this project's settings."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dota2site.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
aca85ae0f0ade02fa6312a6c4f8e8e7885adb341 | 2a9b3bf8758c1199305a01c524be78b7287335b8 | /plugins/ga/bidsaspx/riverdale.py | 2693f804d7cf9e493c147f0583e28a7f4e6ca6c5 | []
| no_license | thayton/bidmap | e222f34701c15d4694f1f51999ecc9d894abfe41 | de279cd64f66c79b253b38101c8ccdf748e540ac | refs/heads/master | 2021-01-21T17:03:14.317309 | 2014-09-26T18:27:45 | 2014-09-26T18:27:45 | 17,402,509 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | from bidmap.bidscrapers.bidsaspx.bidsaspx import BidsAspxBidScraper
# Site metadata consumed by BidsAspxBidScraper: display name/location plus
# the home and bids pages of Riverdale's CivicPlus-hosted site.
GOVINFO = {
    'name': 'Riverdale Georgia',
    'location': 'Riverdale, GA',
    'home_page_url': 'http://ga-riverdale2.civicplus.com',
    'bids_page_url': 'http://ga-riverdale2.civicplus.com/bids.aspx'
}
def get_scraper():
    """Return a BidsAspxBidScraper configured for the Riverdale, GA site."""
    return BidsAspxBidScraper(GOVINFO)
if __name__ == '__main__':
    # Allow running this plugin standalone to scrape the site immediately.
    bid_scraper = get_scraper()
    bid_scraper.scrape_bids()
| [
"[email protected]"
]
| |
33fa998b0134c61d8a91afe8f58eb57cf3ac5284 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R1/benchmark/startQiskit_Class421.py | 0402b6f229c87df1eed51ed312ab0549e1260aab | [
"BSD-3-Clause"
]
| permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,399 | py | # qubit number=3
# total number=74
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
    """XOR two equal-length bit strings position-wise and return the
    result with its character order reversed."""
    bits = [str(int(s[i]) ^ int(t[i])) for i in range(len(s))]
    return ''.join(reversed(bits))
def bitwise_dot(s: str, t: str) -> str:
    """Mod-2 dot product of two bit strings, returned as '0' or '1'."""
    total = sum(int(s[i]) * int(t[i]) for i in range(len(s)))
    return str(total % 2)
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Build the phase oracle O_f for the boolean function ``f`` over n bits.

    For every n-bit input whose f-value is "1", the inputs are mapped to
    the all-ones pattern with X gates, a multi-controlled Toffoli flips the
    target, and the X gates are undone.
    """
    # implement the oracle O_f
    # NOTE: use multi_control_toffoli_gate ('noancilla' mode)
    # https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
    # https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
    # https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")
    for i in range(2 ** n):
        rep = np.binary_repr(i, n)
        if f(rep) == "1":
            # Sandwich the MCT in X gates so it fires exactly on input 'rep'.
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
            oracle.mct(controls, target[0], None, mode='noancilla')
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
    # oracle.barrier()
    # oracle.draw('mpl', filename=(kernel + '-oracle.png'))
    return oracle
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Assemble a Bernstein-Vazirani circuit (with extra generated gates)
    on n input qubits plus one ancilla, using ``f`` for the oracle."""
    # implement the Bernstein-Vazirani circuit
    # NOTE(review): 'zero'/'b' below are computed but never used afterwards;
    # likely a leftover of the generator — confirm before removing.
    zero = np.binary_repr(0, n)
    b = f(zero)
    # initial n + 1 bits
    input_qubit = QuantumRegister(n+1, "qc")
    classicals = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classicals)
    # inverse last one (can be omitted if using O_f^\pm)
    prog.x(input_qubit[n])
    # circuit begin — auto-generated gate sequence (numbered by the generator)
    prog.h(input_qubit[1]) # number=1
    prog.h(input_qubit[1]) # number=70
    prog.rx(-0.09738937226128368,input_qubit[2]) # number=2
    prog.h(input_qubit[1]) # number=33
    prog.y(input_qubit[2]) # number=56
    prog.cz(input_qubit[2],input_qubit[1]) # number=34
    prog.h(input_qubit[1]) # number=35
    prog.h(input_qubit[1]) # number=3
    # apply H to get superposition
    for i in range(n):
        prog.h(input_qubit[i])
    prog.h(input_qubit[n])
    prog.barrier()
    # apply oracle O_f
    oracle = build_oracle(n, f)
    prog.append(
        oracle.to_gate(),
        [input_qubit[i] for i in range(n)] + [input_qubit[n]])
    # apply H back (QFT on Z_2^n)
    for i in range(n):
        prog.h(input_qubit[i])
    prog.barrier()
    # measure
    return prog
def get_statevector(prog: QuantumCircuit) -> Any:
    """Simulate ``prog`` and return its statevector as a dict mapping
    basis-state labels like "|01>" to complex amplitudes."""
    state_backend = Aer.get_backend('statevector_simulator')
    statevec = execute(prog, state_backend).result()
    quantum_state = statevec.get_statevector()
    # Number of qubits recovered from the statevector length (2**qubits).
    qubits = round(log2(len(quantum_state)))
    quantum_state = {
        "|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
        for i in range(2 ** qubits)
    }
    return quantum_state
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
    """Run ``prog`` on the named Aer backend and return measurements, the
    statevector, the most frequent (bit-reversed) outcome ``a`` and ``b``."""
    # Q: which backend should we use?
    # get state vector
    quantum_state = get_statevector(prog)
    # get simulate results
    # provider = IBMQ.load_account()
    # backend = provider.get_backend(backend_str)
    # qobj = compile(prog, backend, shots)
    # job = backend.run(qobj)
    # job.result()
    backend = Aer.get_backend(backend_str)
    # transpile/schedule -> assemble -> backend.run
    results = execute(prog, backend, shots=shots).result()
    counts = results.get_counts()
    # Most common bitstring, reversed to match qubit ordering.
    a = Counter(counts).most_common(1)[0][0][::-1]
    return {
        "measurements": counts,
        # "state": statevec,
        "quantum_state": quantum_state,
        "a": a,
        "b": b
    }
def bernstein_test_1(rep: str):
    """011 . x + 1"""
    return bitwise_xor(bitwise_dot("011", rep), "1")
def bernstein_test_2(rep: str):
    """000 . x + 0"""
    return bitwise_xor(bitwise_dot("000", rep), "0")
def bernstein_test_3(rep: str):
    """111 . x + 1"""
    return bitwise_xor(bitwise_dot("111", rep), "1")
if __name__ == "__main__":
    # Bernstein-Vazirani instance: n = 2 qubits, hidden string a = "11",
    # bias b = "1"; f(x) = a.x + b (mod 2).
    n = 2
    a = "11"
    b = "1"
    f = lambda rep: \
        bitwise_xor(bitwise_dot(a, rep), b)
    prog = build_circuit(n, f)
    sample_shot = 4000
    # Use a context manager so the results file is closed even if the
    # simulation below raises (the original leaked the handle on error).
    with open("../data/startQiskit_Class421.csv", "w") as writefile:
        # prog.draw('mpl', filename=(kernel + '.png'))
        backend = BasicAer.get_backend('statevector_simulator')
        circuit1 = transpile(prog, FakeYorktown())
        circuit1.h(qubit=2)
        circuit1.x(qubit=3)
        info = execute(circuit1, backend=backend, shots=sample_shot).result().get_counts()
        print(info, file=writefile)
        print("results end", file=writefile)
        print(circuit1.depth(), file=writefile)
        print(circuit1, file=writefile)
| [
"[email protected]"
]
| |
dd79604e064e324c9050e70571fb46da441ef806 | c91775afdc25f8897c6839cf8294869f3e928083 | /PythonFiles/snowmass_cfg_TTBAR_14TEV_1100_1700_Conf3_14.py | 975b02de9db2d2cd0ffe7c63cb30d2b858426578 | []
| no_license | Saptaparna/Miscellaneous | 7e6df9cdfd10d4861e2e382b1837dbd4c26fb249 | b954189d85e56a02fe257b5f5cbd779365719c00 | refs/heads/master | 2021-01-23T13:29:30.283308 | 2017-12-20T08:26:37 | 2017-12-20T08:26:37 | 42,525,018 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,287 | py | import FWCore.ParameterSet.Config as cms
import FWCore.PythonUtilities.LumiList as LumiList
import FWCore.ParameterSet.Types as CfgTypes
#
# Parameters that can be set via command line
# when submitting Condor jobs
#
isMc_settable = True
isSignalMc_settable = False
def FindFile(name):
fname = 'file.txt'
return fname
process = cms.Process("LJMetCom")
##################################################################
#
# All input files needed for the job to run
# Specify them here, and they will automatically be correctly
# transferred to Condor when needed
# NOTE: you can define as many or as few entries as you wish,
# names are up to you
miscFiles = {}
miscFiles['jec_uncertainty'] = '../cond/Summer12_V2_DATA_AK5PF_UncertaintySources.txt'
miscFiles['btag_performance'] = '../cond/btag_performance_db062012.root'
miscFiles['json'] = '../data/json/Cert_190456-208686_8TeV_PromptReco_Collisions12_JSON.txt'
miscFiles['MCL1JetPar'] = '../data/START53_V7G_L1FastJet_AK5PFchs.txt'
miscFiles['MCL2JetPar'] = '../data/START53_V7G_L2Relative_AK5PFchs.txt'
miscFiles['MCL3JetPar'] = '../data/START53_V7G_L3Absolute_AK5PFchs.txt'
miscFiles['DataL1JetPar'] = '../data/FT_53_V10_AN3_L1FastJet_AK5PFchs.txt'
miscFiles['DataL2JetPar'] = '../data/FT_53_V10_AN3_L2Relative_AK5PFchs.txt'
miscFiles['DataL3JetPar'] = '../data/FT_53_V10_AN3_L3Absolute_AK5PFchs.txt'
miscFiles['DataResJetPar'] = '../data/FT_53_V10_AN3_L2L3Residual_AK5PFchs.txt'
#Arguments from condor submit script which are used more than once
condorIsMC = bool(True)
relBase = str('/uscms_data/d2/sapta/work/LJMetCode_fromGena/Dilepton_Feb25/CMSSW_5_3_7_patch4')
condorJSON = str('None')
# Dilepton calculator options
process.load('LJMet.Com.DileptonCalc_cfi')
process.DileptonCalc.isMc = condorIsMC
process.DileptonCalc.dataType = cms.string('None')
############################################################
#
# FWLite application options
#
process.ljmet = cms.PSet(
isMc = cms.bool(condorIsMC),
runs = cms.vint32([]),
verbosity = cms.int32(0)
)
#Exclude unnecessary calculators
process.ljmet.excluded_calculators = cms.vstring(
'WprimeCalc',
'LjetsTopoCalc',
'LjetsTopoCalcNew',
'StopCalc'
)
############################################################
#
# common calculator options
process.load('LJMet.Com.commonCalc_cfi')
process.CommonCalc.dummy_parameter = cms.string('Dummy parameter value')
############################################################
#
# pileup calculator options
process.load('LJMet.Com.pileupCalc_cfi')
process.PileUpCalc.verbosity = process.ljmet.verbosity
############################################################
#
# Event selector options
#
process.event_selector = cms.PSet(
selection = cms.string('DileptonSelector'),
isMc = cms.bool(condorIsMC),
# cuts
#HLT
trigger_cut = cms.bool(True),
dump_trigger = cms.bool(False),
#Can use same trigger paths for data and MC since MC is always one of the data versions
trigger_path_ee = cms.vstring('HLT_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v15',
'HLT_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v16',
'HLT_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v17',
'HLT_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v18',
'HLT_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v19'),
trigger_path_em = cms.vstring('HLT_Mu8_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v4', 'HLT_Mu8_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v5',
'HLT_Mu8_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v6', 'HLT_Mu8_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v7',
'HLT_Mu8_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v8', 'HLT_Mu8_Ele17_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v9',
'HLT_Mu17_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v4', 'HLT_Mu17_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v5',
'HLT_Mu17_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v6', 'HLT_Mu17_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v7',
'HLT_Mu17_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v8', 'HLT_Mu17_Ele8_CaloIdT_CaloIsoVL_TrkIdVL_TrkIsoVL_v9'),
trigger_path_mm = cms.vstring('HLT_Mu17_Mu8_v16', 'HLT_Mu17_Mu8_v17', 'HLT_Mu17_Mu8_v18',
'HLT_Mu17_Mu8_v19', 'HLT_Mu17_Mu8_v21', 'HLT_Mu17_Mu8_v22',
'HLT_Mu17_TkMu8_v9', 'HLT_Mu17_TkMu8_v10', 'HLT_Mu17_TkMu8_v11',
'HLT_Mu17_TkMu8_v12', 'HLT_Mu17_TkMu8_v13', 'HLT_Mu17_TkMu8_v14'),
pv_cut = cms.bool(False),
hbhe_cut = cms.bool(False),
jet_cuts = cms.bool(False),
jet_minpt = cms.double(20.0),
jet_maxeta = cms.double(5),
min_jet = cms.int32(0),
max_jet = cms.int32(4000),
muon_cuts = cms.bool(True),
min_muon = cms.int32(0),
muon_minpt = cms.double(10.0),
muon_maxeta = cms.double(4.0),
max_muon = cms.int32(20),
electron_cuts = cms.bool(True),
min_electron = cms.int32(0),
electron_minpt = cms.double(10.0),
electron_maxeta = cms.double(4.0),
max_electron = cms.int32(20),
min_lepton = cms.int32(2),
met_cuts = cms.bool(False),
min_met = cms.double(0.0),
btag_cuts = cms.bool(False),
btagOP = cms.string("CSVM"),
btag_1 = cms.bool(True),
btag_2 = cms.bool(True),
btag_3 = cms.bool(False),
trigger_collection = cms.InputTag('TriggerResults::HLT'),
pv_collection = cms.InputTag('goodOfflinePrimaryVertices'),
jet_collection = cms.InputTag('goodPatJetsPFlow'),
muon_collection = cms.InputTag('selectedPatMuonsPFlowLoose'),
electron_collection = cms.InputTag('selectedPatElectronsPFlowLoose'),
met_collection = cms.InputTag('patMETsPFlow'),
JEC_txtfile = cms.string(miscFiles['jec_uncertainty']),
JECup = cms.bool(False),
JECdown = cms.bool(False),
JERup = cms.bool(False),
JERdown = cms.bool(False),
BTagUncertUp = cms.bool(False),
BTagUncertDown = cms.bool(True),
do53xJEC = cms.bool(True),
MCL1JetPar = cms.string(miscFiles['MCL1JetPar']),
MCL2JetPar = cms.string(miscFiles['MCL2JetPar']),
MCL3JetPar = cms.string(miscFiles['MCL3JetPar']),
DataL1JetPar = cms.string(miscFiles['DataL1JetPar']),
DataL2JetPar = cms.string(miscFiles['DataL2JetPar']),
DataL3JetPar = cms.string(miscFiles['DataL3JetPar']),
DataResJetPar = cms.string(miscFiles['DataResJetPar']),
keepFullMChistory = cms.bool(True)
)
##################################################################
#
# Input files
#
# NOTE: keep your test inputs in the python files as in
# this example, and they will be correctly substituted with
# specified input events when you submit to Condor
# (
#
# nEvents and skipEvents are for interactive use, their
# values will be correctly reset when you submit Condor
#
input_module = 'LJMet.Com.TTBAR_14TEV_1100_1700_Conf3_14'
process.load(input_module)
process.inputs.nEvents = cms.int32(-1)
process.inputs.skipEvents = cms.int32(0)
############################################################
#
# JSON
JsonFile = miscFiles['json']
myList = LumiList.LumiList(filename=JsonFile).getCMSSWString().split(',')
if not condorIsMC:
process.inputs.lumisToProcess.extend(myList)
#######################################################
#
# Output
#
process.outputs = cms.PSet (
outputName = cms.string('TTBAR_14TEV_1100_1700_Conf3_14'),
treeName = cms.string('ljmet'),
)
#######################################################
#
# Object selector options
#
# Primary vertex
process.load('PhysicsTools.SelectorUtils.pvSelector_cfi')
process.pvSelector.pvSrc = cms.InputTag('goodOfflinePrimaryVertices')
process.pvSelector.minNdof = cms.double(4.0)
process.pvSelector.maxZ = cms.double(24.0)
process.pvSelector.maxRho = cms.double(2.0)
# jets
process.load('PhysicsTools.SelectorUtils.pfJetIDSelector_cfi')
process.pfJetIDSelector.version = cms.string('FIRSTDATA')
process.pfJetIDSelector.quality = cms.string('LOOSE')
| [
"[email protected]"
]
| |
929f9c62dc239284ea660a83c74694b12e148494 | 2aba62d66c2c622bdc148cef451da76cae5fd76c | /exercise/crawler_python_dm1920/ch4/ch4_19.py | 5dc74250957efd13c6fc8b6557fdcafa5be35fb0 | []
| no_license | NTUT-109AB8011/crawler | 6a76de2ab1848ebc8365e071e76c08ca7348be62 | a703ec741b48d3af615a757fed7607b1f8eb66a6 | refs/heads/master | 2023-03-26T22:39:59.527175 | 2021-03-30T03:29:22 | 2021-03-30T03:29:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 815 | py | # ch4_19.py
# Dual-axis bar-less line chart: population (left axis) and area (right axis)
# for six cities, drawn from a pandas DataFrame via matplotlib.
import pandas as pd
import matplotlib.pyplot as plt
# Raw city statistics; 'town' doubles as the DataFrame index below.
cities = {'population':[10000000,8500000,8000000,15000000,6000000,8000000],
          'area':[400, 500, 850, 300, 200, 320],
          'town':['New York','Chicago','Bangkok','Tokyo',
                  'Singapore','HongKong']}
tw = pd.DataFrame(cities, columns=['population','area'],index=cities['town'])
fig, ax = plt.subplots()
fig.suptitle("City Statistics")
ax.set_ylabel("Population")
ax.set_xlabel("City")
# Second y-axis sharing the same x-axis, so area and population can use
# independent scales on one figure.
ax2 = ax.twinx()
ax2.set_ylabel("Area")
tw['population'].plot(ax=ax,rot=90) # draw the population line
tw['area'].plot(ax=ax2, style='g-') # draw the area line (green)
ax.legend(loc=1) # legend in the upper right
ax2.legend(loc=2) # legend in the upper left
plt.show()
| [
"[email protected]"
]
| |
ce4f926c0d9fd607823601ec9b413db31759d133 | 456433ac78b70cb8ae076ae166a85e349f181d7f | /systems/KURSSKLAD/KURSTERM/WORKPALLET/templates/U3S/palletFPCreate.py | 41b8299e8eda2772fe58df1b34a69b642eaebc48 | []
| no_license | shybkoi/WMS-Demo | 854c1679b121c68323445b60f3992959f922be8d | 2525559c4f56654acfbc21b41b3f5e40387b89e0 | refs/heads/master | 2021-01-23T01:51:20.074825 | 2017-03-23T11:51:18 | 2017-03-23T11:51:18 | 85,937,726 | 0 | 0 | null | null | null | null | WINDOWS-1251 | Python | false | false | 12,048 | py | #!/usr/bin/env python
# -*- coding: cp1251 -*-
##################################################
## DEPENDENCIES
import sys
import os
import os.path
from os.path import getmtime, exists
import time
import types
import __builtin__
from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion
from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple
from Cheetah.Template import Template
from Cheetah.DummyTransaction import DummyTransaction
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
from Cheetah.CacheRegion import CacheRegion
import Cheetah.Filters as Filters
import Cheetah.ErrorCatchers as ErrorCatchers
from systems.KURSSKLAD.KURSTERM.templates.main import main
from systems.KURSSKLAD.cheetahutils import viewQuantity
from systems.KURSSKLAD.cheetahutils import TimeStampToDate
##################################################
## MODULE CONSTANTS
try:
True, False
except NameError:
True, False = (1==1), (1==0)
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
currentTime=time.time
__CHEETAH_version__ = '2.0rc8'
__CHEETAH_versionTuple__ = (2, 0, 0, 'candidate', 8)
__CHEETAH_genTime__ = 1482336171.592
__CHEETAH_genTimestamp__ = 'Wed Dec 21 18:02:51 2016'
__CHEETAH_src__ = 'systems\\KURSSKLAD\\KURSTERM\\WORKPALLET\\templates\\U3S\\palletFPCreate.tmpl'
__CHEETAH_srcLastModified__ = 'Wed Dec 21 15:22:10 2016'
__CHEETAH_docstring__ = 'Autogenerated by CHEETAH: The Python-Powered Template Engine'
if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
raise AssertionError(
'This template was compiled with Cheetah version'
' %s. Templates compiled before version %s must be recompiled.'%(
__CHEETAH_version__, RequiredCheetahVersion))
##################################################
## CLASSES
class palletFPCreate(main):
##################################################
## CHEETAH GENERATED METHODS
def __init__(self, *args, **KWs):
main.__init__(self, *args, **KWs)
if not self._CHEETAH__instanceInitialized:
cheetahKWArgs = {}
allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split()
for k,v in KWs.items():
if k in allowedKWs: cheetahKWArgs[k] = v
self._initCheetahInstance(**cheetahKWArgs)
def mainData(self, **KWS):
## CHEETAH: generated from #def mainData at line 7, col 1.
trans = KWS.get("trans")
if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
trans = self.transaction # is None unless self.awake() was called
if not trans:
trans = DummyTransaction()
_dummyTrans = True
else: _dummyTrans = False
write = trans.response().write
SL = self._CHEETAH__searchList
_filter = self._CHEETAH__currentFilter
########################################
## START - generated method body
write(''' <b>(''')
_v = VFFSL(SL,"WCODE",True) # '$WCODE' on line 8, col 9
if _v is not None: write(_filter(_v, rawExpr='$WCODE')) # from line 8, col 9.
write(''')</b> ''')
_v = VFFSL(SL,"WNAME",True) # '$WNAME' on line 8, col 21
if _v is not None: write(_filter(_v, rawExpr='$WNAME')) # from line 8, col 21.
write('''
<br><br>
''')
if False:
_('На паллете')
_v = VFFSL(SL,"_",False)('На паллете') # "$_('\xcd\xe0 \xef\xe0\xeb\xeb\xe5\xf2\xe5')" on line 10, col 5
if _v is not None: write(_filter(_v, rawExpr="$_('\xcd\xe0 \xef\xe0\xeb\xeb\xe5\xf2\xe5')")) # from line 10, col 5.
write(''': ''')
_v = VFFSL(SL,"viewQuantity",False)(VFFSL(SL,"WQ",True),VFFSL(SL,"VWUFACTOR",True),VFFSL(SL,"VWUCODE",True),VFFSL(SL,"MWUFACTOR",True),VFFSL(SL,"MWUCODE",True)) # '$viewQuantity($WQ,$VWUFACTOR,$VWUCODE,$MWUFACTOR,$MWUCODE)' on line 10, col 23
if _v is not None: write(_filter(_v, rawExpr='$viewQuantity($WQ,$VWUFACTOR,$VWUCODE,$MWUFACTOR,$MWUCODE)')) # from line 10, col 23.
write(''' (<b><u>''')
_orig_filter_53834588 = _filter
filterName = 'Quantity'
if self._CHEETAH__filters.has_key("Quantity"):
_filter = self._CHEETAH__currentFilter = self._CHEETAH__filters[filterName]
else:
_filter = self._CHEETAH__currentFilter = \
self._CHEETAH__filters[filterName] = getattr(self._CHEETAH__filtersLib, filterName)(self).filter
_v = VFFSL(SL,"WQ",True) # '$WQ' on line 10, col 105
if _v is not None: write(_filter(_v, rawExpr='$WQ')) # from line 10, col 105.
_filter = _orig_filter_53834588
write('''</u></b>)<br>
''')
_v = VFFSL(SL,"TimeStampToDate",False)(VFFSL(SL,"PRODUCTDATE",True)) # '$TimeStampToDate($PRODUCTDATE)' on line 11, col 5
if _v is not None: write(_filter(_v, rawExpr='$TimeStampToDate($PRODUCTDATE)')) # from line 11, col 5.
write(''' - ''')
_v = VFFSL(SL,"TimeStampToDate",False)(VFFSL(SL,"BESTBEFOREDATE",True)) # '$TimeStampToDate($BESTBEFOREDATE)' on line 11, col 38
if _v is not None: write(_filter(_v, rawExpr='$TimeStampToDate($BESTBEFOREDATE)')) # from line 11, col 38.
write('''
<hr>
''')
if VFFSL(SL,"varExists",False)('$FeatureId'): # generated from line 14, col 5
if VFFSL(SL,"varExists",False)('$PFID'): # generated from line 15, col 9
write(''' <form action="uvPalletFPCreate" id=frm>
<input type="hidden" name="barcode1" value="''')
_v = VFFSL(SL,"barcode",True) # '$barcode' on line 17, col 61
if _v is not None: write(_filter(_v, rawExpr='$barcode')) # from line 17, col 61.
write('''">
<input type="hidden" name="dt" value="''')
_v = VFFSL(SL,"dbCurrentTimestamp",False)() # '$dbCurrentTimestamp()' on line 18, col 55
if _v is not None: write(_filter(_v, rawExpr='$dbCurrentTimestamp()')) # from line 18, col 55.
write('''">
<input type="hidden" name="featureid" value="''')
_v = VFFSL(SL,"PFID",True) # '$PFID' on line 19, col 62
if _v is not None: write(_filter(_v, rawExpr='$PFID')) # from line 19, col 62.
write('''">
<a href="wpMain?barcode=''')
_v = VFFSL(SL,"barcode",True) # '$barcode' on line 20, col 41
if _v is not None: write(_filter(_v, rawExpr='$barcode')) # from line 20, col 41.
write('''">''')
_v = VFFSL(SL,"PFNAME",True) # '$PFNAME' on line 20, col 51
if _v is not None: write(_filter(_v, rawExpr='$PFNAME')) # from line 20, col 51.
write('''</a><br>
''')
if False:
_('Количество')
_v = VFFSL(SL,"_",False)('Количество') # "$_('\xca\xee\xeb\xe8\xf7\xe5\xf1\xf2\xe2\xee')" on line 21, col 17
if _v is not None: write(_filter(_v, rawExpr="$_('\xca\xee\xeb\xe8\xf7\xe5\xf1\xf2\xe2\xee')")) # from line 21, col 17.
write(''': <input type=text id="::int" size=4 name=q value="''')
_orig_filter_33690919 = _filter
filterName = 'Quantity'
if self._CHEETAH__filters.has_key("Quantity"):
_filter = self._CHEETAH__currentFilter = self._CHEETAH__filters[filterName]
else:
_filter = self._CHEETAH__currentFilter = \
self._CHEETAH__filters[filterName] = getattr(self._CHEETAH__filtersLib, filterName)(self).filter
_v = VFFSL(SL,"WQ",True) # '$WQ' on line 21, col 100
if _v is not None: write(_filter(_v, rawExpr='$WQ')) # from line 21, col 100.
_filter = _orig_filter_33690919
write('''"/><b>''')
_v = VFFSL(SL,"MWUCODE",True) # '$MWUCODE' on line 21, col 121
if _v is not None: write(_filter(_v, rawExpr='$MWUCODE')) # from line 21, col 121.
write('''</b><br>
''')
if False:
_('ШК паллета для создания')
_v = VFFSL(SL,"_",False)('ШК паллета для создания') # "$_('\xd8\xca \xef\xe0\xeb\xeb\xe5\xf2\xe0 \xe4\xeb\xff \xf1\xee\xe7\xe4\xe0\xed\xe8\xff')" on line 22, col 17
if _v is not None: write(_filter(_v, rawExpr="$_('\xd8\xca \xef\xe0\xeb\xeb\xe5\xf2\xe0 \xe4\xeb\xff \xf1\xee\xe7\xe4\xe0\xed\xe8\xff')")) # from line 22, col 17.
write(''': <input type="text" id=":scan:text" name="barcode2" value=""><br>
<button type="submit">OK</button>
</form>
''')
elif VFFSL(SL,"varExists",False)('$datalist'): # generated from line 25, col 9
for item in VFFSL(SL,"datalist",True): # generated from line 26, col 13
write(''' <a href="wpMain?barcode=''')
_v = VFFSL(SL,"barcode",True) # '$barcode' on line 27, col 41
if _v is not None: write(_filter(_v, rawExpr='$barcode')) # from line 27, col 41.
write('''&featureid=''')
_v = VFFSL(SL,"item.PFID",True) # '$item.PFID' on line 27, col 60
if _v is not None: write(_filter(_v, rawExpr='$item.PFID')) # from line 27, col 60.
write('''">''')
_v = VFFSL(SL,"item.PFNAME",True) # '$item.PFNAME' on line 27, col 72
if _v is not None: write(_filter(_v, rawExpr='$item.PFNAME')) # from line 27, col 72.
write('''</a><br><br>
''')
write('''
''')
########################################
## END - generated method body
return _dummyTrans and trans.response().getvalue() or ""
def writeBody(self, **KWS):
## CHEETAH: main method generated for this template
trans = KWS.get("trans")
if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
trans = self.transaction # is None unless self.awake() was called
if not trans:
trans = DummyTransaction()
_dummyTrans = True
else: _dummyTrans = False
write = trans.response().write
SL = self._CHEETAH__searchList
_filter = self._CHEETAH__currentFilter
########################################
## START - generated method body
write('''
''')
########################################
## END - generated method body
return _dummyTrans and trans.response().getvalue() or ""
##################################################
## CHEETAH GENERATED ATTRIBUTES
_CHEETAH__instanceInitialized = False
_CHEETAH_version = __CHEETAH_version__
_CHEETAH_versionTuple = __CHEETAH_versionTuple__
_CHEETAH_genTime = __CHEETAH_genTime__
_CHEETAH_genTimestamp = __CHEETAH_genTimestamp__
_CHEETAH_src = __CHEETAH_src__
_CHEETAH_srcLastModified = __CHEETAH_srcLastModified__
_mainCheetahMethod_for_palletFPCreate= 'writeBody'
## END CLASS DEFINITION
if not hasattr(palletFPCreate, '_initCheetahAttributes'):
templateAPIClass = getattr(palletFPCreate, '_CHEETAH_templateClass', Template)
templateAPIClass._addCheetahPlumbingCodeToClass(palletFPCreate)
# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other volunteers.
# For more information visit http://www.CheetahTemplate.org/
##################################################
## if run from command line:
if __name__ == '__main__':
from Cheetah.TemplateCmdLineIface import CmdLineIface
CmdLineIface(templateObj=palletFPCreate()).run()
| [
"[email protected]"
]
| |
a16fd3bc38021a1cbd05f2155ca066692604dadd | fb82ff30fba273eb4a30b5b2e1aceef6bd44ef16 | /labs/lab1/test_something.py | d8df569aa211c392b5b3c346276505cba1175110 | [
"LicenseRef-scancode-public-domain-disclaimer"
]
| permissive | jpchauvel/python-tdd-lab | f882e7684f2793e70064fd45b09928b56a81521f | 2a2e0ee4da15e36e809cdded56cffb6e2b97d90f | refs/heads/master | 2021-10-10T12:11:53.765289 | 2013-05-31T00:30:20 | 2013-05-31T00:30:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,240 | py | #!/usr/bin/env python
import unittest
from mockito import *
from something import Something, Operand
class SomethingTestCase(unittest.TestCase):
    """Unit tests for ``Something``, using mockito for mocks/verification.

    ``getSomething`` wires a ``Something`` instance to two fresh operand
    mocks and is shared by the ``send_result`` tests.
    """
    def test_init_should_assign_what(self):
        # action
        something = Something("what")
        # assert
        # BUG FIX: assertTrue("what", x) treats x as the failure *message*
        # and always passes; assertEqual performs the intended comparison.
        self.assertEqual("what", something.attribute)
    def test_doing_something_should_return_string_something(self):
        # setup
        something = Something("what")
        # action
        ret = something.doing_something()
        # assert
        # BUG FIX: was assertTrue("something", ret) -- vacuously true.
        # Expected value follows the test name; confirm against Something.
        self.assertEqual("something", ret)
    def test_doing_something_should_return_string_something_else(self):
        # setup
        something = Something(any(unicode))
        # action
        ret = something.doing_something()
        # assert
        # BUG FIX: was assertTrue("something else", ret) -- vacuously true.
        self.assertEqual("something else", ret)
    def test_send_result_should_add_op2_to_op1(self):
        # setup
        something, op1, op2 = self.getSomething()
        # action
        something.send_result()
        # assert
        verify(op1).add(op2)
    def test_send_result_should_first_sum_op1_op2_then_send_op1(self):
        # setup
        something, op1, op2 = self.getSomething()
        # action
        something.send_result()
        # assert
        # NOTE(review): times=2 on send() looks odd for a single
        # send_result() call -- confirm against Something.send_result.
        inorder.verify(op1, times=1).add(op2)
        inorder.verify(op1, times=2).send()
    def test_send_result_should_raise_network_problem_exception_when_op1_is_None(self):
        # setup
        something, op1, op2 = self.getSomething()
        something.op1 = None
        # assert
        with self.assertRaises(Something.NetworkProblemException):
            # action
            something.send_result()
    def test_send_result_should_execute_catch_handler_when_send_raises_exception(self):
        # setup
        something, op1, op2 = self.getSomething()
        when(op1).send().thenRaise(Operand.AException)
        # action
        something.send_result()
        # assert
        verify(op1, times=1).rollback()
    def getSomething(self):
        """Build a Something wired to two fresh operand mocks."""
        op1 = mock()
        op2 = mock()
        something = Something(any())
        something.op1 = op1
        something.op2 = op2
        return (something, op1, op2)
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
]
| |
d78ec5d375a61d39378e1c786f504671a0bcd4d4 | 954c493a9105a464bee744c6c78a6d06bf8d531c | /simfoni_task/urls.py | c513fe902a316543936fb9ae3266b67fefa3af6b | []
| no_license | sandeepsajan0/suppliers-api | d235bb1e1e632cbd12e4b39ffb73e6b4be9e22f1 | 5b911471a4e02954296cfd2151f96480052d4a70 | refs/heads/master | 2023-02-26T01:57:03.408628 | 2021-01-24T12:39:34 | 2021-01-24T12:39:34 | 332,411,678 | 0 | 0 | null | 2021-01-24T12:12:52 | 2021-01-24T09:38:56 | Python | UTF-8 | Python | false | false | 1,302 | py | """simfoni_task URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
from django.conf.urls.static import static
from django.conf import settings
# drf-yasg schema view: serves the interactive Swagger UI for the API docs.
schema_view = get_schema_view(
    openapi.Info(title="Supplier Module Backend API DOCS", default_version="v1"),
    public=True,
)
# URL routes: admin site, Swagger docs at the site root, and the suppliers
# app's API endpoints under /api/suppliers/.
urlpatterns = [
    path('admin/', admin.site.urls),
    path(
        "",
        schema_view.with_ui("swagger", cache_timeout=0),
        name="schema-swagger-ui",
    ),
    path('api/suppliers/', include("suppliers.urls"))
]
# Serve static files in development.
# NOTE(review): static()'s document_root is normally a single directory
# (e.g. STATIC_ROOT); STATICFILES_DIRS is a list -- confirm this works as
# intended in this project's settings.
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATICFILES_DIRS)
| [
"[email protected]"
]
| |
ab9df98e59d1a7e7d0481a0ede26e8090d4cd311 | e3d6f803beece2ecc2cde8de795fdd20291213ff | /nova/api/openstack/compute/views/flavors.py | fcdd6fcf4ca4d94a039a3ee90651b0c5c0ac1266 | [
"Apache-2.0"
]
| permissive | panguan737/nova | 437c1adb81f3e9ef82c28ad957144623db13ba52 | 0d177185a439baa228b42c948cab4e934d6ac7b8 | refs/heads/main | 2023-01-07T00:08:44.069599 | 2020-11-01T14:00:42 | 2020-11-01T14:00:42 | 309,332,719 | 0 | 0 | Apache-2.0 | 2020-11-02T10:17:13 | 2020-11-02T10:17:13 | null | UTF-8 | Python | false | false | 6,492 | py | # Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import api_version_request
from nova.api.openstack import common
from nova.policies import flavor_access as fa_policies
from nova.policies import flavor_rxtx as fr_policies
FLAVOR_DESCRIPTION_MICROVERSION = '2.55'
class ViewBuilder(common.ViewBuilder):
    """Build API response dicts for flavor resources (list/show views)."""
    _collection_name = "flavors"
    def basic(self, request, flavor, include_description=False,
              update_is_public=None, update_rxtx_factor=None):
        """Return the minimal (id/name/links) view of a single flavor."""
        # update_is_public & update_rxtx_factor are placeholder param
        # which are not used in this method as basic() method is used by
        # index() (GET /flavors) which does not return those keys in response.
        flavor_dict = {
            "flavor": {
                "id": flavor["flavorid"],
                "name": flavor["name"],
                "links": self._get_links(request,
                                         flavor["flavorid"],
                                         self._collection_name),
            },
        }
        if include_description:
            flavor_dict['flavor']['description'] = flavor.description
        return flavor_dict
    def show(self, request, flavor, include_description=False,
             update_is_public=None, update_rxtx_factor=None):
        """Return the detailed view of a single flavor.

        is_public / rxtx_factor are added only when the corresponding
        policy check passes (or when the caller pre-evaluated it and
        passed the result in).
        """
        flavor_dict = {
            "flavor": {
                "id": flavor["flavorid"],
                "name": flavor["name"],
                "ram": flavor["memory_mb"],
                "disk": flavor["root_gb"],
                "swap": flavor["swap"] or "",
                "OS-FLV-EXT-DATA:ephemeral": flavor["ephemeral_gb"],
                "OS-FLV-DISABLED:disabled": flavor["disabled"],
                "vcpus": flavor["vcpus"],
                "links": self._get_links(request,
                                         flavor["flavorid"],
                                         self._collection_name),
            },
        }
        if include_description:
            flavor_dict['flavor']['description'] = flavor.description
        # TODO(gmann): 'update_is_public' & 'update_rxtx_factor' are policies
        # checks. Once os-flavor-access & os-flavor-rxtx policies are
        # removed, 'os-flavor-access:is_public' and 'rxtx_factor' need to be
        # added in response without any check.
        # Evaluate the policies when using show method directly.
        context = request.environ['nova.context']
        if update_is_public is None:
            update_is_public = context.can(fa_policies.BASE_POLICY_NAME,
                                           fatal=False)
        if update_rxtx_factor is None:
            update_rxtx_factor = context.can(fr_policies.BASE_POLICY_NAME,
                                             fatal=False)
        if update_is_public:
            flavor_dict['flavor'].update({
                "os-flavor-access:is_public": flavor['is_public']})
        if update_rxtx_factor:
            flavor_dict['flavor'].update(
                {"rxtx_factor": flavor['rxtx_factor'] or ""})
        return flavor_dict
    def index(self, request, flavors):
        """Return the 'index' view of flavors."""
        coll_name = self._collection_name
        include_description = api_version_request.is_supported(
            request, FLAVOR_DESCRIPTION_MICROVERSION)
        return self._list_view(self.basic, request, flavors, coll_name,
                               include_description=include_description)
    def detail(self, request, flavors):
        """Return the 'detail' view of flavors."""
        coll_name = self._collection_name + '/detail'
        include_description = api_version_request.is_supported(
            request, FLAVOR_DESCRIPTION_MICROVERSION)
        # Policies are evaluated once here and passed down, instead of
        # per-flavor inside show().
        context = request.environ['nova.context']
        update_is_public = context.can(fa_policies.BASE_POLICY_NAME,
                                       fatal=False)
        update_rxtx_factor = context.can(fr_policies.BASE_POLICY_NAME,
                                         fatal=False)
        return self._list_view(self.show, request, flavors, coll_name,
                               include_description=include_description,
                               update_is_public=update_is_public,
                               update_rxtx_factor=update_rxtx_factor)
    def _list_view(self, func, request, flavors, coll_name,
                   include_description=False, update_is_public=None,
                   update_rxtx_factor=None):
        """Provide a view for a list of flavors.
        :param func: Function used to format the flavor data
        :param request: API request
        :param flavors: List of flavors in dictionary format
        :param coll_name: Name of collection, used to generate the next link
                          for a pagination query
        :param include_description: If the flavor.description should be
            included in the response dict.
        :param update_is_public: If the flavor.is_public field should be
            included in the response dict.
        :param update_rxtx_factor: If the flavor.rxtx_factor field should be
            included in the response dict.
        :returns: Flavor reply data in dictionary format
        """
        flavor_list = [func(request, flavor, include_description,
                            update_is_public, update_rxtx_factor)["flavor"]
                       for flavor in flavors]
        flavors_links = self._get_collection_links(request,
                                                   flavors,
                                                   coll_name,
                                                   "flavorid")
        flavors_dict = dict(flavors=flavor_list)
        if flavors_links:
            flavors_dict["flavors_links"] = flavors_links
        return flavors_dict
| [
"[email protected]"
]
| |
546d5f3d75b16204af7150c593a018202f289b72 | dc9b0ea6714c29651cfd8b494862f31f07d85f28 | /project13_Poem_application_v_1_1/venv/Scripts/easy_install-3.7-script.py | 3ffe0822b1c606782a31dc123f99a0e6002cc984 | []
| no_license | Papashanskiy/PythonProjects | c228269f0aef1677758cb6e2f1acdfa522da0a02 | cf999867befa7d8213b2b6675b723f2b9f392fd7 | refs/heads/master | 2022-12-12T15:23:56.234339 | 2019-02-10T09:14:56 | 2019-02-10T09:14:56 | 148,336,536 | 0 | 0 | null | 2022-12-08T03:01:04 | 2018-09-11T15:10:44 | Python | WINDOWS-1251 | Python | false | false | 505 | py | #!C:\Users\Игорь\Desktop\Python\PythonProjects\project13_Poem_application_v_1_1\venv\Scripts\python.exe -x
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.7'
# Auto-generated setuptools console-script wrapper: resolves the
# 'easy_install-3.7' entry point from the pinned setuptools and runs it.
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the '-script.py(w)'/'.exe' suffix so argv[0] matches the
    # console-script name the entry point expects.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.7')()
    )
| [
"[email protected]"
]
| |
7ef8f4ec72f930eb88693061e793ec74b5cc19a3 | 19666396c4d56f4dcd2790b6c8a6b18745767f90 | /task.py | cb5e139ed740c0339e517e50b0c4cff7b318fb27 | []
| no_license | himdhiman/taskpy | 5504b461f4ae161cfabc3f552752dd2641eabf6f | 2d343da545bb20b8c30c8d7d6bde83e241ca5493 | refs/heads/master | 2023-03-29T09:07:35.797755 | 2021-04-08T07:05:15 | 2021-04-08T07:05:15 | 355,798,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,680 | py | from celery import shared_task
from api import models, serializers
from rest_framework.response import Response
import os
from pathlib import Path
from api.models import Problem, Submission
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
import json
from django.core import serializers as djSerializer
BASE_DIR = Path(__file__).resolve().parent.parent
channel_layer = get_channel_layer()
def _files_match(expected_path, actual_path):
    """Compare two text files line by line.

    Mirrors the original zip() comparison: only the common prefix of lines
    is compared, so extra trailing lines in either file still match --
    kept as-is to preserve the judging behaviour.
    """
    with open(expected_path) as f1, open(actual_path) as f2:
        for line1, line2 in zip(f1, f2):
            if line1 != line2:
                return False
    return True


def _judge_testcases(run_cmd, prob_id, total_tc, out_path, uid):
    """Run `run_cmd` against every stored test case and report progress.

    For each case i, feeds media/TestCases/<prob_id>/input<i>.txt to the
    program, diffs the produced output against output<i>.txt, and pushes a
    "<0|1>/<i>/<total>" status message to the submitter's channels group.
    Returns the number of passing cases.
    """
    passed = 0
    case_dir = os.path.join(BASE_DIR, "media", 'TestCases', str(prob_id))
    for i in range(1, total_tc + 1):
        case_input = os.path.join(case_dir, 'input' + str(i) + '.txt')
        expected_output = os.path.join(case_dir, 'output' + str(i) + '.txt')
        os.system(f'{run_cmd} < {case_input} > {out_path}')
        ok = _files_match(expected_output, out_path)
        if ok:
            passed += 1
        async_to_sync(channel_layer.group_send)(
            "user_" + str(uid),
            {'type': 'sendStatus', 'text': f"{1 if ok else 0}/{i}/{total_tc}"})
        # Reset the scratch output file for the next case.
        os.system(f"rm -rf {out_path}")
        os.system(f"touch {out_path}")
    return passed


@shared_task
def runCode(body, uid):
    """Celery task: build, run and judge a code submission.

    `body` is the raw submission payload (validated through
    SubmissionSerializer); `uid` identifies the submitter and names both
    the scratch directory and the channels group receiving progress and
    result messages.  The verdict is persisted on the Submission row
    ("CE" when output.log is non-empty, otherwise "AC") and the
    serialized row is pushed to the "user_<uid>" group.
    """
    response = serializers.SubmissionSerializer(data=body)
    if not response.is_valid():
        return
    inst = response.save()
    data = {'id': inst.id, 'code': inst.code, 'lang': inst.language,
            'problemId': inst.problemId}
    if inst.inputGiven != "":
        data['inp'] = inst.inputGiven
    probId = data['problemId']
    totaltc = Problem.objects.get(id=probId).totalTC
    # Scratch workspace for this user's submission.
    # NOTE(review): paths are interpolated into os.system() shell commands;
    # they are server-built (BASE_DIR + uid), but subprocess.run([...],
    # shell=False) would be safer -- confirm uid can never contain shell
    # metacharacters.
    tempPath = os.path.join(BASE_DIR, "Codes", str(uid))
    os.system(f"mkdir {tempPath}")
    inpPath = os.path.join(tempPath, "input.txt")
    outPath = os.path.join(tempPath, "output.txt")
    logPath = os.path.join(tempPath, "output.log")
    for scratch in (inpPath, outPath, logPath):
        os.system(f"touch {scratch}")
    bashPath = os.path.join(tempPath, "a.out")
    # Write the submitted source under the file name the run commands expect.
    source_names = {"CP": "main.cpp", "P3": "main.py"}
    progPath = None
    if data['lang'] in source_names:
        progPath = os.path.join(tempPath, source_names[data['lang']])
        with open(progPath, "w") as f:
            f.write(data['code'])
    isInputGiven = 'inp' in data and data['inp'] is not None
    if isInputGiven:
        with open(inpPath, "w") as f:
            f.write(data['inp'])
    os.chdir(tempPath)
    # BUG FIX: `cnt` was only initialised inside the C++ branch, so Python
    # submissions crashed with UnboundLocalError before judging finished.
    cnt = 0
    if data['lang'] == "CP":
        os.system(f'g++ {progPath}')
        if not isInputGiven:
            cnt = _judge_testcases(bashPath, probId, totaltc, outPath, uid)
        else:
            os.system(f'{bashPath} < {inpPath} > {outPath}')
        # Recompile with stderr redirected so compile errors land in
        # output.log (a non-empty log marks the submission "CE" below).
        os.system(f'g++ {progPath} 2> {logPath}')
    if data['lang'] == "P3":
        if not isInputGiven:
            cnt = _judge_testcases("python main.py", probId, totaltc,
                                   outPath, uid)
        else:
            os.system('python main.py < input.txt > output.txt 2>"output.log"')
    os.chdir(BASE_DIR)
    with open(os.path.join(tempPath, 'output.txt')) as out:
        code_output = out.read()
    # BUG FIX: the original reused `inpPath` as the loop variable while
    # judging, so this cleanup deleted the last *official* test-case input
    # file from media/TestCases. It now only resets the scratch input file.
    os.system(f"rm -rf {inpPath}")
    os.system(f"touch {inpPath}")
    tcString = str(cnt) + "/" + str(totaltc)
    if os.stat(os.path.join(tempPath, "output.log")).st_size != 0:
        with open(os.path.join(tempPath, "output.log")) as f:
            error = f.read()
        os.system(f"rm -rf {tempPath}")
        Submission.objects.filter(pk=data['id']).update(
            error=error, status="CE", testCasesPassed=tcString)
    else:
        os.system(f"rm -rf {tempPath}")
        Submission.objects.filter(pk=data['id']).update(
            outputGen=code_output, status="AC", testCasesPassed=tcString)
    result = models.Submission.objects.filter(id=inst.id)
    async_to_sync(channel_layer.group_send)(
        "user_" + str(uid),
        {'type': 'sendResult', 'text': djSerializer.serialize('json', result)})
| [
"[email protected]"
]
| |
516fe6d919ac4f281d58d62191e17b1d1b0915db | 45f7a9b44ea1c45448703707da793d51151c0527 | /ui_tests/examples/examples_03.py | 6cbe239e09666abb076794312777347c51028740 | []
| no_license | basdijkstra/python-for-testers | a40d30432c31712c6d0eadbca9de73056ff10535 | 50bfbabfb2b8426eed8d048b0448959c34f71b61 | refs/heads/master | 2023-05-24T18:48:58.557924 | 2023-05-23T05:44:11 | 2023-05-23T05:44:11 | 219,865,075 | 7 | 4 | null | 2023-05-23T05:44:13 | 2019-11-05T22:47:09 | Python | UTF-8 | Python | false | false | 1,047 | py | from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as ec
import pytest
@pytest.fixture
def browser():
driver = webdriver.Chrome()
driver.maximize_window()
yield driver
driver.quit()
def test_successful_google_search(browser):
browser.get("https://www.google.com")
send_keys(browser, By.NAME, "q", "Maserati")
click(browser, By.NAME, "btnK")
assert browser.title == "Maserati - Google zoeken"
assert browser.find_element_by_id("resultStats").is_displayed() is True
def send_keys(driver, locator_strategy, locator, text_to_type):
element = WebDriverWait(driver, 10).until(
ec.element_to_be_clickable((locator_strategy, locator))
)
element.send_keys(text_to_type)
def click(driver, locator_strategy, locator):
element = WebDriverWait(driver, 10).until(
ec.element_to_be_clickable((locator_strategy, locator))
)
element.click()
| [
"[email protected]"
]
| |
ec2cadeb66e67d4e79bc5fd3c5442916d74ec88b | fa0eac5b96fc46ebf8e31a7ccd7fa39f2e200bfc | /backend/home/migrations/0002_load_initial_data.py | 95b530a428d8a00e98272abc6771579b60ad009f | []
| no_license | crowdbotics-apps/mobile-7-dec-dev-16399 | be33b8eb17957c9b8a75b3d089114631f27b9109 | ab2cbc3e11dfbf709883b3cf49f4be109d779028 | refs/heads/master | 2023-06-27T20:39:09.405282 | 2020-12-07T11:38:48 | 2020-12-07T11:38:48 | 319,207,493 | 0 | 0 | null | 2021-08-03T20:05:57 | 2020-12-07T04:51:46 | JavaScript | UTF-8 | Python | false | false | 1,306 | py | from django.db import migrations
def create_customtext(apps, schema_editor):
CustomText = apps.get_model("home", "CustomText")
customtext_title = "mobile 7 dec"
CustomText.objects.create(title=customtext_title)
def create_homepage(apps, schema_editor):
HomePage = apps.get_model("home", "HomePage")
homepage_body = """
<h1 class="display-4 text-center">mobile 7 dec</h1>
<p class="lead">
This is the sample application created and deployed from the Crowdbotics app.
You can view list of packages selected for this application below.
</p>"""
HomePage.objects.create(body=homepage_body)
def create_site(apps, schema_editor):
Site = apps.get_model("sites", "Site")
custom_domain = "mobile-7-dec-dev-16399.botics.co"
site_params = {
"name": "mobile 7 dec",
}
if custom_domain:
site_params["domain"] = custom_domain
Site.objects.update_or_create(defaults=site_params, id=1)
class Migration(migrations.Migration):
dependencies = [
("home", "0001_initial"),
("sites", "0002_alter_domain_unique"),
]
operations = [
migrations.RunPython(create_customtext),
migrations.RunPython(create_homepage),
migrations.RunPython(create_site),
]
| [
"[email protected]"
]
| |
644e40c6107325bbd5bc83d943dae5342e6840d2 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_347/ch60_2020_06_20_17_50_00_812398.py | c56e6538b8f07b28a41785924f0cb42c34347010 | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 106 | py | def eh_palindromo(string):
if string == string[-1]:
return True
else:
return False | [
"[email protected]"
]
| |
6ff9ff95fefbaa276dd64e822f01857ee7a656b9 | 11aaeaeb55d587a950456fd1480063e1aed1d9e5 | /.history/ex45-test_20190608162053.py | ead3b46f62d91c9428c264a9f24afe6a0e5ba598 | []
| no_license | Gr4cchus/Learn-Python-3-The-Hard-Way | 8ce9e68f6a91ea33ea45fe64bfff82d65422c4a8 | f5fa34db16cdd6377faa7fcf45c70f94bb4aec0d | refs/heads/master | 2020-05-17T23:18:29.483160 | 2019-06-26T18:42:52 | 2019-06-26T18:42:52 | 184,023,439 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py |
class Room1():
def enter():
print("You enter room 1")
class Room2():
def enter():
print("You enter room 2")
class Map():
def __init__(self, starting_room)
self.starting_room = starting_room
self.locations = {
'room1': Room1(),
'room2': Room2()
}
start = Map('room1')
start
| [
"[email protected]"
]
| |
83d0a74593c54ec76d33cb5485a9903692decdc3 | 0de5810c702046d8f00c8bcda8543c66f608222f | /venv/bin/tor-prompt | d17b63310b066687ee943f2ad5db6d165708806a | []
| no_license | Subhash1998/product_sale | 26a688869009035e785791cac1fb2707cab1e221 | a76f9e75ea1d96832d9280b98da08d0b3c6520fe | refs/heads/master | 2022-12-12T12:13:37.392530 | 2020-06-18T12:46:45 | 2020-06-18T12:46:45 | 128,992,642 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | #!/home/subhash/payment/venv/bin/python
# Copyright 2014-2017, Damian Johnson and The Tor Project
# See LICENSE for licensing information
import stem.interpreter
if __name__ == '__main__':
stem.interpreter.main()
| [
"[email protected]"
]
| ||
c3182071b0e894204d18f4939e191f802569f245 | c1d68638ccff1d72dd04e4f85bedf5b3146d1c7e | /site-packages/PytoClasses.py | f61abae06651d5db273c0bf55b7b660deab90c81 | [
"MIT"
]
| permissive | luoyu1993/Pyto | ac61f296756a0df9131e50a203cb31efd261b843 | 4f874e0b9203ae8dc0cd447b599358d726c98f10 | refs/heads/master | 2022-07-26T10:44:58.987630 | 2018-12-09T19:05:41 | 2018-12-09T19:05:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 702 | py | # -*- coding: utf-8 -*-
"""
This module contains classes from the main app used by Pyto.
This module is only for private use. Use the `pyto` API instead.
"""
from rubicon.objc import *
NSBundle = ObjCClass("NSBundle")
def __Class__(name):
return ObjCClass("Pyto."+name)
PyMainThread = __Class__("PyMainThread")
PyThread = __Class__("PyThread")
PyInputHelper = __Class__("PyInputHelper")
PyOutputHelper = __Class__("PyOutputHelper")
PySharingHelper = __Class__("PySharingHelper")
FilePicker = __Class__("PyFilePicker")
Alert = __Class__("PyAlert")
PyContentViewController = __Class__("PyContentViewController")
PyExtensionContext = __Class__("PyExtensionContext")
Python = __Class__("Python")
| [
"[email protected]"
]
| |
57a0faa230f7260f44d5ae7dbf3ff16be5f6ad0f | 5b3bd326998606188b45a7870852643eda024a97 | /meta_architectures/context_rcnn_lib_tf2.py | 4989db07136dc450d1fa5058e58e27365b79dd29 | []
| no_license | KuznetsovIllya/clearml_od_toy | 31556d0726d15a054c1c18317c361d97801381a4 | 92f15f04a023d4e0e165a250fddc3129144913d0 | refs/heads/main | 2023-04-11T05:55:56.248478 | 2021-04-14T15:59:40 | 2021-04-14T15:59:40 | 357,827,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:2ba0224b6fe0f919228a83b2f2a2bef6bbff4e7f747358231bdc235456d58197
size 9287
| [
"[email protected]"
]
| |
d62122698bcb4a6081b643082fd8fb9a2df8278c | 7b6377050fba4d30f00e9fb5d56dfacb22d388e1 | /pqu/Check/t07.py | e3c5cacee7c2ca293bce9a269a6a1976e4be1703 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | LLNL/fudge | 0a4fe8e3a68b66d58e42d1f4d209ea3f713c6370 | 6ba80855ae47cb32c37f635d065b228fadb03412 | refs/heads/master | 2023-08-16T21:05:31.111098 | 2023-08-01T22:09:32 | 2023-08-01T22:09:32 | 203,678,373 | 21 | 4 | NOASSERTION | 2023-06-28T20:51:02 | 2019-08-21T23:22:20 | Python | UTF-8 | Python | false | false | 972 | py | # <<BEGIN-copyright>>
# Copyright 2022, Lawrence Livermore National Security, LLC.
# See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
# <<END-copyright>>
import sys
sys.path.insert( 0, '../../' )
from pqu.PQU import PQU
print()
a2 = PQU( "2.300000000000(1) MeV" )
print(a2)
print(a2.info( significantDigits = 15 ))
a2 = PQU( "2.300000000001(1) MeV" )
print(a2)
print(a2.info( significantDigits = 15 ))
print()
a2 = PQU( "2.300000000003(1)", "MeV" )
print(a2)
print(a2.info( significantDigits = 15 ))
print()
l = PQU( 10., 'm' )
big_l = PQU( 10., 'km' )
sum_l = big_l + l
print(sum_l)
print(l.info( significantDigits = 15 ))
print(big_l.info( significantDigits = 15 ))
print(sum_l.info( significantDigits = 15 ))
print()
E = PQU( 1000, 'MeV/c**2' ) # This is similar to the prior one.
print(E)
print(E.info( significantDigits = 15 ))
kg = E.inUnitsOf( 'kg' )
print(kg)
print(kg.info( significantDigits = 15 ))
| [
"[email protected]"
]
| |
6f6d4de96ca0a1c2543a30b808a0d01e7cebf842 | 7b0f8e5e3dd02f47096bc35322063c7581f95aca | /chapter12/avazu_ctr/future_selection.py | 51daffbfe3d66f4915cfa3d050b08282ce2e8a3f | []
| no_license | michaelliu03/Search-Recommend-InAction | 94f8342573fffa0016bc6b66b0100a81423e5438 | 06408c9a4ec23aad0e604267903be96cb2892ea1 | refs/heads/master | 2022-02-25T21:57:56.445082 | 2022-02-17T10:45:15 | 2022-02-17T10:45:15 | 226,252,089 | 40 | 17 | null | 2022-01-10T09:53:50 | 2019-12-06T05:24:50 | Python | UTF-8 | Python | false | false | 3,906 | py | #!/usr/bin/env python
#-*-coding:utf-8-*-
# @File:future_selection.py
# @Author: Michael.liu
# @Date:2020/6/4 17:49
# @Desc: this code is ....
import pandas as pd
import numpy as np
import xgboost as xgb
from xgboost.sklearn import XGBClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import roc_auc_score
from sklearn.metrics import log_loss
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
from xgboost import plot_importance
def xgboost_selection_future():
train = pd.read_csv('tr_FE.csv')
y_train = train.click
X_train = train.drop(['click', 'device_ip', 'Unnamed: 0'], axis=1)
cv_params = {'n_estimators': [400, 500, 600, 700, 800]}
other_params ={'learning_rate': 0.1,
'n_estimators': 500,
'max_depth': 5,
'min_child_weight': 1,
'seed': 0,
'subsample': 0.8,
'objective': 'binary:logistic',
'colsample_bytree': 0.8,
'gamma': 0,
}
model = xgb.XGBClassifier(**other_params)
optimized_GBM = GridSearchCV(estimator=model, param_grid=cv_params, scoring='neg_log_loss', cv=5, verbose=1,
n_jobs=4)
optimized_GBM.fit(X_train, y_train)
evalute_result = optimized_GBM.grid_scores_
print('每轮迭代运行结果:{0}'.format(evalute_result))
print('参数的最佳取值:{0}'.format(optimized_GBM.best_params_))
print('最佳模型得分:{0}'.format(optimized_GBM.best_score_))
def modelfit(alg, dtrain, predictors, useTrainCV=True, cv_folds=5, early_stopping_rounds=50):
if useTrainCV:
xgb_param = alg.get_xgb_params() # 参数
xgtrain = xgb.DMatrix(dtrain[predictors].values, label=dtrain[target].values) # 训练集数据与标签
cvresult = xgb.cv(xgb_param, xgtrain, num_boost_round=alg.get_params()['n_estimators'], nfold=cv_folds,
metrics='auc', early_stopping_rounds=early_stopping_rounds, show_progress=False)
alg.set_params(n_estimators=cvresult.shape[0])
alg.fit(dtrain[predictors], dtrain['Disbursed'], eval_metric='auc')
# Predict training set:
dtrain_predictions = alg.predict(dtrain[predictors])
dtrain_predprob = alg.predict_proba(dtrain[predictors])[:, 1]
# Print model report:
print("Model Report")
# print("Accuracy : %.4g" % accuracy_score(dtrain['Disbursed'].values, dtrain_predictions))
# print("AUC Score (Train): %f" % roc_auc_score(dtrain['Disbursed'], dtrain_predprob))
feat_imp = pd.Series(alg.booster().get_fscore()).sort_values(ascending=False)
feat_imp.plot(kind='bar', title='Feature Importances')
plt.ylabel('Feature Importance Score')
def future_important():
train = pd.read_csv("tr_FE.csv")
# test = pd.read_csv("tr_FE.csv")
#features = pd.read_csv('feature.csv')
y_train = train.click
X_train = train.drop(['click'], axis=1)
model = xgb.XGBRegressor(n_estimators=350, max_depth=10, objective='binary:logistic', min_child_weight=50,
subsample=0.8, gamma=0, learning_rate=0.2, colsample_bytree=0.5, seed=27)
model.fit(X_train, y_train)
# y_test = model.predict(X_test)
plot_importance(model, importance_type="gain")
features = X_train.columns
feature_importance_values = model.feature_importances_
feature_importances = pd.DataFrame({'feature': list(features), 'importance': feature_importance_values})
feature_importances.sort_values('importance', inplace=True, ascending=False)
print(feature_importances)
# print(model.get_booster().get_fscore())
print(model.get_booster().get_score(importance_type="gain"))
feature_importances.to_csv('feature.csv')
if __name__ == '__main__':
print("start......")
xgboost_selection_future()
print(">>>>>>>>end")
| [
"[email protected]"
]
| |
325ef11b155fbaa8e4e993bad295a14bd10f0da1 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2698/60825/298860.py | fabee180d1ca0e439e1ce0a2a785aa0d0d867e9a | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 345 | py | t=""
while True:
try:
ts=input()
t+=ts
except:
break
if t=='2 2':
print(3, end='')
elif t=='3 5':
print(58871587162270592645034001, end='')
elif t=='2 3':
print(21, end='')
elif t.startswith('2 4'):
print(651, end='')
elif t.startswith('4 3'):
print(83505, end='')
else:
print(t)
| [
"[email protected]"
]
| |
583f053a5f5f31217decbeaed149f301f2056bf9 | b92adbd59161b701be466b3dbeab34e2b2aaf488 | /.c9/metadata/environment/database_submissions/dup_query.py | 537917265b85ff92fabc4fa0426f9e0112175932 | []
| no_license | R151865/cloud_9_files | 7486fede7af4db4572f1b8033990a0f07f8749e8 | a468c44e9aee4a37dea3c8c9188c6c06e91cc0c4 | refs/heads/master | 2022-11-22T10:45:39.439033 | 2020-07-23T09:31:52 | 2020-07-23T09:31:52 | 281,904,416 | 0 | 1 | null | 2022-11-20T00:47:10 | 2020-07-23T09:08:48 | Python | UTF-8 | Python | false | false | 24,298 | py | {"filter":false,"title":"dup_query.py","tooltip":"/database_submissions/dup_query.py","undoManager":{"mark":100,"position":100,"stack":[[{"start":{"row":81,"column":58},"end":{"row":81,"column":59},"action":"insert","lines":["="],"id":1827},{"start":{"row":81,"column":59},"end":{"row":81,"column":60},"action":"insert","lines":["'"]},{"start":{"row":81,"column":60},"end":{"row":81,"column":61},"action":"insert","lines":["'"]}],[{"start":{"row":81,"column":60},"end":{"row":81,"column":61},"action":"insert","lines":["w"],"id":1828},{"start":{"row":81,"column":61},"end":{"row":81,"column":62},"action":"insert","lines":["i"]},{"start":{"row":81,"column":62},"end":{"row":81,"column":63},"action":"insert","lines":["c"]},{"start":{"row":81,"column":63},"end":{"row":81,"column":64},"action":"insert","lines":["k"]}],[{"start":{"row":81,"column":65},"end":{"row":81,"column":66},"action":"insert","lines":[";"],"id":1829}],[{"start":{"row":40,"column":38},"end":{"row":41,"column":0},"action":"insert","lines":["",""],"id":1830},{"start":{"row":41,"column":0},"end":{"row":41,"column":12},"action":"insert","lines":[" 
"]},{"start":{"row":41,"column":12},"end":{"row":41,"column":13},"action":"insert","lines":["C"]}],[{"start":{"row":41,"column":13},"end":{"row":41,"column":14},"action":"insert","lines":["O"],"id":1831},{"start":{"row":41,"column":14},"end":{"row":41,"column":15},"action":"insert","lines":["N"]},{"start":{"row":41,"column":15},"end":{"row":41,"column":16},"action":"insert","lines":["S"]}],[{"start":{"row":41,"column":16},"end":{"row":41,"column":17},"action":"insert","lines":["T"],"id":1832},{"start":{"row":41,"column":17},"end":{"row":41,"column":18},"action":"insert","lines":["R"]},{"start":{"row":41,"column":18},"end":{"row":41,"column":19},"action":"insert","lines":["A"]},{"start":{"row":41,"column":19},"end":{"row":41,"column":20},"action":"insert","lines":["I"]},{"start":{"row":41,"column":20},"end":{"row":41,"column":21},"action":"insert","lines":["N"]},{"start":{"row":41,"column":21},"end":{"row":41,"column":22},"action":"insert","lines":["T"]}],[{"start":{"row":41,"column":22},"end":{"row":41,"column":23},"action":"insert","lines":[" "],"id":1833}],[{"start":{"row":42,"column":57},"end":{"row":43,"column":0},"action":"insert","lines":["",""],"id":1834},{"start":{"row":43,"column":0},"end":{"row":43,"column":12},"action":"insert","lines":[" "]},{"start":{"row":43,"column":12},"end":{"row":43,"column":13},"action":"insert","lines":["O"]},{"start":{"row":43,"column":13},"end":{"row":43,"column":14},"action":"insert","lines":["N"]}],[{"start":{"row":43,"column":14},"end":{"row":43,"column":15},"action":"insert","lines":[" 
"],"id":1835},{"start":{"row":43,"column":15},"end":{"row":43,"column":16},"action":"insert","lines":["D"]},{"start":{"row":43,"column":16},"end":{"row":43,"column":17},"action":"insert","lines":["E"]},{"start":{"row":43,"column":17},"end":{"row":43,"column":18},"action":"insert","lines":["L"]},{"start":{"row":43,"column":18},"end":{"row":43,"column":19},"action":"insert","lines":["E"]},{"start":{"row":43,"column":19},"end":{"row":43,"column":20},"action":"insert","lines":["T"]},{"start":{"row":43,"column":20},"end":{"row":43,"column":21},"action":"insert","lines":["E"]}],[{"start":{"row":43,"column":21},"end":{"row":43,"column":22},"action":"insert","lines":[" "],"id":1836},{"start":{"row":43,"column":22},"end":{"row":43,"column":23},"action":"insert","lines":["S"]}],[{"start":{"row":43,"column":22},"end":{"row":43,"column":23},"action":"remove","lines":["S"],"id":1837}],[{"start":{"row":43,"column":22},"end":{"row":43,"column":23},"action":"insert","lines":["C"],"id":1838},{"start":{"row":43,"column":23},"end":{"row":43,"column":24},"action":"insert","lines":["A"]},{"start":{"row":43,"column":24},"end":{"row":43,"column":25},"action":"insert","lines":["S"]}],[{"start":{"row":43,"column":25},"end":{"row":43,"column":26},"action":"insert","lines":["C"],"id":1839},{"start":{"row":43,"column":26},"end":{"row":43,"column":27},"action":"insert","lines":["A"]},{"start":{"row":43,"column":27},"end":{"row":43,"column":28},"action":"insert","lines":["D"]},{"start":{"row":43,"column":28},"end":{"row":43,"column":29},"action":"insert","lines":["E"]}],[{"start":{"row":43,"column":29},"end":{"row":43,"column":30},"action":"insert","lines":[";"],"id":1840}],[{"start":{"row":43,"column":29},"end":{"row":43,"column":30},"action":"remove","lines":[";"],"id":1841}],[{"start":{"row":36,"column":30},"end":{"row":36,"column":31},"action":"insert","lines":["1"],"id":1842}],[{"start":{"row":39,"column":40},"end":{"row":39,"column":41},"action":"remove","lines":["_"],"id":1843}],[{"start"
:{"row":39,"column":36},"end":{"row":39,"column":49},"action":"remove","lines":["AUTOINCREMENT"],"id":1844}],[{"start":{"row":39,"column":24},"end":{"row":39,"column":25},"action":"insert","lines":[" "],"id":1845}],[{"start":{"row":39,"column":24},"end":{"row":39,"column":37},"action":"insert","lines":["AUTOINCREMENT"],"id":1846}],[{"start":{"row":39,"column":49},"end":{"row":39,"column":50},"action":"remove","lines":[" "],"id":1847}],[{"start":{"row":39,"column":24},"end":{"row":39,"column":37},"action":"remove","lines":["AUTOINCREMENT"],"id":1848}],[{"start":{"row":39,"column":24},"end":{"row":39,"column":25},"action":"remove","lines":[" "],"id":1849}],[{"start":{"row":39,"column":35},"end":{"row":39,"column":36},"action":"insert","lines":[" "],"id":1850}],[{"start":{"row":39,"column":36},"end":{"row":39,"column":49},"action":"insert","lines":["AUTOINCREMENT"],"id":1851}],[{"start":{"row":39,"column":23},"end":{"row":39,"column":24},"action":"insert","lines":["e"],"id":1852},{"start":{"row":39,"column":24},"end":{"row":39,"column":25},"action":"insert","lines":["g"]},{"start":{"row":39,"column":25},"end":{"row":39,"column":26},"action":"insert","lines":["e"]}],[{"start":{"row":39,"column":25},"end":{"row":39,"column":26},"action":"remove","lines":["e"],"id":1853},{"start":{"row":39,"column":24},"end":{"row":39,"column":25},"action":"remove","lines":["g"]},{"start":{"row":39,"column":23},"end":{"row":39,"column":24},"action":"remove","lines":["e"]}],[{"start":{"row":39,"column":23},"end":{"row":39,"column":24},"action":"insert","lines":["E"],"id":1854},{"start":{"row":39,"column":24},"end":{"row":39,"column":25},"action":"insert","lines":["G"]},{"start":{"row":39,"column":25},"end":{"row":39,"column":26},"action":"insert","lines":["E"]},{"start":{"row":39,"column":26},"end":{"row":39,"column":27},"action":"insert","lines":["R"]}],[{"start":{"row":36,"column":7},"end":{"row":44,"column":10},"action":"remove","lines":[" CREATE TABLE Post_User1"," ("," user_id INT 
,"," post_id INTEGER PRIMARY KEY AUTOINCREMENT,"," post_content VARCHAR(500),"," CONSTRAINT "," FOREIGN KEY(user_id) REFERENCES User(user_id)"," ON DELETE CASCADE"," );"],"id":1855}],[{"start":{"row":36,"column":7},"end":{"row":44,"column":10},"action":"insert","lines":[" CREATE TABLE Post_User1"," ("," user_id INT ,"," post_id INTEGER PRIMARY KEY AUTOINCREMENT,"," post_content VARCHAR(500),"," CONSTRAINT "," FOREIGN KEY(user_id) REFERENCES User(user_id)"," ON DELETE CASCADE"," );"],"id":1856}],[{"start":{"row":41,"column":23},"end":{"row":41,"column":24},"action":"insert","lines":["n"],"id":1857},{"start":{"row":41,"column":24},"end":{"row":41,"column":25},"action":"insert","lines":["a"]},{"start":{"row":41,"column":25},"end":{"row":41,"column":26},"action":"insert","lines":["m"]},{"start":{"row":41,"column":26},"end":{"row":41,"column":27},"action":"insert","lines":["e"]}],[{"start":{"row":63,"column":4},"end":{"row":64,"column":30},"action":"remove","lines":["INSERT INTO Post_User(user_id,post_content)"," VALUES(2,'my first post');"],"id":1858}],[{"start":{"row":63,"column":4},"end":{"row":71,"column":10},"action":"insert","lines":["CREATE TABLE Post_User1"," ("," user_id INT ,"," post_id INTEGER PRIMARY KEY AUTOINCREMENT,"," post_content VARCHAR(500),"," CONSTRAINT name"," FOREIGN KEY(user_id) REFERENCES User(user_id)"," ON DELETE CASCADE"," 
);"],"id":1859}],[{"start":{"row":85,"column":0},"end":{"row":86,"column":0},"action":"insert","lines":["",""],"id":1860},{"start":{"row":86,"column":0},"end":{"row":87,"column":0},"action":"insert","lines":["",""]}],[{"start":{"row":86,"column":0},"end":{"row":86,"column":1},"action":"insert","lines":["Q"],"id":1861},{"start":{"row":86,"column":1},"end":{"row":86,"column":2},"action":"insert","lines":["1"]}],[{"start":{"row":86,"column":1},"end":{"row":86,"column":2},"action":"remove","lines":["1"],"id":1862}],[{"start":{"row":86,"column":1},"end":{"row":86,"column":2},"action":"insert","lines":["1"],"id":1863},{"start":{"row":86,"column":2},"end":{"row":86,"column":3},"action":"insert","lines":["0"]},{"start":{"row":86,"column":3},"end":{"row":86,"column":4},"action":"insert","lines":["="]}],[{"start":{"row":86,"column":4},"end":{"row":86,"column":6},"action":"insert","lines":["''"],"id":1864}],[{"start":{"row":86,"column":4},"end":{"row":86,"column":6},"action":"remove","lines":["''"],"id":1865}],[{"start":{"row":86,"column":4},"end":{"row":86,"column":6},"action":"insert","lines":["\"\""],"id":1866}],[{"start":{"row":86,"column":6},"end":{"row":86,"column":7},"action":"insert","lines":["\""],"id":1867}],[{"start":{"row":86,"column":7},"end":{"row":87,"column":0},"action":"insert","lines":["",""],"id":1868},{"start":{"row":87,"column":0},"end":{"row":88,"column":0},"action":"insert","lines":["",""]}],[{"start":{"row":88,"column":0},"end":{"row":88,"column":2},"action":"insert","lines":["\"\""],"id":1869}],[{"start":{"row":88,"column":2},"end":{"row":88,"column":3},"action":"insert","lines":["\""],"id":1870},{"start":{"row":88,"column":3},"end":{"row":88,"column":4},"action":"insert","lines":["\""]}],[{"start":{"row":87,"column":0},"end":{"row":87,"column":4},"action":"insert","lines":[" 
"],"id":1871}],[{"start":{"row":87,"column":4},"end":{"row":87,"column":5},"action":"insert","lines":["D"],"id":1872},{"start":{"row":87,"column":5},"end":{"row":87,"column":6},"action":"insert","lines":["E"]},{"start":{"row":87,"column":6},"end":{"row":87,"column":7},"action":"insert","lines":["L"]},{"start":{"row":87,"column":7},"end":{"row":87,"column":8},"action":"insert","lines":["E"]},{"start":{"row":87,"column":8},"end":{"row":87,"column":9},"action":"insert","lines":["E"]}],[{"start":{"row":87,"column":8},"end":{"row":87,"column":9},"action":"remove","lines":["E"],"id":1873}],[{"start":{"row":87,"column":8},"end":{"row":87,"column":9},"action":"insert","lines":["T"],"id":1874}],[{"start":{"row":87,"column":9},"end":{"row":87,"column":10},"action":"insert","lines":["e"],"id":1875}],[{"start":{"row":87,"column":10},"end":{"row":87,"column":11},"action":"insert","lines":[" "],"id":1876}],[{"start":{"row":87,"column":10},"end":{"row":87,"column":11},"action":"remove","lines":[" "],"id":1877},{"start":{"row":87,"column":9},"end":{"row":87,"column":10},"action":"remove","lines":["e"]}],[{"start":{"row":87,"column":9},"end":{"row":87,"column":10},"action":"insert","lines":["E"],"id":1878}],[{"start":{"row":87,"column":10},"end":{"row":87,"column":11},"action":"insert","lines":[" "],"id":1879},{"start":{"row":87,"column":11},"end":{"row":87,"column":12},"action":"insert","lines":["F"]},{"start":{"row":87,"column":12},"end":{"row":87,"column":13},"action":"insert","lines":["R"]},{"start":{"row":87,"column":13},"end":{"row":87,"column":14},"action":"insert","lines":["O"]},{"start":{"row":87,"column":14},"end":{"row":87,"column":15},"action":"insert","lines":["M"]}],[{"start":{"row":87,"column":15},"end":{"row":87,"column":16},"action":"insert","lines":[" 
"],"id":1880}],[{"start":{"row":87,"column":16},"end":{"row":87,"column":17},"action":"insert","lines":["P"],"id":1881},{"start":{"row":87,"column":17},"end":{"row":87,"column":18},"action":"insert","lines":["o"]},{"start":{"row":87,"column":18},"end":{"row":87,"column":19},"action":"insert","lines":["s"]}],[{"start":{"row":87,"column":19},"end":{"row":87,"column":20},"action":"insert","lines":["t"],"id":1882},{"start":{"row":87,"column":20},"end":{"row":87,"column":21},"action":"insert","lines":["_"]},{"start":{"row":87,"column":21},"end":{"row":87,"column":22},"action":"insert","lines":["U"]},{"start":{"row":87,"column":22},"end":{"row":87,"column":23},"action":"insert","lines":["s"]}],[{"start":{"row":87,"column":23},"end":{"row":87,"column":24},"action":"insert","lines":["e"],"id":1883},{"start":{"row":87,"column":24},"end":{"row":87,"column":25},"action":"insert","lines":["r"]},{"start":{"row":87,"column":25},"end":{"row":87,"column":26},"action":"insert","lines":[";"]}],[{"start":{"row":88,"column":3},"end":{"row":88,"column":4},"action":"remove","lines":["\""],"id":1884}],[{"start":{"row":34,"column":15},"end":{"row":34,"column":16},"action":"remove","lines":[":"],"id":1885},{"start":{"row":34,"column":14},"end":{"row":34,"column":15},"action":"remove","lines":[")"]},{"start":{"row":34,"column":13},"end":{"row":34,"column":14},"action":"remove","lines":["("]},{"start":{"row":34,"column":12},"end":{"row":34,"column":13},"action":"remove","lines":["r"]},{"start":{"row":34,"column":11},"end":{"row":34,"column":12},"action":"remove","lines":["e"]},{"start":{"row":34,"column":10},"end":{"row":34,"column":11},"action":"remove","lines":["s"]},{"start":{"row":34,"column":9},"end":{"row":34,"column":10},"action":"remove","lines":["u"]},{"start":{"row":34,"column":8},"end":{"row":34,"column":9},"action":"remove","lines":["_"]},{"start":{"row":34,"column":7},"end":{"row":34,"column":8},"action":"remove","lines":["t"]},{"start":{"row":34,"column":6},"end":{"row":34,"colu
mn":7},"action":"remove","lines":["s"]},{"start":{"row":34,"column":5},"end":{"row":34,"column":6},"action":"remove","lines":["o"]},{"start":{"row":34,"column":4},"end":{"row":34,"column":5},"action":"remove","lines":["p"]},{"start":{"row":34,"column":3},"end":{"row":34,"column":4},"action":"remove","lines":[" "]},{"start":{"row":34,"column":2},"end":{"row":34,"column":3},"action":"remove","lines":["f"]}],[{"start":{"row":34,"column":1},"end":{"row":34,"column":2},"action":"remove","lines":["e"],"id":1886}],[{"start":{"row":46,"column":17},"end":{"row":46,"column":18},"action":"remove","lines":[")"],"id":1887},{"start":{"row":46,"column":16},"end":{"row":46,"column":17},"action":"remove","lines":["1"]},{"start":{"row":46,"column":15},"end":{"row":46,"column":16},"action":"remove","lines":["Q"]},{"start":{"row":46,"column":14},"end":{"row":46,"column":15},"action":"remove","lines":["("]},{"start":{"row":46,"column":13},"end":{"row":46,"column":14},"action":"remove","lines":["a"]},{"start":{"row":46,"column":12},"end":{"row":46,"column":13},"action":"remove","lines":["t"]},{"start":{"row":46,"column":11},"end":{"row":46,"column":12},"action":"remove","lines":["a"]},{"start":{"row":46,"column":10},"end":{"row":46,"column":11},"action":"remove","lines":["d"]},{"start":{"row":46,"column":9},"end":{"row":46,"column":10},"action":"remove","lines":["_"]},{"start":{"row":46,"column":8},"end":{"row":46,"column":9},"action":"remove","lines":["e"]},{"start":{"row":46,"column":7},"end":{"row":46,"column":8},"action":"remove","lines":["t"]},{"start":{"row":46,"column":6},"end":{"row":46,"column":7},"action":"remove","lines":["i"]},{"start":{"row":46,"column":5},"end":{"row":46,"column":6},"action":"remove","lines":["r"]}],[{"start":{"row":46,"column":4},"end":{"row":46,"column":5},"action":"remove","lines":["w"],"id":1888},{"start":{"row":46,"column":0},"end":{"row":46,"column":4},"action":"remove","lines":[" 
"]},{"start":{"row":45,"column":11},"end":{"row":46,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":35,"column":1},"end":{"row":35,"column":2},"action":"remove","lines":[" "],"id":1889}],[{"start":{"row":34,"column":0},"end":{"row":34,"column":1},"action":"remove","lines":["d"],"id":1890}],[{"start":{"row":19,"column":18},"end":{"row":19,"column":19},"action":"remove","lines":[":"],"id":1891},{"start":{"row":19,"column":17},"end":{"row":19,"column":18},"action":"remove","lines":[")"]},{"start":{"row":19,"column":16},"end":{"row":19,"column":17},"action":"remove","lines":["("]},{"start":{"row":19,"column":15},"end":{"row":19,"column":16},"action":"remove","lines":["e"]},{"start":{"row":19,"column":14},"end":{"row":19,"column":15},"action":"remove","lines":["l"]},{"start":{"row":19,"column":13},"end":{"row":19,"column":14},"action":"remove","lines":["b"]},{"start":{"row":19,"column":12},"end":{"row":19,"column":13},"action":"remove","lines":["a"]},{"start":{"row":19,"column":11},"end":{"row":19,"column":12},"action":"remove","lines":["t"]},{"start":{"row":19,"column":10},"end":{"row":19,"column":11},"action":"remove","lines":["_"]},{"start":{"row":19,"column":9},"end":{"row":19,"column":10},"action":"remove","lines":["e"]},{"start":{"row":19,"column":8},"end":{"row":19,"column":9},"action":"remove","lines":["t"]},{"start":{"row":19,"column":7},"end":{"row":19,"column":8},"action":"remove","lines":["a"]},{"start":{"row":19,"column":6},"end":{"row":19,"column":7},"action":"remove","lines":["e"]},{"start":{"row":19,"column":5},"end":{"row":19,"column":6},"action":"remove","lines":["r"]},{"start":{"row":19,"column":4},"end":{"row":19,"column":5},"action":"remove","lines":["c"]},{"start":{"row":19,"column":3},"end":{"row":19,"column":4},"action":"remove","lines":[" 
"]}],[{"start":{"row":19,"column":2},"end":{"row":19,"column":3},"action":"remove","lines":["f"],"id":1892},{"start":{"row":19,"column":1},"end":{"row":19,"column":2},"action":"remove","lines":["e"]},{"start":{"row":19,"column":0},"end":{"row":19,"column":1},"action":"remove","lines":["d"]}],[{"start":{"row":20,"column":0},"end":{"row":20,"column":4},"action":"remove","lines":[" "],"id":1893}],[{"start":{"row":20,"column":4},"end":{"row":20,"column":5},"action":"remove","lines":["y"],"id":1894},{"start":{"row":20,"column":3},"end":{"row":20,"column":4},"action":"remove","lines":["r"]},{"start":{"row":20,"column":2},"end":{"row":20,"column":3},"action":"remove","lines":["e"]},{"start":{"row":20,"column":1},"end":{"row":20,"column":2},"action":"remove","lines":["u"]},{"start":{"row":20,"column":0},"end":{"row":20,"column":1},"action":"remove","lines":["q"]}],[{"start":{"row":20,"column":0},"end":{"row":20,"column":1},"action":"insert","lines":["Q"],"id":1895},{"start":{"row":20,"column":1},"end":{"row":20,"column":2},"action":"insert","lines":["1"]}],[{"start":{"row":31,"column":7},"end":{"row":31,"column":8},"action":"remove","lines":["t"],"id":1896},{"start":{"row":31,"column":6},"end":{"row":31,"column":7},"action":"remove","lines":["i"]},{"start":{"row":31,"column":5},"end":{"row":31,"column":6},"action":"remove","lines":["r"]},{"start":{"row":31,"column":4},"end":{"row":31,"column":5},"action":"remove","lines":["w"]},{"start":{"row":31,"column":0},"end":{"row":31,"column":4},"action":"remove","lines":[" 
"]}],[{"start":{"row":31,"column":12},"end":{"row":31,"column":13},"action":"remove","lines":[")"],"id":1897},{"start":{"row":31,"column":11},"end":{"row":31,"column":12},"action":"remove","lines":["y"]},{"start":{"row":31,"column":10},"end":{"row":31,"column":11},"action":"remove","lines":["r"]},{"start":{"row":31,"column":9},"end":{"row":31,"column":10},"action":"remove","lines":["e"]},{"start":{"row":31,"column":8},"end":{"row":31,"column":9},"action":"remove","lines":["u"]},{"start":{"row":31,"column":7},"end":{"row":31,"column":8},"action":"remove","lines":["q"]},{"start":{"row":31,"column":6},"end":{"row":31,"column":7},"action":"remove","lines":["("]},{"start":{"row":31,"column":5},"end":{"row":31,"column":6},"action":"remove","lines":["a"]},{"start":{"row":31,"column":4},"end":{"row":31,"column":5},"action":"remove","lines":["t"]},{"start":{"row":31,"column":3},"end":{"row":31,"column":4},"action":"remove","lines":["a"]},{"start":{"row":31,"column":2},"end":{"row":31,"column":3},"action":"remove","lines":["d"]},{"start":{"row":31,"column":1},"end":{"row":31,"column":2},"action":"remove","lines":["_"]}],[{"start":{"row":31,"column":0},"end":{"row":31,"column":1},"action":"remove","lines":["e"],"id":1898},{"start":{"row":30,"column":11},"end":{"row":31,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":21,"column":6},"end":{"row":21,"column":7},"action":"remove","lines":[" "],"id":1899}],[{"start":{"row":21,"column":6},"end":{"row":21,"column":7},"action":"insert","lines":[" "],"id":1900}],[{"start":{"row":20,"column":6},"end":{"row":21,"column":0},"action":"remove","lines":["",""],"id":1901}],[{"start":{"row":20,"column":6},"end":{"row":20,"column":14},"action":"remove","lines":[" "],"id":1902},{"start":{"row":20,"column":6},"end":{"row":21,"column":0},"action":"insert","lines":["",""]}],[{"start":{"row":21,"column":0},"end":{"row":21,"column":4},"action":"insert","lines":[" 
"],"id":1903}],[{"start":{"row":21,"column":4},"end":{"row":21,"column":8},"action":"insert","lines":[" "],"id":1904}],[{"start":{"row":30,"column":4},"end":{"row":30,"column":8},"action":"remove","lines":[" "],"id":1905}],[{"start":{"row":34,"column":2},"end":{"row":34,"column":3},"action":"remove","lines":[" "],"id":1906},{"start":{"row":34,"column":1},"end":{"row":34,"column":2},"action":"remove","lines":[" "]},{"start":{"row":34,"column":0},"end":{"row":34,"column":1},"action":"remove","lines":[" "]}],[{"start":{"row":34,"column":1},"end":{"row":34,"column":2},"action":"remove","lines":["1"],"id":1907}],[{"start":{"row":34,"column":1},"end":{"row":34,"column":2},"action":"insert","lines":["2"],"id":1908}],[{"start":{"row":46,"column":1},"end":{"row":46,"column":2},"action":"remove","lines":["2"],"id":1909}],[{"start":{"row":46,"column":1},"end":{"row":46,"column":2},"action":"insert","lines":["3"],"id":1910}],[{"start":{"row":52,"column":1},"end":{"row":52,"column":2},"action":"remove","lines":["3"],"id":1911}],[{"start":{"row":52,"column":1},"end":{"row":52,"column":2},"action":"insert","lines":["4"],"id":1912}],[{"start":{"row":60,"column":1},"end":{"row":60,"column":2},"action":"remove","lines":["4"],"id":1913}],[{"start":{"row":60,"column":1},"end":{"row":60,"column":2},"action":"insert","lines":["5"],"id":1914}],[{"start":{"row":65,"column":1},"end":{"row":65,"column":2},"action":"remove","lines":["5"],"id":1915}],[{"start":{"row":65,"column":1},"end":{"row":65,"column":2},"action":"insert","lines":["6"],"id":1916}],[{"start":{"row":68,"column":1},"end":{"row":68,"column":2},"action":"remove","lines":["6"],"id":1917}],[{"start":{"row":68,"column":1},"end":{"row":68,"column":2},"action":"insert","lines":["7"],"id":1918}],[{"start":{"row":72,"column":1},"end":{"row":72,"column":2},"action":"remove","lines":["7"],"id":1919}],[{"start":{"row":72,"column":1},"end":{"row":72,"column":2},"action":"insert","lines":["8"],"id":1920}],[{"start":{"row":77,"column":1},"
end":{"row":77,"column":2},"action":"remove","lines":["8"],"id":1921}],[{"start":{"row":77,"column":1},"end":{"row":77,"column":2},"action":"insert","lines":["9"],"id":1922}],[{"start":{"row":80,"column":1},"end":{"row":80,"column":2},"action":"remove","lines":["9"],"id":1923}],[{"start":{"row":80,"column":1},"end":{"row":80,"column":2},"action":"insert","lines":["1"],"id":1924},{"start":{"row":80,"column":2},"end":{"row":80,"column":3},"action":"insert","lines":["0"]}],[{"start":{"row":84,"column":2},"end":{"row":84,"column":3},"action":"remove","lines":["0"],"id":1925}],[{"start":{"row":84,"column":2},"end":{"row":84,"column":3},"action":"insert","lines":["1"],"id":1926}],[{"start":{"row":50,"column":0},"end":{"row":50,"column":1},"action":"insert","lines":[" "],"id":1927}]]},"ace":{"folds":[],"scrolltop":945.7357509671659,"scrollleft":0,"selection":{"start":{"row":62,"column":30},"end":{"row":62,"column":30},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":{"row":54,"state":"qqstring3","mode":"ace/mode/python"}},"timestamp":1582709705660,"hash":"8fd201e85f110b60ed46f04eaf6c65cd6bd234ad"} | [
"[email protected]"
]
| |
f769efd583a1443d13ef6822ba32e7143583ca0e | e909e9bb4b2e54bb64d6bee9cf9fbaf14c584e04 | /malib/rpc/data/data_client.py | 9f234eee232e8402b6f1b1d21719a6bc1572db22 | [
"MIT"
]
| permissive | zhihaolyu/malib | 9cd8fdcdc1c613c11fc1e6f385adac5312474509 | 1c7ca1819325796a6ec604aa1ae8c771708fc50c | refs/heads/main | 2023-05-13T03:41:05.211832 | 2021-06-08T04:35:10 | 2021-06-08T04:35:10 | 374,880,657 | 0 | 0 | MIT | 2021-06-08T04:29:26 | 2021-06-08T04:29:25 | null | UTF-8 | Python | false | false | 385 | py | import grpc
import sys
sys.path.append("..")
from ..proto import data_pb2_grpc, data_pb2
def send(server_port, **kargs):
with grpc.insecure_channel(server_port) as channel:
stub = data_pb2_grpc.DataRPCStub(channel)
pr = data_pb2.PullRequest(
type=kargs["tid"], schema_id=kargs["sid"], instance_id=kargs["iid"]
)
data = stub.Pull(pr)
| [
"[email protected]"
]
| |
d5e9da7158d1d9e5da3315f240ce40a568384534 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r10p1/Gen/DecFiles/options/13114025.py | 94da046c51675f492cbb850c1728133a7ed747e7 | []
| no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,779 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r10p1/Gen/DecFiles/options/13114025.py generated: Wed, 25 Jan 2017 15:25:30
#
# Event Type: 13114025
#
# ASCII decay Descriptor: [B_s0 -> (phi(1020) -> mu+ mu-) mu+ mu-]cc
#
from Configurables import Generation
Generation().EventType = 13114025
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Bs_phimumu,mm=MS,DecProdCut.dec"
Generation().SignalRepeatedHadronization.CutTool = "DaughtersInLHCb"
Generation().SignalRepeatedHadronization.SignalPIDList = [ 531,-531 ]
# Ad-hoc particle gun code
from Configurables import ParticleGun
pgun = ParticleGun("ParticleGun")
pgun.SignalPdgCode = 531
pgun.DecayTool = "EvtGenDecay"
pgun.GenCutTool = "DaughtersInLHCb"
from Configurables import FlatNParticles
pgun.NumberOfParticlesTool = "FlatNParticles"
pgun.addTool( FlatNParticles , name = "FlatNParticles" )
from Configurables import MomentumSpectrum
pgun.ParticleGunTool = "MomentumSpectrum"
pgun.addTool( MomentumSpectrum , name = "MomentumSpectrum" )
pgun.MomentumSpectrum.PdgCodes = [ 531,-531 ]
pgun.MomentumSpectrum.InputFile = "$PGUNSDATAROOT/data/Ebeam4000GeV/MomentumSpectrum_531.root"
pgun.MomentumSpectrum.BinningVariables = "pteta"
pgun.MomentumSpectrum.HistogramPath = "h_pteta"
from Configurables import BeamSpotSmearVertex
pgun.addTool(BeamSpotSmearVertex, name="BeamSpotSmearVertex")
pgun.VertexSmearingTool = "BeamSpotSmearVertex"
pgun.EventType = 13114025
| [
"[email protected]"
]
| |
2e32069c83261894997d74d96a146dafe51ebab7 | 4a89841fa3a73a6826d47d1e66f965759335118b | /askbot-devel-master/askbot/management/commands/send_unanswered_question_reminders.py | 42ce51196170a8a2bd64d75309e60f36b0ad5fb4 | []
| no_license | liyonghelpme/askbotDataWriter | 38e2515712a8a6f9db45ce69ba21d36fd1e2dcc9 | f88d343f8fd699b2d55b94d6dff4edda8e352301 | refs/heads/master | 2021-01-22T10:08:01.173519 | 2013-07-19T07:12:35 | 2013-07-19T07:12:35 | 11,522,328 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,901 | py | from django.core.management.base import NoArgsCommand
from django.template.loader import get_template
from askbot import models
from askbot import const
from askbot.conf import settings as askbot_settings
from django.utils.translation import ungettext
from askbot import mail
from askbot.utils.classes import ReminderSchedule
from askbot.models.question import Thread
from askbot.utils.html import site_url
from django.template import Context
DEBUG_THIS_COMMAND = False
class Command(NoArgsCommand):
"""management command that sends reminders
about unanswered questions to all users
"""
def handle_noargs(self, **options):
if askbot_settings.ENABLE_EMAIL_ALERTS == False:
return
if askbot_settings.ENABLE_UNANSWERED_REMINDERS == False:
return
#get questions without answers, excluding closed and deleted
#order it by descending added_at date
schedule = ReminderSchedule(
askbot_settings.DAYS_BEFORE_SENDING_UNANSWERED_REMINDER,
askbot_settings.UNANSWERED_REMINDER_FREQUENCY,
max_reminders = askbot_settings.MAX_UNANSWERED_REMINDERS
)
questions = models.Post.objects.get_questions().exclude(
thread__closed = True
).exclude(
deleted = True
).added_between(
start = schedule.start_cutoff_date,
end = schedule.end_cutoff_date
).filter(
thread__answer_count = 0
).order_by('-added_at')
#for all users, excluding blocked
#for each user, select a tag filtered subset
#format the email reminder and send it
for user in models.User.objects.exclude(status = 'b'):
user_questions = questions.exclude(author = user)
user_questions = user.get_tag_filtered_questions(user_questions)
if askbot_settings.GROUPS_ENABLED:
user_groups = user.get_groups()
user_questions = user_questions.filter(groups__in = user_groups)
final_question_list = user_questions.get_questions_needing_reminder(
user = user,
activity_type = const.TYPE_ACTIVITY_UNANSWERED_REMINDER_SENT,
recurrence_delay = schedule.recurrence_delay
)
question_count = len(final_question_list)
if question_count == 0:
continue
threads = Thread.objects.filter(id__in=[qq.thread_id for qq in final_question_list])
tag_summary = Thread.objects.get_tag_summary_from_threads(threads)
subject_line = ungettext(
'%(question_count)d unanswered question about %(topics)s',
'%(question_count)d unanswered questions about %(topics)s',
question_count
) % {
'question_count': question_count,
'topics': tag_summary
}
data = {
'site_url': site_url(''),
'questions': final_question_list,
'subject_line': subject_line
}
template = get_template('email/unanswered_question_reminder.html')
body_text = template.render(Context(data))#todo: set lang
if DEBUG_THIS_COMMAND:
print "User: %s<br>\nSubject:%s<br>\nText: %s<br>\n" % \
(user.email, subject_line, body_text)
else:
mail.send_mail(
subject_line = subject_line,
body_text = body_text,
recipient_list = (user.email,)
)
| [
"[email protected]"
]
| |
95217192c353378d62e5bf472e9fe7efb3b4f83e | d4f9d104479b6f9a64175a3fe8554860bf0d62b2 | /popular_words.py | 77bfe82384de8cab45193f40caf3c730eae4076c | []
| no_license | pohily/checkio | 9a09c9c52b4f07438cfe4e00914e8d1cfe844c5d | 8a0a49126af6e09b9e5e6067f28efbf085cd87f6 | refs/heads/master | 2020-05-16T03:18:18.068186 | 2019-07-06T13:22:20 | 2019-07-06T13:22:20 | 182,674,736 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,259 | py | def popular_words(text: str, words: list) -> dict:
# your code here
text = text.lower()
result = {}
for word in words:
text1 = text
count = 0
while text1:
if word not in text1:
break
found = text1.index(word)
if found != 0:
m = text1[found-1]
if m != ' ' and m != '\n':
text1 = text1[(found + len(word)):]
continue
text1 = text1[(found + len(word)):]
if not text1:
count += 1
break
if text1[0] == " " or text1[0] == "\n" or text1[0] == ',' or text1[0] == '.':
count += 1
if word not in text1:
break
result[word] = count
return result
print(popular_words('''
And the Raven never flitting still is sitting still is sitting
On the pallid bust of Pallas just above my chamber door
And his eyes have all the seeming of a demon’s that is dreaming
And the lamp-light o’er him streaming throws his shadow on the floor
And my soul from out that shadow that lies floating on the floor
Shall be lifted nevermore
''', ["raven","still","is","floor","nevermore"]))
| [
"[email protected]"
]
| |
8efe74872adef5632364a95d1cc58619fe6392aa | 9dded9364d7a5dd969c7ebb3026c884c8bd3053f | /AUDIOCATE/APP/migrations/0007_bookmar.py | 3f48ecfe06144f2fa60f1fab68e12ab87a7f095c | []
| no_license | ehizman/AUDIOCATE | 5a45828b3e1c6de16826e45195e68b8b0e08ab25 | edbd7677025d4a431240bf253966d11658e7652d | refs/heads/master | 2023-01-31T03:48:05.645443 | 2020-12-16T02:31:02 | 2020-12-16T02:31:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 551 | py | # Generated by Django 2.2.4 on 2020-12-14 01:20
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('APP', '0006_explore_date'),
]
operations = [
migrations.CreateModel(
name='Bookmar',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.TextField()),
('link', models.CharField(max_length=225)),
],
),
]
| [
"[email protected]"
]
| |
aed9d8a521f0e1c53b73413d7f5f5d17712daaff | 478a4a0495fafc62000dc53cef749b87b7a9be55 | /virtual/bin/pip3 | 95352667bd7817eab73dd4a6da56bdc610064dcd | []
| no_license | DavidNganga/simple-error | 696a0f9e6482b38c5670b5d618120a9220b7fcaf | 351c5ace3c1487570d19ee0b5e0ade70d40f1b1c | refs/heads/master | 2020-03-18T23:38:56.974235 | 2018-06-03T15:30:48 | 2018-06-03T15:30:48 | 135,416,314 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | #!/home/david/simple-error/virtual/bin/python3.6
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
]
| ||
6451035e29061d208fd1945893c984c0c86d26a1 | cc5a3fa80d2ae90afc2626e4a82b9a927726dfa0 | /huaweicloud-sdk-frs/huaweicloudsdkfrs/v2/model/add_faces_by_url_response.py | a0b5174b327a3d9e652b2d8df2e8c1d453bf59e8 | [
"Apache-2.0"
]
| permissive | Logan118/huaweicloud-sdk-python-v3 | eca15e9b08bdccef7122e40735d444ddc958efa8 | bb230c03bd00225b9f5780a56adce596e9456420 | refs/heads/master | 2023-07-17T14:57:50.799564 | 2021-08-25T10:40:43 | 2021-08-25T10:40:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,991 | py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class AddFacesByUrlResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'face_set_id': 'str',
'face_set_name': 'str',
'faces': 'list[FaceSetFace]'
}
attribute_map = {
'face_set_id': 'face_set_id',
'face_set_name': 'face_set_name',
'faces': 'faces'
}
def __init__(self, face_set_id=None, face_set_name=None, faces=None):
"""AddFacesByUrlResponse - a model defined in huaweicloud sdk"""
super(AddFacesByUrlResponse, self).__init__()
self._face_set_id = None
self._face_set_name = None
self._faces = None
self.discriminator = None
if face_set_id is not None:
self.face_set_id = face_set_id
if face_set_name is not None:
self.face_set_name = face_set_name
if faces is not None:
self.faces = faces
@property
def face_set_id(self):
"""Gets the face_set_id of this AddFacesByUrlResponse.
人脸库ID。 调用失败时无此字段。
:return: The face_set_id of this AddFacesByUrlResponse.
:rtype: str
"""
return self._face_set_id
@face_set_id.setter
def face_set_id(self, face_set_id):
"""Sets the face_set_id of this AddFacesByUrlResponse.
人脸库ID。 调用失败时无此字段。
:param face_set_id: The face_set_id of this AddFacesByUrlResponse.
:type: str
"""
self._face_set_id = face_set_id
@property
def face_set_name(self):
"""Gets the face_set_name of this AddFacesByUrlResponse.
人脸库名称。 调用失败时无此字段。
:return: The face_set_name of this AddFacesByUrlResponse.
:rtype: str
"""
return self._face_set_name
@face_set_name.setter
def face_set_name(self, face_set_name):
"""Sets the face_set_name of this AddFacesByUrlResponse.
人脸库名称。 调用失败时无此字段。
:param face_set_name: The face_set_name of this AddFacesByUrlResponse.
:type: str
"""
self._face_set_name = face_set_name
@property
def faces(self):
"""Gets the faces of this AddFacesByUrlResponse.
人脸库当中的人脸结构,详见[FaceSetFace](zh-cn_topic_0106912070.xml)。 调用失败时无此字段。
:return: The faces of this AddFacesByUrlResponse.
:rtype: list[FaceSetFace]
"""
return self._faces
@faces.setter
def faces(self, faces):
"""Sets the faces of this AddFacesByUrlResponse.
人脸库当中的人脸结构,详见[FaceSetFace](zh-cn_topic_0106912070.xml)。 调用失败时无此字段。
:param faces: The faces of this AddFacesByUrlResponse.
:type: list[FaceSetFace]
"""
self._faces = faces
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AddFacesByUrlResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
]
| |
48d3d2345ecb774006b7797e6dfb19ea0489873f | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/63/usersdata/147/28536/submittedfiles/swamee.py | 87c492832ea836815b97f42dfba4a884abe8852b | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | # -*- coding: utf-8 -*-
import math
#COMECE SEU CÓDIGO AQUI
g=9.81
epsilon=0.000005
f=float(input('digite valor de f:'))
L=float(input('digite valor de L:'))
Q=float(input('digite valor de Q:'))
deltaH=float(input('digite valor de deltaH:'))
v=float(input('digite valor de v:'))
d=((8*f*L*(Q**2))/((math.pi**2)*g*deltaH))**0.5
rey=(4*Q)/(math.pi*d*v)
k=(0.25)/math.log10((epsilon/(3.7*d))+(5.7/(rey**0.9))**2)
print('o valor de d é %.2f' %d)
print('o valor de rey é %.2f' %rey)
print('o valor de k é %.2f' %k) | [
"[email protected]"
]
| |
0079ec1753397ec8e2d4db72f17762047e237974 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_113/ch20_2020_09_16_11_25_21_743333.py | 2edf4921f042e6256794183644e3ed17b47e767a | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | dist = int(input('Qual a distancia de sua viajem?: '))
if dist <= 200:
preco = dist * 0.5
print(preco)
elif dist > 200:
dist -= 200
preco = dist * 0.45
preco += 100
print ('{0:.2f}'.format(preco))
| [
"[email protected]"
]
| |
33986ed30f53a19439cdd7d07c782a582f0d133e | 18d087b0fca0f80018861da6197e30d712fc248b | /S05/question_files/main.com.py | 9a975a8ffc12d8004a7b40ff9d66beceaed06180 | []
| no_license | pymft/mft-05 | 6a92f3e9e9e9568b602f0de8daae310e76646fac | dde1ff239163123494535ab1b4c3c86c4b01599f | refs/heads/master | 2020-06-03T21:56:12.289836 | 2019-08-29T13:52:02 | 2019-08-29T13:52:02 | 191,747,168 | 1 | 5 | null | null | null | null | UTF-8 | Python | false | false | 1,104 | py | import glob
def path_to_root(dct, number):
parent = dct[number]
if parent == 0:
return [number]
return path_to_root(dct, parent) + [number]
def convert(parents):
children = {0: []}
for k in parents:
children[k] = []
for k in parents:
val = parents[k]
children[val].append(k)
return children
def find_no_children_nodes(parent_to_children):
res = []
for k in parent_to_children:
if parent_to_children[k] == []:
res.append(k)
return res
child_to_parent = {}
list_of_files = glob.glob('./files/*.txt')
for f in list_of_files:
child = f[8:-4]
parent = open(f).read()
child = int(child)
parent = int(parent)
child_to_parent[child] = parent
parent_to_children = convert(child_to_parent)
print(child_to_parent)
print(parent_to_children)
max_path = []
for node in find_no_children_nodes(parent_to_children):
path = path_to_root(child_to_parent, node)
if len(path) > len(max_path):
max_path = path
print(path_to_root(child_to_parent, 6638932548))
print(max_path) | [
"[email protected]"
]
| |
7ebfec0556e46db57e2c4d1eca4d13ef6452d0ce | 005a6421cd6159fb6be8c61cc675654377e8f226 | /cairis/core/TemplateObstacleParameters.py | dd6ad8c3e8c2351b3464509195afd601d5e88470 | [
"Apache-2.0"
]
| permissive | cairis-platform/cairis | d667bc91ba28f0b7cd4fc88e6528eb3339e4ee6f | 55abb93a9377664f5b03c027bad7ce3cf168c5ad | refs/heads/master | 2023-04-06T17:04:08.781186 | 2023-02-17T22:51:15 | 2023-02-17T22:51:15 | 3,790,944 | 105 | 36 | Apache-2.0 | 2022-03-19T15:04:14 | 2012-03-21T20:17:05 | Python | UTF-8 | Python | false | false | 1,712 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
__author__ = 'Shamal Faily'
from . import ObjectCreationParameters
class TemplateObstacleParameters(ObjectCreationParameters.ObjectCreationParameters):
def __init__(self,obsName,obsCat,obsDef,obsConcerns,obsResp,obsProb,obsProbRat):
ObjectCreationParameters.ObjectCreationParameters.__init__(self)
self.theName = obsName
self.theCategory = obsCat
self.theDefinition = obsDef
self.theConcerns = obsConcerns
self.theResponsibilities = obsResp
self.theProbability = obsProb
self.theProbabilityRationale = obsProbRat
def name(self): return self.theName
def category(self): return self.theCategory
def definition(self): return self.theDefinition
def concerns(self): return self.theConcerns
def responsibilities(self): return self.theResponsibilities
def probability(self): return self.theProbability
def probabilityRationale(self): return self.theProbabilityRationale
| [
"[email protected]"
]
| |
e0cef3709184ba38a5b1c49088dd488ff94fe2d7 | 5a4436884af5341ce855c0e84866b972a0f61c05 | /day4/classes/student/13.py | 42d32a7eccc68bd225d21bb75b5d00847af1380c | []
| no_license | sreejithev/pythoncodes | 74a420c4f025b893e27f17ba85632a4a096f17fd | 70df14871a9687916d1c4ada76c055607f13e8ce | refs/heads/master | 2021-01-21T20:59:47.056167 | 2017-06-19T09:43:17 | 2017-06-19T09:43:17 | 92,292,259 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | class student:
def __init__(self, name, age, rank):
self.name = name
self.age = age
self.rank = rank
def __str__(self):
return 'name = %s, age = %d, rank = %d' % (self.name, self.age, self.rank)
def __lt__(self, other):
if self.rank > other.rank:
return True
else:
return False
student1 = student('John', 20, 100)
student2 = student('Ram', 19, 120)
s = student2 < student1
print s
# s = student2__lt__(student1)
| [
"[email protected]"
]
| |
071d6d5852eff59b3a1dc1dfe98706254fa481ae | 901944f407f4a06a4c4027d6139ce21165976857 | /RL4/rl_mar2018_99_stableversion_andimplicit/train4.py | 0e03a42afd50da8c62faf9a50b887d5278e716f9 | []
| no_license | chriscremer/Other_Code | a406da1d567d63bf6ef9fd5fbf0a8f177bc60b05 | 7b394fa87523803b3f4536b316df76cc44f8846e | refs/heads/master | 2021-01-17T02:34:56.215047 | 2020-05-26T13:59:05 | 2020-05-26T13:59:05 | 34,680,279 | 7 | 4 | null | null | null | null | UTF-8 | Python | false | false | 13,372 | py |
import os
from os.path import expanduser
home = expanduser("~")
import sys
for i in range(len(sys.path)):
if 'er/Documents' in sys.path[i]:
sys.path.remove(sys.path[i])#[i]
break
import copy
import glob
import os
import time
import gym
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
from torch.utils.data.sampler import BatchSampler, SubsetRandomSampler
sys.path.insert(0, '../baselines/')
sys.path.insert(0, '../baselines/baselines/common/vec_env')
from subproc_vec_env import SubprocVecEnv
sys.path.insert(0, './utils/')
from envs import make_env, make_env_monitor, make_env_basic
# from agent_modular2 import a2c
# from agent_modular2 import ppo
# from agent_modular2 import a2c_minibatch
# from agent_modular2 import a2c_list_rollout
# from agent_modular2 import a2c_with_var
from a2c_agents import a2c
from train_utils import do_vid, do_gifs, do_params, do_ls, update_ls_plot, save_params_v2, load_params_v2
sys.path.insert(0, './visualizations/')
from make_plots import make_plots
import argparse
import json
import subprocess
from discriminator import CNN_Discriminator
from discrim_preds import discrim_predictions
def train(model_dict):
def update_current_state(current_state, state, channels):
# current_state: [processes, channels*stack, height, width]
state = torch.from_numpy(state).float() # (processes, channels, height, width)
# if num_stack > 1:
#first stack*channel-channel frames = last stack*channel-channel , so slide them forward
current_state[:, :-channels] = current_state[:, channels:]
current_state[:, -channels:] = state #last frame is now the new one
return current_state
def update_rewards(reward, done, final_rewards, episode_rewards, current_state):
# Reward, Done: [P], [P]
# final_rewards, episode_rewards: [P,1]. [P,1]
# current_state: [P,C*S,H,W]
reward = torch.from_numpy(np.expand_dims(np.stack(reward), 1)).float() #[P,1]
episode_rewards += reward #keeps track of current episode cumulative reward
masks = torch.FloatTensor([[0.0] if done_ else [1.0] for done_ in done]) #[P,1]
final_rewards *= masks #erase the ones that are done
final_rewards += (1 - masks) * episode_rewards #set it to the cumulative episode reward
episode_rewards *= masks #erase the done ones
masks = masks.type(dtype) #cuda
if current_state.dim() == 4: # if state is a frame/image
current_state *= masks.unsqueeze(2).unsqueeze(2) #[P,1,1,1]
else:
current_state *= masks #restart the done ones, by setting the state to zero
return reward, masks, final_rewards, episode_rewards, current_state
num_frames = model_dict['num_frames']
cuda = model_dict['cuda']
which_gpu = model_dict['which_gpu']
num_steps = model_dict['num_steps']
num_processes = model_dict['num_processes']
seed = model_dict['seed']
env_name = model_dict['env']
save_dir = model_dict['save_to']
num_stack = model_dict['num_stack']
algo = model_dict['algo']
save_interval = model_dict['save_interval']
log_interval = model_dict['log_interval']
save_params = model_dict['save_params']
vid_ = model_dict['vid_']
gif_ = model_dict['gif_']
ls_ = model_dict['ls_']
os.environ['OMP_NUM_THREADS'] = '1'
os.environ['CUDA_VISIBLE_DEVICES'] = str(which_gpu)
if cuda:
torch.cuda.manual_seed(seed)
dtype = torch.cuda.FloatTensor
model_dict['dtype']=dtype
else:
torch.manual_seed(seed)
dtype = torch.FloatTensor
model_dict['dtype']=dtype
# Create environments
print (num_processes, 'processes')
monitor_rewards_dir = os.path.join(save_dir, 'monitor_rewards')
if not os.path.exists(monitor_rewards_dir):
os.makedirs(monitor_rewards_dir)
print ('Made dir', monitor_rewards_dir)
envs = SubprocVecEnv([make_env(env_name, seed, i, monitor_rewards_dir) for i in range(num_processes)])
if vid_:
print ('env for video')
envs_video = make_env_monitor(env_name, save_dir)
if gif_:
print ('env for gif')
envs_gif = make_env_basic(env_name)
if ls_:
print ('env for ls')
envs_ls = make_env_basic(env_name)
obs_shape = envs.observation_space.shape # (channels, height, width)
obs_shape = (obs_shape[0] * num_stack, *obs_shape[1:]) # (channels*stack, height, width)
shape_dim0 = envs.observation_space.shape[0] #channels
model_dict['obs_shape']=obs_shape
model_dict['shape_dim0']=shape_dim0
# print (envs.action_space)
# print (envs.action_space.shape)
action_size = envs.action_space.shape[0]
print (obs_shape)
print(action_size)
fasd
if action_size == 1:
action_size = 2
# model_dict['action_size'] = envs.action_space.n
model_dict['action_size'] = action_size
# Create agent
if algo == 'a2c':
agent = a2c(envs, model_dict)
print ('init a2c agent')
elif algo == 'ppo':
agent = ppo(envs, model_dict)
print ('init ppo agent')
elif algo == 'a2c_minibatch':
agent = a2c_minibatch(envs, model_dict)
print ('init a2c_minibatch agent')
elif algo == 'a2c_list_rollout':
agent = a2c_list_rollout(envs, model_dict)
print ('init a2c_list_rollout agent')
elif algo == 'a2c_with_var':
agent = a2c_with_var(envs, model_dict)
print ('init a2c_with_var agent')
# elif algo == 'a2c_bin_mask':
# agent = a2c_with_var(envs, model_dict)
# print ('init a2c_with_var agent')
# agent = model_dict['agent'](envs, model_dict)
#Load model
if model_dict['load_params']:
# agent.actor_critic = torch.load(os.path.join(args.load_path))
# agent.actor_critic = torch.load(args.load_path).cuda()
if model_dict['load_params_implicit']:
# load_params_v2(home+'/Documents/tmp/confirm_implicit_works3/BreakoutNoFrameskip-v4/A2C_Implicit/seed0/', agent, 5500160, model_dict)
# load_params_v2(home+'/Documents/tmp/confirm_works_1_withsaving/PongNoFrameskip-v4/a2c/seed0/', agent, 8000160, model_dict)
# print ('loaded ', args.load_path)
if model_dict['load_number'] == 1:
# load_params_v2(home+'/Documents/tmp/confirm_works_1_withsaving/PongNoFrameskip-v4/a2c/seed0/', agent, 3000160, model_dict)
load_params_v2(home+'/Documents/tmp/confirm_implicit_works3/BreakoutNoFrameskip-v4/A2C_Implicit/seed0/', agent, 1000160, model_dict)
elif model_dict['load_number'] == 3:
# load_params_v2(home+'/Documents/tmp/confirm_works_1_withsaving/PongNoFrameskip-v4/a2c/seed0/', agent, 6000160, model_dict)
load_params_v2(home+'/Documents/tmp/confirm_implicit_works3/BreakoutNoFrameskip-v4/A2C_Implicit/seed0/', agent, 3000160, model_dict)
elif model_dict['load_number'] == 5:
# load_params_v2(home+'/Documents/tmp/confirm_works_1_withsaving/PongNoFrameskip-v4/a2c/seed0/', agent, 9000160, model_dict)
load_params_v2(home+'/Documents/tmp/confirm_implicit_works3/BreakoutNoFrameskip-v4/A2C_Implicit/seed0/', agent, 5000160, model_dict)
# else:
# load_params_v2(home+'/Documents/tmp/confirm_works_1_withsaving/PongNoFrameskip-v4/a2c/seed0/', agent, 8000160, model_dict)
else:
PROBLEM
if model_dict['implicit']:
action_predictor = CNN_Discriminator(model_dict).cuda()
print ('init action_predictor')
# Init state
state = envs.reset() # (processes, channels, height, width)
current_state = torch.zeros(num_processes, *obs_shape) # (processes, channels*stack, height, width)
current_state = update_current_state(current_state, state, shape_dim0).type(dtype) #add the new frame, remove oldest
agent.insert_first_state(current_state) #storage has states: (num_steps + 1, num_processes, *obs_shape), set first step
# These are used to compute average rewards for all processes.
episode_rewards = torch.zeros([num_processes, 1]) #keeps track of current episode cumulative reward
final_rewards = torch.zeros([num_processes, 1])
num_updates = int(num_frames) // num_steps // num_processes
save_interval_num_updates = int(save_interval /num_processes/num_steps)
#Begin training
# count =0
start = time.time()
start2 = time.time()
for j in range(num_updates):
for step in range(num_steps):
# Act, [P,1], [P], [P,1], [P]
# value, action = agent.act(Variable(agent.rollouts.states[step], volatile=True))
value, action, action_log_probs, dist_entropy = agent.act(Variable(agent.rollouts.states[step]))#, volatile=True))
# print (action_log_probs.size())
# print (dist_entropy.size())
cpu_actions = action.data.squeeze(1).cpu().numpy() #[P]
# cpu_actions = action.data.cpu().numpy() #[P]
# print (actions.size())
# Step, S:[P,C,H,W], R:[P], D:[P]
state, reward, done, info = envs.step(cpu_actions)
# Record rewards and update state
reward, masks, final_rewards, episode_rewards, current_state = update_rewards(reward, done, final_rewards, episode_rewards, current_state)
current_state = update_current_state(current_state, state, shape_dim0)
# Agent record step
# agent.insert_data(step, current_state, action.data, value.data, reward, masks, action_log_probs.data, dist_entropy.data)
agent.insert_data(step, current_state, action.data, value, reward, masks, action_log_probs, dist_entropy) #, done)
#Optimize agent
if model_dict['implicit']:
discrim_errors = discrim_predictions(model_dict, agent.rollouts, action_predictor)
discrim_errors_reverse = discrim_predictions(model_dict, agent.rollouts, action_predictor, reverse=True)
#Optimize action_predictor
action_predictor.optimize(discrim_errors)
#Optimize agent
agent.update2(discrim_errors, discrim_errors_reverse) #agent.update(j,num_updates)
# #Old
else:
agent.update() #agent.update(j,num_updates)
agent.insert_first_state(agent.rollouts.states[-1])
# print ('save_interval_num_updates', save_interval_num_updates)
# print ('num_updates', num_updates)
# print ('j', j)
total_num_steps = (j + 1) * num_processes * num_steps
# if total_num_steps % save_interval == 0 and save_dir != "":
if j % save_interval_num_updates == 0 and save_dir != "" and j != 0:
#Save model
if save_params:
do_params(save_dir, agent, total_num_steps, model_dict)
save_params_v2(save_dir, agent, total_num_steps, model_dict)
#make video
if vid_:
do_vid(envs_video, update_current_state, shape_dim0, dtype, agent, model_dict, total_num_steps)
#make gif
if gif_:
do_gifs(envs_gif, agent, model_dict, update_current_state, update_rewards, total_num_steps)
#Print updates
if j % log_interval == 0:# and j!=0:
end = time.time()
to_print_info_string = "{}, {}, {:.1f}/{:.1f}/{:.1f}/{:.1f}, {}, {:.1f}, {:.1f}".format(j, total_num_steps,
final_rewards.min(),
final_rewards.median(),
final_rewards.mean(),
final_rewards.max(),
int(total_num_steps / (end - start)),
end - start,
end - start2)
print(to_print_info_string)
start2 = time.time()
to_print_legend_string = "Upts, n_timesteps, min/med/mean/max, FPS, Time"
if j % (log_interval*30) == 0:
if ls_:
do_ls(envs_ls, agent, model_dict, total_num_steps, update_current_state, update_rewards)
# print("Upts, n_timesteps, min/med/mean/max, FPS, Time, Plot updated, LS updated")
# print(to_print_info_string + ' LS recorded')#, agent.current_lr)
# else:
#update plots
try:
if ls_:
update_ls_plot(model_dict)
make_plots(model_dict)
print(to_print_legend_string + " Plot updated")
except:
raise #pass
print(to_print_legend_string)
try:
make_plots(model_dict)
except:
print ()
# pass #raise
if __name__ == "__main__":
    # Entry point: the path to a JSON model-configuration file is passed
    # via --m; the parsed dict drives the whole training run via train().
    parser = argparse.ArgumentParser()
    parser.add_argument('--m')
    args = parser.parse_args()
    #Load model dict
    with open(args.m, 'r') as infile:
        model_dict = json.load(infile)
    train(model_dict)
| [
"[email protected]"
]
| |
d251d96aa118620cf5b52cd71eb9e82cbb437e15 | 8a41a7f9340cfa784cb36d35dca1ecb1630e4097 | /Programming/Python/TestFrameworks/pytest_practice/test_pytest_requests.py | f15e998dc821c7bb5c3b6d0ca5291bb0d4f895cd | []
| no_license | anishst/Learn | 02e6b6cce43cf21621d328ef0fc25168267a9a3d | a1aed8b78b19acdb23e20be57b67fb242e0aefc5 | refs/heads/master | 2022-05-13T10:17:40.293640 | 2022-03-30T12:44:21 | 2022-03-30T12:44:21 | 173,595,812 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 812 | py | # http://pythontesting.net/framework/pytest/pytest-fixtures-nuts-bolts/#scope
import pytest
@pytest.fixture()
def my_fixture(request):
    """Introspection demo fixture.

    Prints the metadata of the pytest ``request`` object for whichever test
    asked for the fixture, then marks ``test_three`` as expected-to-fail.
    """
    print('\n-----------------')
    print('fixturename : %s' % request.fixturename)
    print('scope : %s' % request.scope)
    print('function : %s' % request.function.__name__)
    print('cls : %s' % request.cls)
    print('module : %s' % request.module.__name__)
    print('fspath : %s' % request.fspath)
    print('-----------------')
    if request.function.__name__ == 'test_three':
        # test_three ends in `assert False`; xfail keeps the run green.
        request.applymarker(pytest.mark.xfail)
def test_one(my_fixture):
    """Function-level test: requesting `my_fixture` triggers its prints."""
    print('test_one():')
class TestClass():
    """Class-level grouping: fixtures are injected into methods the same way."""
    def test_two(self, my_fixture):
        print('test_two()')
def test_three(my_fixture):
print('test_three()')
assert False | [
"[email protected]"
]
| |
14082e84e8cc42dec1bcbc028a0ce10087db4dd4 | 4d4fcde3efaa334f7aa56beabd2aa26fbcc43650 | /server/src/uds/migrations/0039_auto_20201111_1329.py | 4d48ca91318e70def9c7828155e6812d0e528f18 | []
| no_license | xezpeleta/openuds | a8b11cb34eb0ef7bb2da80f67586a81b2de229ef | 840a7a02bd7c9894e8863a8a50874cdfdbf30fcd | refs/heads/master | 2023-08-21T17:55:48.914631 | 2021-10-06T10:39:06 | 2021-10-06T10:39:06 | 414,489,331 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 676 | py | # Generated by Django 3.1.2 on 2020-11-11 13:29
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('uds', '0038_auto_20200505_config'),
]
operations = [
migrations.RemoveField(
model_name='metapool',
name='accessCalendars',
),
migrations.RemoveField(
model_name='metapool',
name='pools',
),
migrations.RemoveField(
model_name='servicepool',
name='accessCalendars',
),
migrations.RemoveField(
model_name='servicepool',
name='actionsCalendars',
),
]
| [
"[email protected]"
]
| |
ac9cbff6616993fe2ea2f872485ef2cd05863776 | 419873dd3b7412f704b1a7907b64a60b44cedf39 | /python/树/1448. 统计二叉树中好节点的数目.py | 00d74027dba7eea0cc91e63759b58e33a3e74545 | []
| no_license | Weless/leetcode | 0585c5bfa260713f44dabc51fa58ebf8a10e7814 | 0566622daa5849f7deb0cfdc6de2282fb3127f4c | refs/heads/master | 2021-11-13T07:59:20.299920 | 2021-10-25T02:09:53 | 2021-10-25T02:09:53 | 203,720,668 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,627 | py | class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class Solution:
    def goodNodes(self, root: TreeNode) -> int:
        """Count nodes whose value is >= every value on the path from the
        root to them (the root always qualifies). Iterative BFS variant."""
        if root is None:
            return 0
        from collections import deque
        pending = deque([(root, root.val)])  # (node, max value seen so far)
        good = 1  # the root is always a good node
        while pending:
            node, best = pending.popleft()
            for child in (node.left, node.right):
                if child is None:
                    continue
                if child.val >= best:
                    good += 1
                    pending.append((child, child.val))
                else:
                    pending.append((child, best))
        return good
class Solution:
    def goodNodes(self, root: "TreeNode") -> int:
        """Count "good" nodes: nodes >= every ancestor value. DFS variant.

        Bug fix: the previous version initialised ``self.res`` to 0 and the
        recursion only ever counted children, so the root (which is always
        good) was never counted and every non-empty tree was off by one.
        The annotation is quoted so the class no longer requires TreeNode
        to be defined first.
        """
        if not root:
            return 0
        self.res = 1  # the root is always a good node

        def dfs(node, best):
            # best = maximum value on the path from the root to `node`.
            if not node:
                return
            for child in (node.left, node.right):
                if child is None:
                    continue
                if child.val >= best:
                    self.res += 1
                    dfs(child, child.val)
                else:
                    dfs(child, best)

        dfs(root, root.val)
        return self.res
"[email protected]"
]
| |
c09f88c25686fb1faac2e42d494e7f86ee7d7702 | ae0c2806c009263fbd608b1381d96eb378ff115b | /aizynthfinder/utils/models.py | 30f964dcb6fcb867dfd6a421ad9798c733115492 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | yinxx/aizynthfinder | 45efcbafcf1d11199ec997129bc6a44ad4365952 | 20a7aed2db46e7df9dfeeaae0427f1e6e01b54e3 | refs/heads/master | 2023-06-25T00:21:36.983089 | 2021-07-26T08:52:47 | 2021-07-26T08:52:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,042 | py | """ Module containing helper routines for using Keras and Tensorflow models
"""
from __future__ import annotations
import functools
import os
from typing import TYPE_CHECKING
import numpy as np
import requests
import grpc
import tensorflow as tf
from google.protobuf.json_format import MessageToDict
from tensorflow_serving.apis import (
predict_pb2,
get_model_metadata_pb2,
prediction_service_pb2_grpc,
)
from tensorflow.keras.metrics import top_k_categorical_accuracy
from tensorflow.keras.models import load_model as load_keras_model
from aizynthfinder.utils.logging import logger
from aizynthfinder.utils.exceptions import ExternalModelAPIError
if TYPE_CHECKING:
from aizynthfinder.utils.type_utils import Any, Union, Callable, List
_ModelInput = Union[np.ndarray, List[np.ndarray]]
# Top-k accuracy metrics with explicit names; registered below in
# CUSTOM_OBJECTS so checkpoints referring to them can be deserialized.
top10_acc = functools.partial(top_k_categorical_accuracy, k=10)
top10_acc.__name__ = "top10_acc"  # type: ignore

top50_acc = functools.partial(top_k_categorical_accuracy, k=50)
top50_acc.__name__ = "top50_acc"  # type: ignore

# Custom objects handed to keras' load_model so names resolve at load time.
CUSTOM_OBJECTS = {"top10_acc": top10_acc, "top50_acc": top50_acc, "tf": tf}

_logger = logger()

# Tensorflow Serving endpoints come from the environment; any of these may
# be None, in which case the corresponding remote backend is unavailable.
TF_SERVING_HOST = os.environ.get("TF_SERVING_HOST")
TF_SERVING_REST_PORT = os.environ.get("TF_SERVING_REST_PORT")
TF_SERVING_GRPC_PORT = os.environ.get("TF_SERVING_GRPC_PORT")
def load_model(
    source: str, key: str, use_remote_models: bool
) -> Union["LocalKerasModel", "ExternalModelViaGRPC", "ExternalModelViaREST"]:
    """
    Resolve a policy model from a configuration specification.

    When ``use_remote_models`` is True the remote backends are attempted in
    order of preference (gRPC first, then REST); whenever a backend raises
    ExternalModelAPIError the next option is tried, falling back to the
    local Keras checkpoint. Otherwise the local model is loaded directly.

    :param source: filename of the local model checkpoint (the fallback)
    :param key: model name used when connecting to a Tensorflow server
    :param use_remote_models: if True, try remote model servers first
    :return: a model object with a predict method
    """
    if use_remote_models:
        for remote_backend in (ExternalModelViaGRPC, ExternalModelViaREST):
            try:
                return remote_backend(key)
            except ExternalModelAPIError:
                pass
    return LocalKerasModel(source)
class LocalKerasModel:
    """
    A keras policy model that is executed locally.

    The size of the input vector can be determined with the len() method.

    :ivar model: the compiled model
    :ivar output_size: the length of the output vector

    :param filename: the path to a Keras checkpoint file
    """

    def __init__(self, filename: str) -> None:
        self.model = load_keras_model(filename, custom_objects=CUSTOM_OBJECTS)
        try:
            # Single-input models expose .input directly ...
            self._model_dimensions = int(self.model.input.shape[1])
        except AttributeError:
            # ... multi-input models expose a list; use the first input.
            self._model_dimensions = int(self.model.input[0].shape[1])
        self.output_size = int(self.model.output.shape[1])

    def __len__(self) -> int:
        # Length of the expected (first) input vector.
        return self._model_dimensions

    def predict(self, input_: _ModelInput) -> np.ndarray:
        """
        Perform a forward pass of the neural network.

        :param input_: the input vector
        :return: the vector of the output layer
        """
        return self.model.predict(input_)
def _log_and_reraise_exceptions(method: Callable) -> Callable:
@functools.wraps(method)
def wrapper(*args, **kwargs):
try:
return method(*args, **kwargs)
except Exception as err:
msg = "Error when requesting from tensorflow model API"
_logger.error("%s: %s", msg, err)
raise ExternalModelAPIError(msg)
return wrapper
class ExternalModelViaREST:
    """
    A neural network model implementation using TF Serving via REST API.

    :param name: the name of model
    """

    def __init__(self, name: str) -> None:
        self._model_url = self._get_model_url(name)
        self._sig_def = self._get_sig_def()

    def __len__(self) -> int:
        # Length of the expected input vector, read from the serving signature.
        first_input_name = list(self._sig_def["inputs"].keys())[0]
        return int(
            self._sig_def["inputs"][first_input_name]["tensor_shape"]["dim"][1]["size"]
        )

    def predict(self, inputs: _ModelInput) -> np.ndarray:
        """
        Get prediction from model.

        :param inputs: the input vector or list of vectors
        :return: the vector of the output layer
        """
        url = self._model_url + ":predict"
        res = self._handle_rest_api_request(
            "POST", url, json=self._make_payload(inputs)
        )
        return np.asarray(res["outputs"])

    def _get_sig_def(self) -> dict:
        # The "serving_default" signature describes input/output tensor shapes.
        res = self._handle_rest_api_request("GET", self._model_url + "/metadata")
        return res["metadata"]["signature_def"]["signature_def"]["serving_default"]

    # pylint: disable=no-self-use
    @_log_and_reraise_exceptions
    def _handle_rest_api_request(
        self, method: str, url: str, *args: Any, **kwargs: Any
    ) -> dict:
        res = requests.request(method, url, *args, **kwargs)
        if res.status_code != 200 or (
            res.headers["Content-Type"] != "application/json"
        ):
            raise ExternalModelAPIError(
                f"Unexpected response from REST API: {res.status_code}\n{res.text}"
            )
        return res.json()

    def _make_payload(self, inputs: _ModelInput) -> dict:
        # A single array is wrapped in a list so one code path serves both.
        if isinstance(inputs, np.ndarray):
            inputs = [inputs]
        data = {
            name: fp.tolist()
            for name, fp in zip(self._sig_def["inputs"].keys(), inputs)
        }
        return {"inputs": data}

    @staticmethod
    def _get_model_url(name: str) -> str:
        warning = f"Failed to get url of REST service for external model {name}"
        if not TF_SERVING_HOST:
            _logger.warning(warning)
            # Bug fix: these two messages were missing the f-prefix and
            # rendered a literal "{name}"; now consistent with the gRPC class.
            raise ExternalModelAPIError(f"Host not set for model {name}")
        if not TF_SERVING_REST_PORT:
            _logger.warning(warning)
            raise ExternalModelAPIError(f"REST port not set for model {name}")
        return f"http://{TF_SERVING_HOST}:{TF_SERVING_REST_PORT}/v1/models/{name}"
class ExternalModelViaGRPC:
    """
    A neural network model implementation using TF Serving via gRPC.

    :param name: the name of model
    """

    def __init__(self, name: str) -> None:
        self._server = self._get_server(name)
        self._model_name = name
        self._sig_def = self._get_sig_def()

    def __len__(self) -> int:
        # Length of the expected input vector, read from the serving signature.
        first_input_name = list(self._sig_def["inputs"].keys())[0]
        return int(
            self._sig_def["inputs"][first_input_name]["tensorShape"]["dim"][1]["size"]
        )

    @_log_and_reraise_exceptions
    def predict(self, inputs: _ModelInput) -> np.ndarray:
        """
        Get prediction from model.

        :param inputs: the input vector or list of vectors
        :return: the vector of the output layer
        """
        input_tensors = self._make_payload(inputs)
        channel = grpc.insecure_channel(self._server)
        service = prediction_service_pb2_grpc.PredictionServiceStub(channel)
        request = predict_pb2.PredictRequest()
        request.model_spec.name = self._model_name
        for name, tensor in input_tensors.items():
            request.inputs[name].CopyFrom(tensor)
        # The first declared output of the serving signature is returned.
        key = list(self._sig_def["outputs"].keys())[0]
        return tf.make_ndarray(service.Predict(request, 10.0).outputs[key])

    @_log_and_reraise_exceptions
    def _get_sig_def(self) -> dict:
        # Fetch the "serving_default" signature definition over gRPC.
        channel = grpc.insecure_channel(self._server)
        service = prediction_service_pb2_grpc.PredictionServiceStub(channel)
        request = get_model_metadata_pb2.GetModelMetadataRequest()
        request.model_spec.name = self._model_name
        request.metadata_field.append("signature_def")
        result = MessageToDict(service.GetModelMetadata(request, 10.0))
        # close the channel so that it won't be reused after fork and fail
        channel.close()
        return result["metadata"]["signature_def"]["signatureDef"]["serving_default"]

    def _make_payload(self, inputs: _ModelInput) -> dict:
        # A single array is wrapped in a list so one code path serves both.
        if isinstance(inputs, np.ndarray):
            inputs = [inputs]
        tensors = {}
        for name, fp_ in zip(self._sig_def["inputs"].keys(), inputs):
            # Shape (1, size) per the serving signature; values sent as float32.
            size = int(self._sig_def["inputs"][name]["tensorShape"]["dim"][1]["size"])
            tensors[name] = tf.make_tensor_proto(fp_, dtype=np.float32, shape=(1, size))
        return tensors

    @staticmethod
    def _get_server(name: str) -> str:
        # Both host and port must be configured; otherwise remote loading is
        # reported as unavailable via ExternalModelAPIError.
        warning = f"Failed to get gRPC server for external model {name}"
        if not TF_SERVING_HOST:
            _logger.warning(warning)
            raise ExternalModelAPIError(f"Host not set for model {name}")
        if not TF_SERVING_GRPC_PORT:
            _logger.warning(warning)
            raise ExternalModelAPIError(f"GRPC port not set for model {name}")
        return f"{TF_SERVING_HOST}:{TF_SERVING_GRPC_PORT}"
| [
"[email protected]"
]
| |
17371c0c05eb0a54d12cfae1089ffbf4af13250e | 0a2167a58687db61423fa71dc3982194c8dbf3a4 | /photo_upload_js/search/urls.py | 57e7e59472bb8529fa18d1bbca7daf94b1e18388 | []
| no_license | nilldiggonto/js_with_django3 | e6728192bc45313b43fc8c1696207ee0eb990445 | ec8df007732ada06aba8956460a2ecb0d5f20b27 | refs/heads/main | 2023-03-13T18:17:13.465079 | 2021-03-01T12:43:40 | 2021-03-01T12:43:40 | 338,062,634 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 213 | py | from django.urls import path
from .views import SearchListView,post_with_photo_view
urlpatterns = [
path('',SearchListView.as_view(),name='search-list'),
path('up/',post_with_photo_view,name='up-post'),
] | [
"[email protected]"
]
| |
726b58567c2d9b96312f1a2247fda43614f50976 | 1eedb7439266cc63df179aac8fae28cdcf83b3d5 | /nuwe_data_viewer/plugin/project_explorer/model/container_node.py | 311b62d82ec1eb6b6954727a6b4e65bfa9ca2073 | []
| no_license | perillaroc/nuwe-data-viewer | 6f11d19de920bbe2c9937ee4c3169cbe20dfafcc | 12a49844980946f07523f87786b054aa6d9c2e10 | refs/heads/master | 2020-03-27T04:59:41.085059 | 2019-05-11T14:24:44 | 2019-05-11T14:24:44 | 145,986,751 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py | # coding: utf-8
from nuwe_data_viewer.plugin.project_explorer.model.node import Node
class ContainerNode(Node):
    """Marker subclass of Node (adds no extra state).

    NOTE(review): presumably represents container entries in the project
    explorer tree — confirm against the surrounding model code.
    """

    def __init__(self, display_name="", node_id=None):
        # Idiom fix: use super() rather than calling Node.__init__ directly,
        # so cooperative (multiple) inheritance keeps working.
        super().__init__(display_name, node_id)
| [
"[email protected]"
]
| |
f09d106fdba174b4d50bd24e47c76d79bcff3de6 | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/galex_j16155+5048/sdB_galex_j16155+5048_coadd.py | 910403f482b6eda6d8e12bdf3a0cae2ad2051389 | []
| no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | from gPhoton.gMap import gMap
def main():
    # Build a 30 s-cadence NUV count movie plus a coadded count image for
    # sdB_galex_j16155+5048 over a ~2x2 arcmin (0.0333 deg) box on the sky.
    gMap(band="NUV", skypos=[243.88925,50.807131], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_galex_j16155+5048/sdB_galex_j16155+5048_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_galex_j16155+5048/sdB_galex_j16155+5048_count_coadd.fits", overwrite=True, verbose=3)

if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
23776c548405a31bbc4586662ae2da0f5154d617 | dd50e92d9d05f00d96aefd61e1422f1897397af1 | /venv/Scripts/futurize-script.py | 482848934b8f88ae0adfafc67a6dc8fcb065b8d9 | []
| no_license | zrxingchen/bwshop | ca60f55a948b64a07df4b31d6dc8b26bae0ceb4b | 53d5bee96adf8b7fea8f560907555f2b4068b6ce | refs/heads/master | 2022-07-29T21:19:49.183645 | 2020-05-23T07:54:15 | 2020-05-23T07:54:15 | 266,286,628 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | #!D:\BWshop\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'future==0.18.2','console_scripts','futurize'
__requires__ = 'future==0.18.2'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # setuptools entry-point shim: strip the -script.py/.exe suffix that
    # argv[0] carries on Windows, then run the `futurize` console script
    # and propagate its exit code.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('future==0.18.2', 'console_scripts', 'futurize')()
    )
| [
"[email protected]"
]
| |
08631b60708e517e228451d1629faaf2e74402f4 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/tree-big-6757.py | b4b3b9f3005553b5202d6d4dff1c2e95e4d0376b | []
| no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,289 | py | # Binary-search trees
class TreeNode(object):
    value:int = 0
    left:"TreeNode" = None
    right:"TreeNode" = None

    def insert(self:"TreeNode", x:int) -> bool:
        """Insert x into this subtree; return True only if a node was added."""
        if x == self.value:
            return False  # duplicate: nothing inserted
        if x < self.value:
            if self.left is not None:
                return self.left.insert(x)
            self.left = makeNode(x)
            return True
        if self.right is not None:
            return self.right.insert(x)
        self.right = makeNode(x)
        return True

    def contains(self:"TreeNode", x:int) -> bool:
        """Return True when x occurs somewhere in this subtree."""
        if x == self.value:
            return True
        branch = self.left if x < self.value else self.right
        return branch is not None and branch.contains(x)
class TreeNode2(object):
    """Binary-search-tree node.

    The numbered variants (insert2/contains2 and the *2 attributes) are
    generated duplicates whose extra arguments were never used — their
    bodies were literal copies of insert/contains — so they now delegate
    to the real logic. Interfaces are unchanged.
    """
    value:int = 0
    value2:int = 0
    left:"TreeNode2" = None
    left2:"TreeNode2" = None
    right:"TreeNode2" = None
    right2:"TreeNode2" = None

    def insert(self:"TreeNode2", x:int) -> bool:
        """Insert x; return True if a new node was created."""
        if x < self.value:
            if self.left is None:
                self.left = makeNode2(x, x)
                return True
            return self.left.insert(x)
        if x > self.value:
            if self.right is None:
                self.right = makeNode2(x, x)
                return True
            return self.right.insert(x)
        return False  # duplicate

    def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
        """Identical to insert(); x2 was always ignored."""
        return self.insert(x)

    def contains(self:"TreeNode2", x:int) -> bool:
        """Return True when x is present in this subtree."""
        if x < self.value:
            return self.left is not None and self.left.contains(x)
        if x > self.value:
            return self.right is not None and self.right.contains(x)
        return True

    def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
        """Identical to contains(); x2 was always ignored."""
        return self.contains(x)
class TreeNode3(object):
    """Binary-search-tree node.

    The numbered variants (insert2/3, contains2/3 and the *2/*3 attributes)
    are generated duplicates whose extra arguments were never used — their
    bodies were literal copies of insert/contains — so they now delegate
    to the real logic. Interfaces are unchanged.
    """
    value:int = 0
    value2:int = 0
    value3:int = 0
    left:"TreeNode3" = None
    left2:"TreeNode3" = None
    left3:"TreeNode3" = None
    right:"TreeNode3" = None
    right2:"TreeNode3" = None
    right3:"TreeNode3" = None

    def insert(self:"TreeNode3", x:int) -> bool:
        """Insert x; return True if a new node was created."""
        if x < self.value:
            if self.left is None:
                self.left = makeNode3(x, x, x)
                return True
            return self.left.insert(x)
        if x > self.value:
            if self.right is None:
                self.right = makeNode3(x, x, x)
                return True
            return self.right.insert(x)
        return False  # duplicate

    def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
        """Identical to insert(); extra args were always ignored."""
        return self.insert(x)

    def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
        """Identical to insert(); extra args were always ignored."""
        return self.insert(x)

    def contains(self:"TreeNode3", x:int) -> bool:
        """Return True when x is present in this subtree."""
        if x < self.value:
            return self.left is not None and self.left.contains(x)
        if x > self.value:
            return self.right is not None and self.right.contains(x)
        return True

    def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
        """Identical to contains(); extra args were always ignored."""
        return self.contains(x)

    def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
        """Identical to contains(); extra args were always ignored."""
        return self.contains(x)
class TreeNode4(object):
    """Binary-search-tree node.

    The numbered variants (insert2-4, contains2-4 and the *2-*4 attributes)
    are generated duplicates whose extra arguments were never used — their
    bodies were literal copies of insert/contains — so they now delegate
    to the real logic. Interfaces are unchanged.
    """
    value:int = 0
    value2:int = 0
    value3:int = 0
    value4:int = 0
    left:"TreeNode4" = None
    left2:"TreeNode4" = None
    left3:"TreeNode4" = None
    left4:"TreeNode4" = None
    right:"TreeNode4" = None
    right2:"TreeNode4" = None
    right3:"TreeNode4" = None
    right4:"TreeNode4" = None

    def insert(self:"TreeNode4", x:int) -> bool:
        """Insert x; return True if a new node was created."""
        if x < self.value:
            if self.left is None:
                self.left = makeNode4(x, x, x, x)
                return True
            return self.left.insert(x)
        if x > self.value:
            if self.right is None:
                self.right = makeNode4(x, x, x, x)
                return True
            return self.right.insert(x)
        return False  # duplicate

    def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
        """Identical to insert(); extra args were always ignored."""
        return self.insert(x)

    def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
        """Identical to insert(); extra args were always ignored."""
        return self.insert(x)

    def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
        """Identical to insert(); extra args were always ignored."""
        return self.insert(x)

    def contains(self:"TreeNode4", x:int) -> bool:
        """Return True when x is present in this subtree."""
        if x < self.value:
            return self.left is not None and self.left.contains(x)
        if x > self.value:
            return self.right is not None and self.right.contains(x)
        return True

    def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
        """Identical to contains(); extra args were always ignored."""
        return self.contains(x)

    def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
        """Identical to contains(); extra args were always ignored."""
        return self.contains(x)

    def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
        """Identical to contains(); extra args were always ignored."""
        return self.contains(x)
class TreeNode5(object):
    """Binary-search-tree node.

    The numbered variants (insert2-5, contains2-5 and the *2-*5 attributes)
    are generated duplicates whose extra arguments were never used — their
    bodies were literal copies of insert/contains — so they now delegate
    to the real logic. Interfaces are unchanged.
    """
    value:int = 0
    value2:int = 0
    value3:int = 0
    value4:int = 0
    value5:int = 0
    left:"TreeNode5" = None
    left2:"TreeNode5" = None
    left3:"TreeNode5" = None
    left4:"TreeNode5" = None
    left5:"TreeNode5" = None
    right:"TreeNode5" = None
    right2:"TreeNode5" = None
    right3:"TreeNode5" = None
    right4:"TreeNode5" = None
    right5:"TreeNode5" = None

    def insert(self:"TreeNode5", x:int) -> bool:
        """Insert x; return True if a new node was created."""
        if x < self.value:
            if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
                return True
            return self.left.insert(x)
        if x > self.value:
            if self.right is None:
                self.right = makeNode5(x, x, x, x, x)
                return True
            return self.right.insert(x)
        return False  # duplicate

    def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
        """Identical to insert(); extra args were always ignored."""
        return self.insert(x)

    def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
        """Identical to insert(); extra args were always ignored."""
        return self.insert(x)

    def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
        """Identical to insert(); extra args were always ignored."""
        return self.insert(x)

    def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        """Identical to insert(); extra args were always ignored."""
        return self.insert(x)

    def contains(self:"TreeNode5", x:int) -> bool:
        """Return True when x is present in this subtree."""
        if x < self.value:
            return self.left is not None and self.left.contains(x)
        if x > self.value:
            return self.right is not None and self.right.contains(x)
        return True

    def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
        """Identical to contains(); extra args were always ignored."""
        return self.contains(x)

    def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
        """Identical to contains(); extra args were always ignored."""
        return self.contains(x)

    def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
        """Identical to contains(); extra args were always ignored."""
        return self.contains(x)

    def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        """Identical to contains(); extra args were always ignored."""
        return self.contains(x)
class Tree(object):
    root:TreeNode = None
    size:int = 0

    def insert(self:"Tree", x:int) -> object:
        """Insert x, creating the root on first use; size counts distinct values."""
        if self.root is not None:
            if self.root.insert(x):
                self.size = self.size + 1
        else:
            self.root = makeNode(x)
            self.size = 1

    def contains(self:"Tree", x:int) -> bool:
        """Return True when x is stored in the tree."""
        return self.root is not None and self.root.contains(x)
class Tree2(object):
    """BST wrapper.

    The numbered variants (insert2/contains2 and the *2 attributes) are
    generated duplicates whose extra arguments were never used — their
    bodies were literal copies of insert/contains — so they now delegate.
    """
    root:TreeNode2 = None
    root2:TreeNode2 = None
    size:int = 0
    size2:int = 0

    def insert(self:"Tree2", x:int) -> object:
        """Insert x; size counts distinct values."""
        if self.root is None:
            self.root = makeNode2(x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1

    def insert2(self:"Tree2", x:int, x2:int) -> object:
        """Identical to insert(); x2 was always ignored."""
        return self.insert(x)

    def contains(self:"Tree2", x:int) -> bool:
        """Return True when x is stored in the tree."""
        if self.root is None:
            return False
        return self.root.contains(x)

    def contains2(self:"Tree2", x:int, x2:int) -> bool:
        """Identical to contains(); x2 was always ignored."""
        return self.contains(x)
class Tree3(object):
    """BST facade over TreeNode3; the ``*2``/``*3``-suffixed members and
    extra arguments are unused benchmark padding."""
    root:TreeNode3 = None
    root2:TreeNode3 = None
    root3:TreeNode3 = None
    size:int = 0
    size2:int = 0
    size3:int = 0
    def insert(self:"Tree3", x:int) -> object:
        """Insert *x*; ``size`` grows only when the value was new."""
        if self.root is not None:
            if self.root.insert(x):
                self.size = self.size + 1
        else:
            self.root = makeNode3(x, x, x)
            self.size = 1
    def insert2(self:"Tree3", x:int, x2:int) -> object:
        """Same as ``insert``; extra arguments are ignored."""
        if self.root is not None:
            if self.root.insert(x):
                self.size = self.size + 1
        else:
            self.root = makeNode3(x, x, x)
            self.size = 1
    def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
        """Same as ``insert``; extra arguments are ignored."""
        if self.root is not None:
            if self.root.insert(x):
                self.size = self.size + 1
        else:
            self.root = makeNode3(x, x, x)
            self.size = 1
    def contains(self:"Tree3", x:int) -> bool:
        """Return True iff *x* has been inserted."""
        return False if self.root is None else self.root.contains(x)
    def contains2(self:"Tree3", x:int, x2:int) -> bool:
        """Same as ``contains``; extra arguments are ignored."""
        return False if self.root is None else self.root.contains(x)
    def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
        """Same as ``contains``; extra arguments are ignored."""
        return False if self.root is None else self.root.contains(x)
class Tree4(object):
    """BST facade over TreeNode4; the numbered members and extra arguments
    are unused benchmark padding."""
    root:TreeNode4 = None
    root2:TreeNode4 = None
    root3:TreeNode4 = None
    root4:TreeNode4 = None
    size:int = 0
    size2:int = 0
    size3:int = 0
    size4:int = 0
    def insert(self:"Tree4", x:int) -> object:
        """Insert *x*; ``size`` grows only when the value was new."""
        if self.root is not None:
            if self.root.insert(x):
                self.size = self.size + 1
        else:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
    def insert2(self:"Tree4", x:int, x2:int) -> object:
        """Same as ``insert``; extra arguments are ignored."""
        if self.root is not None:
            if self.root.insert(x):
                self.size = self.size + 1
        else:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
    def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
        """Same as ``insert``; extra arguments are ignored."""
        if self.root is not None:
            if self.root.insert(x):
                self.size = self.size + 1
        else:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
    def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
        """Same as ``insert``; extra arguments are ignored."""
        if self.root is not None:
            if self.root.insert(x):
                self.size = self.size + 1
        else:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
    def contains(self:"Tree4", x:int) -> bool:
        """Return True iff *x* has been inserted."""
        return False if self.root is None else self.root.contains(x)
    def contains2(self:"Tree4", x:int, x2:int) -> bool:
        """Same as ``contains``; extra arguments are ignored."""
        return False if self.root is None else self.root.contains(x)
    def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
        """Same as ``contains``; extra arguments are ignored."""
        return False if self.root is None else self.root.contains(x)
    def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
        """Same as ``contains``; extra arguments are ignored."""
        return False if self.root is None else self.root.contains(x)
class Tree5(object):
    """BST facade over TreeNode5 nodes.

    Only ``root``/``size`` and the single-argument ``insert``/``contains``
    carry real state; every numbered duplicate ignores its extra arguments
    and operates on the same ``root``/``size`` pair.
    """
    root:TreeNode5 = None
    root2:TreeNode5 = None
    root3:TreeNode5 = None
    root4:TreeNode5 = None
    root5:TreeNode5 = None
    # BUG FIX: this line was ``size:$ID = 0`` — an unexpanded template
    # placeholder, which is a SyntaxError.  The counter is an int like
    # its siblings size2..size5.
    size:int = 0
    size2:int = 0
    size3:int = 0
    size4:int = 0
    size5:int = 0
    def insert(self:"Tree5", x:int) -> object:
        """Insert *x*; ``size`` grows only when the value was not present."""
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert2(self:"Tree5", x:int, x2:int) -> object:
        """Same as ``insert``; extra arguments are ignored."""
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
        """Same as ``insert``; extra arguments are ignored."""
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
        """Same as ``insert``; extra arguments are ignored."""
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
        """Same as ``insert``; extra arguments are ignored."""
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def contains(self:"Tree5", x:int) -> bool:
        """Return True iff *x* has been inserted."""
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains2(self:"Tree5", x:int, x2:int) -> bool:
        """Same as ``contains``; extra arguments are ignored."""
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
        """Same as ``contains``; extra arguments are ignored."""
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
        """Same as ``contains``; extra arguments are ignored."""
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        """Same as ``contains``; extra arguments are ignored."""
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
def makeNode(x: int) -> TreeNode:
    """Allocate a leaf TreeNode whose value is *x*."""
    node:TreeNode = None
    node = TreeNode()
    node.value = x
    return node
def makeNode2(x: int, x2: int) -> TreeNode2:
    """Allocate a leaf TreeNode2 whose value is *x*.

    *x2* is unused benchmark padding.  The dead local ``b2`` (declared
    and never read) was removed.
    """
    b:TreeNode2 = None
    b = TreeNode2()
    b.value = x
    return b
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
    """Allocate a leaf TreeNode3 whose value is *x*.

    *x2*/*x3* are unused benchmark padding.  The dead locals ``b2``/``b3``
    (declared and never read) were removed.
    """
    b:TreeNode3 = None
    b = TreeNode3()
    b.value = x
    return b
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
    """Allocate a leaf TreeNode4 whose value is *x*.

    *x2*..*x4* are unused benchmark padding.  The dead locals ``b2``..``b4``
    (declared and never read) were removed.
    """
    b:TreeNode4 = None
    b = TreeNode4()
    b.value = x
    return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
    """Allocate a leaf TreeNode5 whose value is *x*.

    *x2*..*x5* are unused benchmark padding.  The dead locals ``b2``..``b5``
    (declared and never read) were removed.
    """
    b:TreeNode5 = None
    b = TreeNode5()
    b.value = x
    return b
# Input parameters
# Only the un-suffixed names (n, c, t, i, k) are used below; the numbered
# copies appear to be benchmark padding mirroring the numbered classes.
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
# Crunch
# Insert n pseudo-random values (k walks a multiplicative sequence mod
# 37831) plus every index i not divisible by c; duplicate inserts do not
# grow t.size.
t = Tree()
while i < n:
    t.insert(k)
    k = (k * 37813) % 37831
    if i % c != 0:
        t.insert(i)
    i = i + 1
# Report the number of distinct values, then which probe values are present.
print(t.size)
for i in [4, 8, 15, 16, 23, 42]:
    if t.contains(i):
        print(i)
| [
"[email protected]"
]
| |
767c2bfac9638826491205fbf82df7b3dfcd3672 | 6169a0af24553278c9493c9ac14d2351e9085afd | /tests/providers/pagerduty/hooks/test_pagerduty_events.py | 3c68ba8247954e373fa2502a56287ba653a750a3 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
]
| permissive | Nextdoor/airflow | c994f8fbaf48bebd891300f44dd78a58fd0b057b | 863ec46e25ea49d6d5b006d8fd3a83f50aa9db79 | refs/heads/master | 2023-06-12T19:25:58.052324 | 2023-01-20T17:43:14 | 2023-01-20T17:43:14 | 54,076,271 | 7 | 8 | Apache-2.0 | 2023-06-05T20:38:53 | 2016-03-17T00:34:45 | Python | UTF-8 | Python | false | false | 2,285 | py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.models import Connection
from airflow.providers.pagerduty.hooks.pagerduty import PagerdutyEventsHook
from airflow.utils import db
# Connection id looked up by every test in this module.
DEFAULT_CONN_ID = "pagerduty_events_default"
@pytest.fixture(scope="class")
def events_connections():
    """Seed the Airflow DB with a pagerduty_events connection whose password
    field holds the integration key ("events_token")."""
    db.merge_conn(Connection(conn_id=DEFAULT_CONN_ID, conn_type="pagerduty_events", password="events_token"))
class TestPagerdutyEventsHook:
    """Unit tests for PagerdutyEventsHook key resolution and event creation."""

    def test_get_integration_key_from_password(self, events_connections):
        """The integration key is read from the connection's password field."""
        events_hook = PagerdutyEventsHook(pagerduty_events_conn_id=DEFAULT_CONN_ID)
        assert events_hook.integration_key == "events_token", "token initialised."

    def test_token_parameter_override(self, events_connections):
        """An explicit integration_key argument overrides the stored connection."""
        events_hook = PagerdutyEventsHook(
            integration_key="override_key", pagerduty_events_conn_id=DEFAULT_CONN_ID
        )
        assert events_hook.integration_key == "override_key", "token initialised."

    def test_create_event(self, requests_mock, events_connections):
        """create_event posts to the Events v2 endpoint and returns the JSON body."""
        events_hook = PagerdutyEventsHook(pagerduty_events_conn_id=DEFAULT_CONN_ID)
        expected_body = {
            "status": "success",
            "message": "Event processed",
            "dedup_key": "samplekeyhere",
        }
        requests_mock.post("https://events.pagerduty.com/v2/enqueue", json=expected_body)
        response = events_hook.create_event(
            summary="test", source="airflow_test", severity="error"
        )
        assert response == expected_body
| [
"[email protected]"
]
| |
04b8ed50c24c320d25836ef6911aab27ca4dc7b7 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP.py | da7cb8b996dbd193802a4c80260e2d37c3f3b78e | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 17,953 | py | #
# PySNMP MIB module NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP
# Produced by pysmi-0.3.4 at Wed May 1 14:23:28 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# Pull in the ASN.1/SMI building blocks and the Nokia common definitions.
# NOTE(review): ``mibBuilder`` is not defined in this file — presumably it is
# injected by the pysnmp MIB loader when the module is executed; confirm.
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint")
NoiAdditionalText, NoiEventTime, NoiAlarmTableCount = mibBuilder.importSymbols("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-COMMON-DEFINITION", "NoiAdditionalText", "NoiEventTime", "NoiAlarmTableCount")
NoiMeasurementJobStatus, NoiMeasurementResultTransfer, NoiMeasurementResultIdentifier, NoiMeasurementFileTransfer, NoiMeasurementFileName, NoiMeasurementActivationError, NoiMeasurementFileDirectory = mibBuilder.importSymbols("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-COMMON-DEFINITION", "NoiMeasurementJobStatus", "NoiMeasurementResultTransfer", "NoiMeasurementResultIdentifier", "NoiMeasurementFileTransfer", "NoiMeasurementFileName", "NoiMeasurementActivationError", "NoiMeasurementFileDirectory")
noiPmTable, noiPmCompliance, noiPmVariable, noiOpenInterfaceModule, noiPmNotification = mibBuilder.importSymbols("NOKIA-NE3S-REGISTRATION-MIB", "noiPmTable", "noiPmCompliance", "noiPmVariable", "noiOpenInterfaceModule", "noiPmNotification")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
Gauge32, ModuleIdentity, NotificationType, Bits, Unsigned32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, iso, ObjectIdentity, Counter32, Counter64, TimeTicks, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "ModuleIdentity", "NotificationType", "Bits", "Unsigned32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "iso", "ObjectIdentity", "Counter32", "Counter64", "TimeTicks", "Integer32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# --- Module identity (pysmi-generated; do not hand-edit the OIDs) ----------
noiSnmpPmIrp = ModuleIdentity((1, 3, 6, 1, 4, 1, 94, 7, 1, 1, 4))
noiSnmpPmIrp.setRevisions(('1970-01-01 00:00',))
# Older pysnmp builds reject keyword metadata; guard on the builder version.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: noiSnmpPmIrp.setRevisionsDescriptions(('Version 1.0.6',))
if mibBuilder.loadTexts: noiSnmpPmIrp.setLastUpdated('200227020000Z')
if mibBuilder.loadTexts: noiSnmpPmIrp.setOrganization('Nokia Networks')
if mibBuilder.loadTexts: noiSnmpPmIrp.setContactInfo('e-mail: NET-OSS-OPEN-SNMP DL (Microsoft Outlook, Nokia internal) [email protected]')
if mibBuilder.loadTexts: noiSnmpPmIrp.setDescription('This SNMP MIB-module specifies the SNMP Solution Set of the PM Integration Reference Point (IRP) also known as Enhanced SNMP Solution Suite. The purpose of this IRP is to define an interface though which a network element manager or a network element) can communicate PM information for its managed objects to Nokia OS, NetAct.')
# --- PM IRP scalar managed objects -----------------------------------------
# Each object is three generated statements: definition, status, description.
# The embedded setDescription() strings are the authoritative documentation.
noiPmIrpVersion = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(5, 5)).setFixedLength(5)).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiPmIrpVersion.setStatus('current')
if mibBuilder.loadTexts: noiPmIrpVersion.setDescription("This object represents the version of the PM IRP supported by the agent. The format is 'n.m,o', where 'n' is the main version number of the interface model and 'm' and 'o' the release number within the main version. This version is 1.0.6")
# Agent-maintained configuration scalars (declared read-write so an EM can
# configure them; NetAct treats them as read-only per the descriptions).
noiPmFileTransferProtocol = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 2), NoiMeasurementFileTransfer()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiPmFileTransferProtocol.setStatus('current')
if mibBuilder.loadTexts: noiPmFileTransferProtocol.setDescription('Contains the supported file transfer mechanism for various files within NE3S. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiPmResultTransfer = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 3), NoiMeasurementResultTransfer()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiPmResultTransfer.setStatus('current')
if mibBuilder.loadTexts: noiPmResultTransfer.setDescription('Contains the supported transfer mechanism for measurement result, e.g. notification based or polling based. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementScheduleFileDirectory = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 4), NoiMeasurementFileDirectory()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiMeasurementScheduleFileDirectory.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementScheduleFileDirectory.setDescription('Contains the directory where the measurement schedule file is stored within the agent. The manager polls the value before downloading the measurement file. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementRepositoryDirectory = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 5), NoiMeasurementFileDirectory()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiMeasurementRepositoryDirectory.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementRepositoryDirectory.setDescription('Contains the directory where the measurement repository file is stored within the agent. The manager polls the value before retrieving the repository file. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementRepositoryFile = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 6), NoiMeasurementFileName()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiMeasurementRepositoryFile.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementRepositoryFile.setDescription('Contains the file name of the repository file. The manager polls the value before retrieving the repository file. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
# Measurement job state and error reporting.
noiMeasurementJobStatus = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 7), NoiMeasurementJobStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiMeasurementJobStatus.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementJobStatus.setDescription('This object represent the measurement job status. The agent will update the value according to the state model defined in the interface specification.')
noiMeasurementActivationError = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 8), NoiMeasurementActivationError()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementActivationError.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementActivationError.setDescription('Contains the error code in case of failure in measurement administration.')
noiPmAdditionalText = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 9), NoiAdditionalText()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiPmAdditionalText.setStatus('current')
if mibBuilder.loadTexts: noiPmAdditionalText.setDescription('Contains additional text and is used in conjunction with the notification noiMeasurementResultTableRebuild and in case of failure in measurement administration.')
noiPmFileStoringPeriod = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiPmFileStoringPeriod.setStatus('current')
if mibBuilder.loadTexts: noiPmFileStoringPeriod.setDescription(' Contains the storage duraion for the measurement file in the agent. Duration in minutes. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
# Counters / bookkeeping for the measurement result table defined below.
noiMeasurementResultTableCount = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 1), NoiAlarmTableCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementResultTableCount.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultTableCount.setDescription('Contains the number or current active entries in the measurement table. When the table is empty, the value of this object is zero (0).')
noiMeasurementResultTableMaxCount = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 2), NoiAlarmTableCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementResultTableMaxCount.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultTableMaxCount.setDescription('Contains the maximum number of entries in the in the measurement table.')
noiPmLastMeasurementResultId = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 3), NoiMeasurementResultIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiPmLastMeasurementResultId.setStatus('current')
if mibBuilder.loadTexts: noiPmLastMeasurementResultId.setDescription('This object represent the measurement identifier of last send notification noiMeasurementResultReady The manager can retrieve the current value of this object to detect lost notifications. This mechanism can be used by the manager when no notification is received for a certain time (e.g. 30 minutes) to evaluate whether an retrieval of of entries from the measurement table shall be performed')
# --- Measurement result table: one row per measurement file on the agent ---
noiMeasurementResultTable = MibTable((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4), )
if mibBuilder.loadTexts: noiMeasurementResultTable.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultTable.setDescription('Table containing information about the measurement files that are currently stored in the Network Element and accessible for the manager. Agent will create a new entry, whenever a new measurement file has been created. When removing a measurement file, the corresponding entry in the table must be removed.')
# Rows are indexed by the measurement result identifier column.
noiMeasurementResultEntry = MibTableRow((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1), ).setIndexNames((0, "NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultIdentifier"))
if mibBuilder.loadTexts: noiMeasurementResultEntry.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultEntry.setDescription('One entry in the measurement table, containing the information of one measurement file.')
# Columns: identifier (index), file name, directory, event time.
noiMeasurementResultIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1, 1), NoiMeasurementResultIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementResultIdentifier.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultIdentifier.setDescription('This object represents the measurement identifier of an entry in the measurement table. It uniquely identifies an entry in the table.')
noiMeasurementFileName = MibTableColumn((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1, 2), NoiMeasurementFileName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementFileName.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementFileName.setDescription('This object represents the file name of a measurement result file.')
noiMeasurementFileDirectory = MibTableColumn((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1, 3), NoiMeasurementFileDirectory()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementFileDirectory.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementFileDirectory.setDescription('This object represents the full path of a measurement resulta file.')
noiPmEventTime = MibTableColumn((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1, 4), NoiEventTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiPmEventTime.setStatus('current')
if mibBuilder.loadTexts: noiPmEventTime.setDescription('This object represents the time the event occured.')
# --- Notifications ---------------------------------------------------------
# Emitted when a new measurement file (and table row) has been created.
noiMeasurementResultReady = NotificationType((1, 3, 6, 1, 4, 1, 94, 7, 3, 3, 0, 1)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultIdentifier"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementFileDirectory"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementFileName"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmEventTime"))
if mibBuilder.loadTexts: noiMeasurementResultReady.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultReady.setDescription('This notification is used when a new measurement data file has been created and a new entry in the measurement table has been inserted.')
# Emitted after the agent drops and rebuilds the measurement table.
noiMeasurementResultTableRebuild = NotificationType((1, 3, 6, 1, 4, 1, 94, 7, 3, 3, 0, 2)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmAdditionalText"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmEventTime"))
if mibBuilder.loadTexts: noiMeasurementResultTableRebuild.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultTableRebuild.setDescription('This notification is used when the measurement table in the agent has been rebuild. The notification will be emitted after the measurement table has been dropped and all previously stored entries have been removed')
# --- Conformance: compliance statement and object/notification groups ------
noiPmIRPCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 94, 7, 3, 6, 1)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmMandatoryGroup"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmNotificationOptionalGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    noiPmIRPCompliance = noiPmIRPCompliance.setStatus('current')
if mibBuilder.loadTexts: noiPmIRPCompliance.setDescription('This specifies the objects that are required to claim compliance to NE3S PM Fragment.')
noiPmMandatoryGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 94, 7, 3, 6, 2)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmIrpVersion"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmLastMeasurementResultId"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementScheduleFileDirectory"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultTableCount"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultTableMaxCount"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultIdentifier"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementFileDirectory"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementFileName"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmEventTime"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmFileStoringPeriod"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    noiPmMandatoryGroup = noiPmMandatoryGroup.setStatus('current')
if mibBuilder.loadTexts: noiPmMandatoryGroup.setDescription('A collection of objects that represents mandatory PM attributes.')
noiPmNotificationOptionalGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 94, 7, 3, 6, 3)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultReady"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultTableRebuild"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    noiPmNotificationOptionalGroup = noiPmNotificationOptionalGroup.setStatus('current')
if mibBuilder.loadTexts: noiPmNotificationOptionalGroup.setDescription('A collection of optional measurement notifications.')
mibBuilder.exportSymbols("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", noiPmMandatoryGroup=noiPmMandatoryGroup, noiMeasurementResultIdentifier=noiMeasurementResultIdentifier, noiSnmpPmIrp=noiSnmpPmIrp, noiMeasurementFileDirectory=noiMeasurementFileDirectory, noiMeasurementResultTableCount=noiMeasurementResultTableCount, noiPmIrpVersion=noiPmIrpVersion, noiPmNotificationOptionalGroup=noiPmNotificationOptionalGroup, noiMeasurementJobStatus=noiMeasurementJobStatus, noiMeasurementFileName=noiMeasurementFileName, noiMeasurementResultTableRebuild=noiMeasurementResultTableRebuild, noiPmEventTime=noiPmEventTime, noiPmLastMeasurementResultId=noiPmLastMeasurementResultId, noiMeasurementResultEntry=noiMeasurementResultEntry, noiPmResultTransfer=noiPmResultTransfer, noiPmFileStoringPeriod=noiPmFileStoringPeriod, noiMeasurementActivationError=noiMeasurementActivationError, noiPmAdditionalText=noiPmAdditionalText, noiMeasurementResultTable=noiMeasurementResultTable, noiMeasurementScheduleFileDirectory=noiMeasurementScheduleFileDirectory, noiMeasurementRepositoryDirectory=noiMeasurementRepositoryDirectory, noiMeasurementResultReady=noiMeasurementResultReady, noiMeasurementRepositoryFile=noiMeasurementRepositoryFile, noiMeasurementResultTableMaxCount=noiMeasurementResultTableMaxCount, noiPmIRPCompliance=noiPmIRPCompliance, noiPmFileTransferProtocol=noiPmFileTransferProtocol, PYSNMP_MODULE_ID=noiSnmpPmIrp)
| [
"[email protected]"
]
| |
f446b6c8b2833b421592915d637db99761f2c596 | 18aee5d93a63eab684fe69e3aa0abd1372dd5d08 | /python/paddle/nn/layer/distance.py | e2fb10f252f1008f0ddc5e41e1e48afbedb8d67c | [
"Apache-2.0"
]
| permissive | Shixiaowei02/Paddle | 8d049f4f29e281de2fb1ffcd143997c88078eadb | 3d4d995f26c48f7792b325806ec3d110fc59f6fc | refs/heads/develop | 2023-06-26T06:25:48.074273 | 2023-06-14T06:40:21 | 2023-06-14T06:40:21 | 174,320,213 | 2 | 1 | Apache-2.0 | 2022-12-28T05:14:30 | 2019-03-07T10:09:34 | C++ | UTF-8 | Python | false | false | 3,333 | py | # Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .. import functional as F
from .layers import Layer
__all__ = []
class PairwiseDistance(Layer):
    r"""
    Layer computing the pairwise distance between corresponding rows of two
    tensors, using the p-order norm:

    .. math::

        \Vert x \Vert _p = \left( \sum_{i=1}^n \vert x_i \vert ^ p \right) ^ {1/p}.

    Parameters:
        p (float, optional): The order of norm. Default: :math:`2.0`.
        epsilon (float, optional): Small value added to avoid division by
            zero. Default: :math:`1e-6`.
        keepdim (bool, optional): Whether to keep the reduced dimension in
            the output Tensor; when False the result has one dimension less
            than ``|x-y|``. Default: False.
        name (str, optional): For details, please refer to
            :ref:`api_guide_Name`. Generally, no setting is required.
            Default: None.

    Shape:
        - x: :math:`[N, D]` or :math:`[D]` (:math:`N` batch size, :math:`D`
          feature dimension); float16, float32 or float64.
        - y: same shape and dtype as x.
        - output: :math:`[N, 1]` / :math:`[1]` when ``keepdim`` is True,
          otherwise :math:`[N]` / :math:`[]`, with the input dtype.

    Examples:
        .. code-block:: python

            import paddle
            x = paddle.to_tensor([[1., 3.], [3., 5.]], dtype=paddle.float64)
            y = paddle.to_tensor([[5., 6.], [7., 8.]], dtype=paddle.float64)
            dist = paddle.nn.PairwiseDistance()
            distance = dist(x, y)
            print(distance)
            # Tensor(shape=[2], dtype=float64, place=Place(gpu:0), stop_gradient=True,
            #        [4.99999860, 4.99999860])
    """

    def __init__(self, p=2.0, epsilon=1e-6, keepdim=False, name=None):
        super().__init__()
        self.p = p
        self.epsilon = epsilon
        self.keepdim = keepdim
        self.name = name

    def forward(self, x, y):
        # Thin wrapper over the functional API.
        return F.pairwise_distance(
            x, y, self.p, self.epsilon, self.keepdim, self.name
        )

    def extra_repr(self):
        # Show only the settings that differ from the constructor defaults.
        pieces = ['p={p}']
        if self.epsilon != 1e-6:
            pieces.append('epsilon={epsilon}')
        if self.keepdim is not False:
            pieces.append('keepdim={keepdim}')
        if self.name is not None:
            pieces.append('name={name}')
        return ', '.join(pieces).format(**self.__dict__)
"[email protected]"
]
| |
61b29b234ac538819a506f9b35ad04235d880747 | 6e1f550b71e212cec90f070e2e9a1dd714de55d6 | /setup.py | 1586e43ed05c743040c5c403f039e0139055220a | [
"MIT"
]
| permissive | wbond/gears-libsass | b4f7986ffa642b79a5b1ab3125c9d9056d6777fc | a861089306572b5250c3d3716abd916dc140fa20 | refs/heads/master | 2020-04-10T06:21:20.079859 | 2018-12-07T17:09:42 | 2018-12-07T17:09:42 | 160,851,933 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,004 | py | import os
from setuptools import setup, find_packages
def read(filename):
    """Return the text of *filename*, resolved relative to this file's directory.

    Fixes a resource leak — the original left the file handle open (no
    close / context manager) — and reads as UTF-8 instead of the platform
    default encoding, which could mis-decode the README on some systems.
    """
    path = os.path.join(os.path.dirname(__file__), filename)
    with open(path, encoding='utf-8') as f:
        return f.read()
# Package metadata for the gears-libsass SCSS compiler plugin.
setup(
    name='gears-libsass',
    version='0.1.0',
    url='https://github.com/wbond/gears-libsass',
    license='MIT',
    author='Will Bond',
    author_email='[email protected]',
    description='Python libsass-based SCSS compiler for Gears',
    # Long description is the README, loaded via the read() helper above.
    long_description=read('README.rst'),
    packages=find_packages(),
    include_package_data=True,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ],
    # Runtime dependency: the libsass binding that does the compiling.
    install_requires=[
        "libsass >= 0.16.1"
    ],
) | [
"[email protected]"
]
| |
4b0cd507b0fe4a7edf15fe8c9200e2b3b34115f5 | 59522e46a73630181f19251b8bfef90e497c2f82 | /coop_cms/apps/test_app/forms.py | a28945b3361774cf45cf14979bb0f7de7f2e8161 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | ljean/coop_cms | 9befe74edda007686007f8566cd2555856099ae8 | 9e6c70afb61b57dc0326fbb64f9d6b19c04f48a1 | refs/heads/master | 2023-07-11T16:02:35.945029 | 2023-06-30T12:16:26 | 2023-06-30T12:16:26 | 5,846,409 | 3 | 5 | NOASSERTION | 2019-08-30T10:55:02 | 2012-09-17T19:53:56 | Python | UTF-8 | Python | false | false | 1,084 | py | # -*- coding: utf-8 -*-
"""forms"""
import floppyforms as forms
from coop_html_editor.widgets import get_inline_html_widget
from ...forms.articles import NewArticleForm, ArticleSettingsForm
from ...forms.base import InlineHtmlEditableModelForm
from ...forms.newsletters import NewsletterSettingsForm
from .models import TestClass
class TestClassForm(InlineHtmlEditableModelForm):
    """Model form over TestClass used by the unit tests.

    'field2' gets an explicit inline HTML editor widget, while the fields in
    ``no_inline_html_widgets`` are kept out of inline HTML editing.
    """
    class Meta:
        model = TestClass
        fields = ('field1', 'field2', 'field3', 'bool_field', 'int_field', 'float_field')
        widgets = {
            # Rendered with the inline HTML editor instead of the default widget.
            'field2': get_inline_html_widget(),
        }
    # Fields excluded from inline HTML editing widgets.
    no_inline_html_widgets = ('field2', 'field3', 'bool_field', 'int_field', 'float_field')
class MyNewArticleForm(NewArticleForm):
    """Test double for NewArticleForm adding one extra optional field."""
    dummy = forms.CharField(required=False)
class MyArticleSettingsForm(ArticleSettingsForm):
    """Test double for ArticleSettingsForm adding one extra optional field."""
    dummy = forms.CharField(required=False)
class MyNewsletterSettingsForm(NewsletterSettingsForm):
    """Test double for NewsletterSettingsForm adding one extra optional field."""
    dummy = forms.CharField(required=False)
| [
"[email protected]"
]
| |
dd1221db4a088cf90bcdefff2c489c4642863126 | 9c3934402046850104523e9d942d62e42175b512 | /theblog/urls.py | e8d6cd26d99ae97986d6defb2b4be29934047079 | []
| no_license | Dekatron322/myblog | 39954bf26ac7468dae2e888aba1a1855a0832835 | 4d4118eecb458dc53073cd8c3ff9eaa0235926c8 | refs/heads/master | 2022-06-18T07:01:57.218863 | 2020-05-08T08:35:57 | 2020-05-08T08:35:57 | 262,269,197 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 531 | py | from django.urls import path, include
from .import views
# URL routes for the blog app; names are referenced by templates via {% url %}.
urlpatterns = [
    path('', views.index, name="index"),
    path('blog/', views.blog, name="post-list"),
    # <id> uses the default string converter, so views receive it as str.
    path('post/<id>/', views.post, name="post-detail"),
    path('search/', views.search, name='search'),
    path('tinymce/', include('tinymce.urls')),
    # NOTE(review): update/delete routes have no trailing slash, unlike the
    # other routes -- confirm this is intentional before normalizing.
    path('post/<id>/update', views.post_update, name="post-update"),
    path('post/<id>/delete', views.post_delete, name="post-delete"),
    path('create/', views.post_create, name="post-create"),
]
| [
"[email protected]"
]
| |
97d31bc99318da98c36566bc2f7a502e1953d6d9 | 54e4c1a57765519c77d04fc02112c7f3bbacc595 | /prob_1317.py | 11e01b22fc965fafc4a81f4f1b4e4ef0ee88e358 | []
class Solution:
    def sortString(self, s: str) -> str:
        """Reorder *s* by alternately appending the smallest unused characters
        ascending, then the largest descending, until all are consumed
        (LeetCode 1370, "Increasing Decreasing String").

        Improvement over the previous version: the old code repeatedly called
        ``sorted(x)`` and ``x.pop(x.index(i))`` inside the loop (quadratic);
        this version counts characters once and walks a fixed sorted alphabet,
        running in O(n + rounds * k) for k distinct characters.
        """
        # Count each character once.
        counts = {}
        for ch in s:
            counts[ch] = counts.get(ch, 0) + 1
        ascending = sorted(counts)
        result = []
        remaining = len(s)
        while remaining:
            # Smallest-to-largest pass: one of each still-available character.
            for ch in ascending:
                if counts[ch]:
                    counts[ch] -= 1
                    remaining -= 1
                    result.append(ch)
            # Largest-to-smallest pass.
            for ch in reversed(ascending):
                if counts[ch]:
                    counts[ch] -= 1
                    remaining -= 1
                    result.append(ch)
        return "".join(result)
| [
"[email protected]"
]
| |
9cf95ea8c8587547eda0ba121e569f8022e1aa55 | 19172e15355d4ef8ae4622d0ed6993c0cd4d80ea | /watchmen/pipeline/single/stage/unit/utils/units_func.py | 1092797ba1dbe85e75457093d67cdc1b76bc612d | [
"MIT"
]
| permissive | TSRAW/watchmen-matryoshka-doll | d5b50fc4014fbea11c2765f377ca904d17a4c579 | ab17e36a0f26f8e461296a222e6f7f2a4532c877 | refs/heads/master | 2023-03-18T05:40:08.354019 | 2021-03-17T15:52:34 | 2021-03-17T15:52:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,313 | py | from datetime import datetime
from watchmen.common.constants import parameter_constants, pipeline_constants
from watchmen.topic.factor.factor import Factor
from watchmen.topic.topic import Topic
# Audit operation names used by add_audit_columns().
INSERT = "insert"
UPDATE = "update"
# Factor type identifiers used by convert_factor_type() and topic factors.
SEQUENCE = "sequence"
NUMBER = "number"
UNSIGNED = "unsigned" # 0 & positive
TEXT = "text"
# NOTE(review): TIME is re-assigned below with the same value -- one of the
# two definitions is redundant.
TIME = 'time'
# address
ADDRESS = "address"
CONTINENT = "continent"
REGION = "region"
COUNTRY = "country"
PROVINCE = "province"
CITY = "city"
DISTRICT = "district"
ROAD = "road"
COMMUNITY = "community"
FLOOR = "floor"
RESIDENCE_TYPE = "residence-type"
RESIDENTIAL_AREA = "residential-area"
EMAIL = "email"
PHONE = "phone"
MOBILE = "mobile"
FAX = "fax"
DATETIME = "datetime" # YYYY - MM - DD
DATE = "date" # YYYY - MM - DD
TIME = "time" # HH: mm:ss
YEAR = "year" # 4
HALF_YEAR = "half-year" # 1: first
QUARTER = "quarter" # 1 - 4
SEASON = "season" # 1: spring, 2: summer, 3: autumn, 4: winter
MONTH = "month" # 1 - 12
HALF_MONTH = "half-month" # 1: first
TEN_DAYS = "ten-days" # 1, 2, 3
WEEK_OF_YEAR = "week-of-year" # 1 - 53
WEEK_OF_MONTH = "week-of-month" # 1 - 6
HALF_WEEK = "half-week" # 1: first
DAY_OF_MONTH = "day-of-month" # 1 - 31, according
DAY_OF_WEEK = "day-of-week" # 1 - 7
DAY_KIND = "day-kind" # 1: workday, 2: weekend, 3: holiday
HOUR = "hour" # 0 - 23
HOUR_KIND = "hour-kind" # 1: work
MINUTE = "minute" # 0 - 59
SECOND = "second" # 0 - 59
AM_PM = "am-pm" # 1, 2
# individual
GENDER = "gender"
OCCUPATION = "occupation"
DATE_OF_BIRTH = "date-of-birth" # YYYY - MM - DD
AGE = "age"
ID_NO = "id-no"
RELIGION = "religion"
NATIONALITY = "nationality"
# organization
BIZ_TRADE = "biz-trade"
BIZ_SCALE = "biz-scale"
BOOLEAN = "boolean"
ENUM = "enum"
OBJECT = "object"
ARRAY = "array"
def check_condition(operator, left_value, right_value):
    """Evaluate the binary comparison named by *operator* on the two values.

    Supported names: equals, not-equals, less, less-equals, more,
    more-equals. Any other name raises Exception, as before.
    """
    comparisons = {
        "equals": lambda a, b: a == b,
        "not-equals": lambda a, b: a != b,
        "less": lambda a, b: a < b,
        "less-equals": lambda a, b: a <= b,
        "more": lambda a, b: a > b,
        "more-equals": lambda a, b: a >= b,
    }
    if operator not in comparisons:
        raise Exception("NotImplemented:", operator)
    return comparisons[operator](left_value, right_value)
def convert_factor_type(value, factor_type):
    """Coerce a raw *value* into the Python type implied by *factor_type*.

    Factor types without a registered converter pass *value* through
    unchanged, mirroring the final else of the original elif chain.
    """
    converters = {
        TEXT: str,
        NUMBER: float,  # TODO process number type
        DATETIME: datetime.fromisoformat,
        BOOLEAN: bool,
        SEQUENCE: int,
        YEAR: int,
        MONTH: int,
        TIME: lambda v: round(v * 1000),
    }
    convert = converters.get(factor_type)
    return value if convert is None else convert(value)
def build_factor_dict(topic: Topic):
    """Index the factors of *topic* by their factorId."""
    return {factor.factorId: factor for factor in topic.factors}
def get_factor(factor_id, target_topic):
    """Return the factor of *target_topic* whose factorId matches, else None."""
    matches = (factor for factor in target_topic.factors
               if factor.factorId == factor_id)
    return next(matches, None)
def get_execute_time(start_time):
    """Return the elapsed time since *start_time* in milliseconds.

    Bug fix: the previous version read ``timedelta.microseconds``, which only
    holds the sub-second component, so any elapsed time of one second or more
    was reported modulo one second. ``total_seconds`` covers the full span.
    """
    time_elapsed = datetime.now() - start_time
    return time_elapsed.total_seconds() * 1000
def get_value(factor: Factor, data):
    """Fetch *factor*'s value from the *data* record, coerced to its type.

    Missing keys yield None for every factor type (the original branched on
    "number"/"text" but each branch returned None anyway).
    """
    if factor.name not in data:
        return None
    return convert_factor_type(data[factor.name], factor.type)
def add_audit_columns(dictionary, audit_type):
    """Stamp *dictionary* with the audit timestamp column for *audit_type*.

    Raises Exception for any audit_type other than INSERT or UPDATE,
    matching the original contract.
    """
    column_by_type = {
        INSERT: pipeline_constants.INSERT_TIME,
        UPDATE: pipeline_constants.UPDATE_TIME,
    }
    if audit_type not in column_by_type:
        raise Exception("unknown audit_type")
    dictionary[column_by_type[audit_type]] = datetime.now()
def add_trace_columns(dictionary, trace_type, pipeline_uid):
    """Record the originating pipeline uid under the *trace_type* key."""
    dictionary[trace_type] = pipeline_uid
def process_variable(variable_name):
    """Classify a variable reference into ("memory", name) or (CONSTANT, name).

    "{name}" style references resolve from pipeline memory; anything else is a
    constant. Note: every "{" and "}" is stripped, not just the outer pair,
    mirroring the original behavior.
    """
    if not variable_name.startswith("{"):
        return parameter_constants.CONSTANT, variable_name
    return "memory", variable_name.replace("{", "").replace("}", "")
| [
"[email protected]"
]
| |
b393f63f6ac9ee26aceb40dd7bb00e64e25785d3 | d806dd4a6791382813d2136283a602207fb4b43c | /sirius/blueprints/api/remote_service/tula/passive/childbirth/views.py | 1e86681447f1b9f6b1e9f4f7d3e504d827b7a501 | []
| no_license | MarsStirner/sirius | 5bbf2a03dafb7248db481e13aff63ff989fabbc2 | 8839460726cca080ca8549bacd3a498e519c8f96 | refs/heads/master | 2021-03-24T12:09:14.673193 | 2017-06-06T16:28:53 | 2017-06-06T16:28:53 | 96,042,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,382 | py | #! coding:utf-8
"""
@author: BARS Group
@date: 03.10.2016
"""
import sys
from flask import request
from sirius.blueprints.api.remote_service.tula.app import module
from sirius.blueprints.api.remote_service.tula.entities import TulaEntityCode
from sirius.blueprints.api.remote_service.tula.passive.childbirth.xform import \
ChildbirthTulaXForm
from sirius.blueprints.monitor.exception import remote_api_method
from sirius.blueprints.monitor.logformat import hook
# URL parameter name for the parent card; also used as a key in parents_params.
parent_id_name = 'card_id'
@module.route('/api/integration/<int:api_version>/card/<' + parent_id_name + '>/childbirth/',
              methods=['POST', 'PUT', 'DELETE'])
@remote_api_method(hook=hook)
def api_childbirth_change(api_version, **kwargs):
    """Relay a childbirth create/update/delete event for a card.

    POST/PUT carry a JSON body which is validated before forwarding;
    DELETE forwards with no payload (data stays None).
    """
    # main_id = kwargs.get(main_id_name)
    parent_id = kwargs.get(parent_id_name)
    stream_id = kwargs.get('stream_id')
    data = None
    delete = request.method == 'DELETE'
    xform = ChildbirthTulaXForm(api_version, stream_id)
    if not delete:
        data = request.get_json()
        xform.validate(data)
        # main_id = data.get('main_id')
        # xform.check_params(card_id, main_id, data)
    # Uses the current frame's code name, i.e. 'api_childbirth_change'.
    service_name = sys._getframe().f_code.co_name
    parents_params = {
        parent_id_name: {'entity': TulaEntityCode.CARD, 'id': parent_id},
    }
    xform.send_messages(parent_id, parent_id_name, data, service_name, request.method, parents_params)
"[email protected]"
]
| |
c6569d076ffb391b828b0b0ad13e3266739a768b | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/attestation/v20210601preview/_enums.py | 647247b71cec4cfaee5ae075082eafac95c1b2cc | [
"BSD-3-Clause",
"Apache-2.0"
]
| permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 921 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from enum import Enum
__all__ = [
'PrivateEndpointServiceConnectionStatus',
'PublicNetworkAccessType',
]
class PrivateEndpointServiceConnectionStatus(str, Enum):
    """
    Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
    """
    # NOTE: this file is SDK-generated (see header); keep values in sync with
    # the service spec rather than editing by hand.
    PENDING = "Pending"
    APPROVED = "Approved"
    REJECTED = "Rejected"
class PublicNetworkAccessType(str, Enum):
    """
    Controls whether traffic from the public network is allowed to access the Attestation Provider APIs.
    """
    # The bare strings after each member are attribute docstrings emitted by
    # the SDK generator; leave them in place.
    ENABLED = "Enabled"
    """Enables public network connectivity to the Attestation Provider REST APIs."""
    DISABLED = "Disabled"
    """Disables public network connectivity to the Attestation Provider REST APIs."""
| [
"[email protected]"
]
| |
45403bc3673f7fdd17f2e29878219415405ea12a | 9e538305f9263d86e780a4a3f205c972f658f54d | /src/order/models/managers.py | 7b7538f837adf7cf43e89ce2fef561ffcab76f9c | []
| no_license | tanjibpa/mednet | bb188582b0d90407015622b34f0291557acb1919 | 19a7535d583077fec7b7030c298fceb4c4df3207 | refs/heads/main | 2023-05-26T07:44:27.615506 | 2021-06-10T06:30:19 | 2021-06-10T06:30:19 | 355,774,065 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 178 | py | from django.db import models
class SupplierOrderList(models.Manager):
    """Manager that narrows orders to those produced by supplier organisations."""
    def supplier_order(self):
        # Filters via the producer relation's org_type field.
        return super().get_queryset().filter(producer__org_type="supplier")
| [
"[email protected]"
]
| |
6ccc7f4bb583c7554918ac244ca1883a446d6583 | 8f2c55a2530c3e59dab5907c0044c618b88dd09b | /_pydevd_bundle/pydevd_reload.py | 507e73be2481c064a04777f28cadb48cc7177f70 | [
"Apache-2.0",
"EPL-1.0"
]
| permissive | fabioz/PyDev.Debugger | 5a9c6d4c09be85a0e2d9fb93567fd65faf04c81d | 26864816cbfcf002a99913bcc31ebef48042a4ac | refs/heads/main | 2023-08-18T01:08:34.323363 | 2023-04-15T11:15:47 | 2023-04-15T11:15:47 | 21,870,144 | 363 | 126 | Apache-2.0 | 2023-07-30T23:03:31 | 2014-07-15T18:01:12 | Python | UTF-8 | Python | false | false | 15,773 | py | """
Based on the python xreload.
Changes
======================
1. we don't recreate the old namespace from new classes. Rather, we keep the existing namespace,
load a new version of it and update only some of the things we can inplace. That way, we don't break
things such as singletons or end up with a second representation of the same class in memory.
2. If we find it to be a __metaclass__, we try to update it as a regular class.
3. We don't remove old attributes (and leave them lying around even if they're no longer used).
4. Reload hooks were changed
These changes make it more stable, especially in the common case where, during a debug session,
only the contents of a function are changed. They also provide flexibility for users who want
to extend it.
Hooks
======================
Classes/modules can be specially crafted to work with the reload (so that it can, for instance,
update some constant which was changed).
1. To participate in the change of some attribute:
In a module:
__xreload_old_new__(namespace, name, old, new)
in a class:
@classmethod
__xreload_old_new__(cls, name, old, new)
A class or module may include a method called '__xreload_old_new__' which is called when we're
unable to reload a given attribute.
2. To do something after the whole reload is finished:
In a module:
__xreload_after_reload_update__(namespace):
In a class:
@classmethod
__xreload_after_reload_update__(cls):
A class or module may include a method called '__xreload_after_reload_update__' which is called
after the reload finishes.
Important: when providing a hook, always use the namespace or cls provided and not anything in the global
namespace, as the global namespace are only temporarily created during the reload and may not reflect the
actual application state (while the cls and namespace passed are).
Current limitations
======================
- Attributes/constants are added, but not changed (so singletons and the application state is not
broken -- use provided hooks to workaround it).
- Code using metaclasses may not always work.
- Functions and methods using decorators (other than classmethod and staticmethod) are not handled
correctly.
- Renamings are not handled correctly.
- Dependent modules are not reloaded.
- New __slots__ can't be added to existing classes.
Info
======================
Original: http://svn.python.org/projects/sandbox/trunk/xreload/xreload.py
Note: it seems https://github.com/plone/plone.reload/blob/master/plone/reload/xreload.py enhances it (to check later)
Interesting alternative: https://code.google.com/p/reimport/
Alternative to reload().
This works by executing the module in a scratch namespace, and then patching classes, methods and
functions in place. This avoids the need to patch instances. New objects are copied into the
target namespace.
"""
from _pydev_bundle.pydev_imports import execfile
from _pydevd_bundle import pydevd_dont_trace
import types
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_constants import get_global_debugger
# Verbosity levels for the notify_* helpers below.
NO_DEBUG = 0
LEVEL1 = 1
LEVEL2 = 2
# Active verbosity; raise to LEVEL1/LEVEL2 for more reload diagnostics.
DEBUG = NO_DEBUG
def write_err(*args):
    """Send a 'code reload:' IO message through the active debugger, if any.

    Silently does nothing when no global debugger is registered.
    """
    py_db = get_global_debugger()
    if py_db is None:
        return
    msg = ' '.join(str(arg) for arg in args)
    s = 'code reload: %s\n' % (msg,)
    cmd = py_db.cmd_factory.make_io_message(s, 2)
    if py_db.writer is not None:
        py_db.writer.add_command(cmd)
def notify_info0(*args):
    """Emit unconditionally (user-facing reload progress)."""
    write_err(*args)
def notify_info(*args):
    """Emit only when DEBUG is at least LEVEL1."""
    if DEBUG >= LEVEL1:
        write_err(*args)
def notify_info2(*args):
    """Emit only when DEBUG is at least LEVEL2 (most verbose)."""
    if DEBUG >= LEVEL2:
        write_err(*args)
def notify_error(*args):
    """Errors are always emitted, regardless of DEBUG."""
    write_err(*args)
#=======================================================================================================================
# code_objects_equal
#=======================================================================================================================
def code_objects_equal(code0, code1):
    """Compare two code objects, ignoring private attributes, anything
    line-number related, and the 3.x metadata accessors listed below."""
    skipped = ('replace', 'co_positions', 'co_qualname')
    return all(
        getattr(code0, attr) == getattr(code1, attr)
        for attr in dir(code0)
        if not attr.startswith('_') and 'line' not in attr and attr not in skipped
    )
#=======================================================================================================================
# xreload
#=======================================================================================================================
def xreload(mod):
    """Reload module *mod* in place, patching classes, methods and functions.

    Returns True when at least one change was applied to the live module.
    """
    reloader = Reload(mod)
    reloader.apply()
    found_change = reloader.found_change
    reloader = None
    pydevd_dont_trace.clear_trace_filter_cache()
    return found_change
# This isn't actually used... Initially I planned to reload variables which are immutable on the
# namespace, but this can destroy places where we're saving state, which may not be what we want,
# so, we're being conservative and giving the user hooks if he wants to do a reload.
#
# immutable_types = [int, str, float, tuple] #That should be common to all Python versions
#
# for name in 'long basestr unicode frozenset'.split():
# try:
# immutable_types.append(__builtins__[name])
# except:
# pass #Just ignore: not all python versions are created equal.
# immutable_types = tuple(immutable_types)
#=======================================================================================================================
# Reload
#=======================================================================================================================
class Reload:
    """Engine behind xreload(): re-executes a module's source in a scratch
    namespace, then patches the live module dict, classes, methods and
    functions in place so existing references keep working."""
    def __init__(self, mod, mod_name=None, mod_filename=None):
        self.mod = mod
        if mod_name:
            self.mod_name = mod_name
        else:
            self.mod_name = mod.__name__ if mod is not None else None
        if mod_filename:
            self.mod_filename = mod_filename
        else:
            self.mod_filename = mod.__file__ if mod is not None else None
        # Set to True whenever any attribute is added/updated during apply().
        self.found_change = False
    def apply(self):
        """Execute the module's current source and merge the result into the
        live namespace. Errors are logged via pydev_log, never raised."""
        mod = self.mod
        self._on_finish_callbacks = []
        try:
            # Get the module namespace (dict) early; this is part of the type check
            modns = mod.__dict__
            # Execute the code. We copy the module dict to a temporary; then
            # clear the module dict; then execute the new code in the module
            # dict; then swap things back and around. This trick (due to
            # Glyph Lefkowitz) ensures that the (readonly) __globals__
            # attribute of methods and functions is set to the correct dict
            # object.
            new_namespace = modns.copy()
            new_namespace.clear()
            if self.mod_filename:
                new_namespace["__file__"] = self.mod_filename
            try:
                new_namespace["__builtins__"] = __builtins__
            except NameError:
                # NOTE(review): the trailing comment says "Ok if not there",
                # yet this re-raises the NameError -- 'pass' may have been
                # intended; confirm against upstream xreload.
                raise # Ok if not there.
            if self.mod_name:
                new_namespace["__name__"] = self.mod_name
                if new_namespace["__name__"] == '__main__':
                    # We do this because usually the __main__ starts-up the program, guarded by
                    # the if __name__ == '__main__', but we don't want to start the program again
                    # on a reload.
                    new_namespace["__name__"] = '__main_reloaded__'
            execfile(self.mod_filename, new_namespace, new_namespace)
            # Now we get to the hard part
            oldnames = set(modns)
            newnames = set(new_namespace)
            # Create new tokens (note: not deleting existing)
            for name in newnames - oldnames:
                notify_info0('Added:', name, 'to namespace')
                self.found_change = True
                modns[name] = new_namespace[name]
            # Update in-place what we can
            for name in oldnames & newnames:
                self._update(modns, name, modns[name], new_namespace[name])
            self._handle_namespace(modns)
            # Run any __xreload_after_reload_update__ hooks queued above.
            for c in self._on_finish_callbacks:
                c()
            del self._on_finish_callbacks[:]
        except:
            pydev_log.exception()
    def _handle_namespace(self, namespace, is_class_namespace=False):
        """Queue the namespace's __xreload_after_reload_update__ hook, if any
        (classmethod-style for classes, module-level function otherwise)."""
        on_finish = None
        if is_class_namespace:
            xreload_after_update = getattr(namespace, '__xreload_after_reload_update__', None)
            if xreload_after_update is not None:
                self.found_change = True
                on_finish = lambda: xreload_after_update()
        elif '__xreload_after_reload_update__' in namespace:
            xreload_after_update = namespace['__xreload_after_reload_update__']
            self.found_change = True
            on_finish = lambda: xreload_after_update(namespace)
        if on_finish is not None:
            # If a client wants to know about it, give him a chance.
            self._on_finish_callbacks.append(on_finish)
    def _update(self, namespace, name, oldobj, newobj, is_class_namespace=False):
        """Update oldobj, if possible in place, with newobj.
        If oldobj is immutable, this simply returns newobj.
        Args:
          oldobj: the object to be updated
          newobj: the object used as the source for the update
        """
        try:
            notify_info2('Updating: ', oldobj)
            if oldobj is newobj:
                # Probably something imported
                return
            if type(oldobj) is not type(newobj):
                # Cop-out: if the type changed, give up
                if name not in ('__builtins__',):
                    notify_error('Type of: %s (old: %s != new: %s) changed... Skipping.' % (name, type(oldobj), type(newobj)))
                return
            if isinstance(newobj, types.FunctionType):
                self._update_function(oldobj, newobj)
                return
            if isinstance(newobj, types.MethodType):
                self._update_method(oldobj, newobj)
                return
            if isinstance(newobj, classmethod):
                self._update_classmethod(oldobj, newobj)
                return
            if isinstance(newobj, staticmethod):
                self._update_staticmethod(oldobj, newobj)
                return
            if hasattr(types, 'ClassType'):
                classtype = (types.ClassType, type) # object is not instance of types.ClassType.
            else:
                classtype = type
            if isinstance(newobj, classtype):
                self._update_class(oldobj, newobj)
                return
            # New: dealing with metaclasses.
            if hasattr(newobj, '__metaclass__') and hasattr(newobj, '__class__') and newobj.__metaclass__ == newobj.__class__:
                self._update_class(oldobj, newobj)
                return
            if namespace is not None:
                # Check for the `__xreload_old_new__` protocol (don't even compare things
                # as even doing a comparison may break things -- see: https://github.com/microsoft/debugpy/issues/615).
                xreload_old_new = None
                if is_class_namespace:
                    xreload_old_new = getattr(namespace, '__xreload_old_new__', None)
                    if xreload_old_new is not None:
                        self.found_change = True
                        xreload_old_new(name, oldobj, newobj)
                elif '__xreload_old_new__' in namespace:
                    xreload_old_new = namespace['__xreload_old_new__']
                    xreload_old_new(namespace, name, oldobj, newobj)
                    self.found_change = True
                # Too much information to the user...
                # else:
                #     notify_info0('%s NOT updated. Create __xreload_old_new__(name, old, new) for custom reload' % (name,))
        except:
            notify_error('Exception found when updating %s. Proceeding for other items.' % (name,))
            pydev_log.exception()
    # All of the following functions have the same signature as _update()
    def _update_function(self, oldfunc, newfunc):
        """Update a function object in place (doc, dict, code, defaults)."""
        oldfunc.__doc__ = newfunc.__doc__
        oldfunc.__dict__.update(newfunc.__dict__)
        try:
            newfunc.__code__
            attr_name = '__code__'
        except AttributeError:
            # Python 2 fallback attribute name.
            newfunc.func_code
            attr_name = 'func_code'
        old_code = getattr(oldfunc, attr_name)
        new_code = getattr(newfunc, attr_name)
        if not code_objects_equal(old_code, new_code):
            notify_info0('Updated function code:', oldfunc)
            setattr(oldfunc, attr_name, new_code)
            self.found_change = True
        try:
            oldfunc.__defaults__ = newfunc.__defaults__
        except AttributeError:
            oldfunc.func_defaults = newfunc.func_defaults
        return oldfunc
    def _update_method(self, oldmeth, newmeth):
        """Update a method object via its underlying function."""
        # XXX What if im_func is not a function?
        if hasattr(oldmeth, 'im_func') and hasattr(newmeth, 'im_func'):
            self._update(None, None, oldmeth.im_func, newmeth.im_func)
        elif hasattr(oldmeth, '__func__') and hasattr(newmeth, '__func__'):
            self._update(None, None, oldmeth.__func__, newmeth.__func__)
        return oldmeth
    def _update_class(self, oldclass, newclass):
        """Update a class object: add new attributes, recurse into shared ones."""
        olddict = oldclass.__dict__
        newdict = newclass.__dict__
        oldnames = set(olddict)
        newnames = set(newdict)
        for name in newnames - oldnames:
            setattr(oldclass, name, newdict[name])
            notify_info0('Added:', name, 'to', oldclass)
            self.found_change = True
        # Note: not removing old things...
        # for name in oldnames - newnames:
        #     notify_info('Removed:', name, 'from', oldclass)
        #     delattr(oldclass, name)
        for name in (oldnames & newnames) - set(['__dict__', '__doc__']):
            self._update(oldclass, name, olddict[name], newdict[name], is_class_namespace=True)
        old_bases = getattr(oldclass, '__bases__', None)
        new_bases = getattr(newclass, '__bases__', None)
        if str(old_bases) != str(new_bases):
            notify_error('Changing the hierarchy of a class is not supported. %s may be inconsistent.' % (oldclass,))
        self._handle_namespace(oldclass, is_class_namespace=True)
    def _update_classmethod(self, oldcm, newcm):
        """Update a classmethod."""
        # While we can't modify the classmethod object itself (it has no
        # mutable attributes), we *can* extract the underlying function
        # (by calling __get__(), which returns a method object) and update
        # it in-place. We don't have the class available to pass to
        # __get__() but any object except None will do.
        self._update(None, None, oldcm.__get__(0), newcm.__get__(0))
    def _update_staticmethod(self, oldsm, newsm):
        """Update a staticmethod."""
        # While we can't modify the staticmethod object itself (it has no
        # mutable attributes), we *can* extract the underlying function
        # (by calling __get__(), which returns it) and update it in-place.
        # We don't have the class available to pass to __get__() but any
        # object except None will do.
        self._update(None, None, oldsm.__get__(0), newsm.__get__(0))
| [
"[email protected]"
]
| |
77e3a3bf9a976c804784f6bbc248d5188678a70b | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_fawn.py | 260afb89b3b0bae13a38db08457adb7aad8566e8 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 322 | py |
#calss header
class _FAWN():
def __init__(self,):
self.name = "FAWN"
self.definitions = [u'a young deer', u'a pale yellowish-brown colour']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
]
| |
636287a026b036c4db22cc1f8fbad2a93d1e3f6b | 90e39e45d469bb5dd9cb36805a88c97f41c147de | /2-do_deploy_web_static.py | 5fc6c711832e5b59a00fa0831b049af1e986aac4 | []
| no_license | Noeuclides/AirBnB_clone_v2 | 372b3d01ba76d41a79dca166d6ca7d471749a07d | 13fac5127af0149e7bef9a94b70e6d2746eeb4fd | refs/heads/master | 2020-07-03T19:16:10.404783 | 2019-09-11T00:42:29 | 2019-09-11T00:42:29 | 202,020,044 | 0 | 2 | null | 2019-08-19T03:08:39 | 2019-08-12T22:44:22 | Python | UTF-8 | Python | false | false | 1,700 | py | #!/usr/bin/python3
from datetime import datetime
from fabric.api import *
from os import path
'''automatize with fabric
'''
'''env.user = 'localhost'
'''
# Remote web servers targeted by do_deploy(); fabric fans commands out to each.
env.hosts = ['35.231.53.89', '35.190.176.186']
def do_pack():
    '''Create versions/web_static_<timestamp>.tgz from web_static.

    Returns the archive file name, or None when the tar invocation yields
    no result object.
    '''
    archive_name = "web_static_{}.tgz".format(
        datetime.now().strftime("%Y%m%d%H%M%S"))
    local("mkdir -p versions")
    result = local("tar -cvzf versions/{} web_static".format(archive_name))
    return archive_name if result is not None else None
def do_deploy(archive_path):
    '''Distribute an archive to the web servers and activate it.

    Args:
        archive_path: local path to a .tgz produced by do_pack, e.g.
            "versions/web_static_20190101000000.tgz".
    Returns:
        True when every remote step succeeded, False otherwise.

    Fixes over the previous version:
    * the existence check was ``if str(path.exists(...)) is False:`` which
      can never be true (str() never returns the False singleton), so a
      missing archive was not rejected; it now tests the boolean directly.
    * the tar command mixed a named placeholder ``{file}`` with positional
      ``.format`` arguments, which raises KeyError at runtime.
    '''
    if not path.exists(archive_path):
        return False
    results = []
    # Archive file name and its extension-free folder name.
    archive_file = archive_path.split("/")[-1]
    folder = archive_file.split('.')[0]
    release_dir = "/data/web_static/releases/{}".format(folder)
    results.append(put(archive_path, '/tmp'))
    results.append(run("mkdir -p {}".format(release_dir)))
    results.append(run("tar -xzf /tmp/{} -C {}".format(archive_file, release_dir)))
    results.append(run("rm /tmp/{}".format(archive_file)))
    results.append(run("mv {0}/web_static/* {0}".format(release_dir)))
    results.append(run("rm -rf {}/web_static".format(release_dir)))
    results.append(run("rm -rf /data/web_static/current"))
    results.append(run("ln -s {}/ /data/web_static/current".format(release_dir)))
    for result in results:
        if result is False:
            return False
    return True
| [
"[email protected]"
]
| |
6af689639ddfcb358242510a287fa6c89aca2e3a | b22588340d7925b614a735bbbde1b351ad657ffc | /athena/LArCalorimeter/LArCalibTools/share/LArMCConditions2Ntuple.py | 1a35ffa4835cfb273b6320e18243c2bfdc57f847 | []
| no_license | rushioda/PIXELVALID_athena | 90befe12042c1249cbb3655dde1428bb9b9a42ce | 22df23187ef85e9c3120122c8375ea0e7d8ea440 | refs/heads/master | 2020-12-14T22:01:15.365949 | 2020-01-19T03:59:35 | 2020-01-19T03:59:35 | 234,836,993 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,483 | py | import AthenaCommon.AtlasUnixGeneratorJob #use MC event selector
from string import split,join
## get a handle to the default top-level algorithm sequence
from AthenaCommon.AlgSequence import AlgSequence
topSequence = AlgSequence()
#Input Parameters:
# PoolFiles: sequence of pool files to read from though CondProxyProvider
# if not given, read from COOL
#
# RunNumber: Input to COOL IOV-DB if reading from
#
# RootFile: root file for the ntuple
#
# Objects: List of objects written to ntuple (e.g. PEDESTAL, RAMP, OFC,
# MPHYSOVERMCAL, SHAPE, UA2MEV)
DBTag='OFLCOND-SDR-BS14T-IBL-06'
# Defaults below only apply when the name was not pre-set by the caller.
if not 'InputDB' in dir():
    InputDB="COOLOFL_LAR/OFLP200"
if not "OFCFolder" in dir():
    OFCFolder="5samples1phase"
if not 'RunNumber' in dir():
    RunNumber=2147483647
if not "RootFile" in dir():
    RootFile="LArConditions.root"
if not "Objects" in dir():
    Objects=["PEDESTAL","RAMP","OFC","MPHYSOVERMCAL","SHAPE","UA2MEV"]
# NOTE(review): DBTag is assigned unconditionally above, so this conditional
# default can never take effect -- confirm which tag is intended.
if not "DBTag" in dir():
    DBTag="LARCALIB-000-01"
def doObj(objName):
    """Return True when *objName* occurs (case-insensitively, as a substring)
    in any entry of the requested Objects list."""
    needle = objName.upper()
    return any(needle in candidate.upper() for candidate in Objects)
def getDBFolderAndTag(folder):
    """Build the IOVDbSvc folder string "<db>...</db><folder>[<tag>...</tag>]".

    NOTE(review): inside a function, dir() lists only local names, so the
    "TagSuffix" test presumably never succeeds even when TagSuffix is set at
    module scope -- confirm whether globals() was intended.
    """
    if "TagSuffix" in dir():
        tag="<tag>"+join(split(folder, '/'),'') + TagSuffix+"</tag>"
    else:
        tag=""
    return "<db>"+InputDB+"</db>"+folder+tag
# Configure the job as MC reading pool-format input.
from AthenaCommon.GlobalFlags import globalflags
globalflags.DataSource="geant4"
globalflags.InputFormat="pool"
from AthenaCommon.JobProperties import jobproperties
jobproperties.Global.DetDescrVersion = "ATLAS-GEO-18-01-03"
# Switch off every detector/digitization step: only conditions are read.
from AthenaCommon.DetFlags import DetFlags
DetFlags.Calo_setOff()
DetFlags.ID_setOff()
DetFlags.Muon_setOff()
DetFlags.Truth_setOff()
DetFlags.LVL1_setOff()
DetFlags.digitize.all_setOff()
#Set up GeoModel (not really needed but crashes without)
from AtlasGeoModel import SetGeometryVersion
from AtlasGeoModel import GeoModelInit
#Get identifier mapping (needed by LArConditionsContainer)
svcMgr.IOVDbSvc.GlobalTag=DBTag
include( "LArConditionsCommon/LArIdMap_comm_jobOptions.py" )
theApp.EvtMax = 1
svcMgr.EventSelector.RunNumber = RunNumber
conddb.addFolder("","<db>COOLOFL_LAR/OFLP200</db>/LAR/BadChannels/BadChannels<key>/LAR/BadChannels/BadChannels</key>")
conddb.addFolder("","<db>COOLOFL_LAR/OFLP200</db>/LAR/BadChannels/MissingFEBs<key>/LAR/BadChannels/MissingFEBs</key>")
conddb.addOverride('/LAR/Identifier/FebRodAtlas','FebRodAtlas-005')
conddb.addOverride('/LAR/Identifier/OnOffIdAtlas','OnOffIdAtlas-012')
# Optional: read conditions from explicit POOL files instead of COOL.
if 'PoolFiles' in dir():
    from AthenaCommon.ConfigurableDb import getConfigurable
    from AthenaCommon.AppMgr import ServiceMgr
    ServiceMgr.ProxyProviderSvc.ProviderNames += [ "CondProxyProvider" ]
    ServiceMgr += getConfigurable( "CondProxyProvider" )()
    svcMgr.CondProxyProvider.InputCollections=PoolFiles
    if 'PoolCat' in dir():
        svcMgr.PoolSvc.ReadCatalog+=["xmlcatalog_file:"+PoolCat]
loadCastorCat=False
# One section per requested object: load its conditions folder(s) and append
# the matching *2Ntuple algorithm to the sequence.
if doObj("PEDESTAL"):
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/Pedestal"))
    from LArCalibTools.LArCalibToolsConf import LArPedestals2Ntuple
    LArPedestals2Ntuple=LArPedestals2Ntuple("LArPedestals2Ntuple")
    LArPedestals2Ntuple.AddFEBTempInfo=False
    topSequence+=LArPedestals2Ntuple
if doObj("AUTOCORR"):
    # NOTE(review): this reads the offline (Ofl) autocorr folder while every
    # other section uses ElecCalibMC -- confirm the folder is intended.
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibOfl/AutoCorrs/AutoCorr"))
    from LArCalibTools.LArCalibToolsConf import LArAutoCorr2Ntuple
    LArAutoCorr2Ntuple=LArAutoCorr2Ntuple("LArAutoCorr2Ntuple")
    LArAutoCorr2Ntuple.AddFEBTempInfo=False
    topSequence+=LArAutoCorr2Ntuple
if doObj("OFC"):
    # OFCs are computed on the fly from noise/autocorr via the OFC tool.
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/HVScaleCorr"))
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/Noise"))
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/AutoCorr"))
    from LArRecUtils.LArADC2MeVToolDefault import LArADC2MeVToolDefault
    from LArRecUtils.LArAutoCorrNoiseToolDefault import LArAutoCorrNoiseToolDefault
    theLArADC2MeVToolDefault = LArADC2MeVToolDefault()
    ToolSvc += theLArADC2MeVToolDefault
    theLArAutoCorrNoiseToolDefault = LArAutoCorrNoiseToolDefault()
    theLArAutoCorrNoiseToolDefault.NSamples = 5
    ToolSvc += theLArAutoCorrNoiseToolDefault
    from LArRecUtils.LArOFCToolDefault import LArOFCToolDefault
    theOFCTool = LArOFCToolDefault()
    theOFCTool.Dump=True
    ToolSvc += theOFCTool
    from LArCalibTools.LArCalibToolsConf import LArOFC2Ntuple
    LArOFC2Ntuple = LArOFC2Ntuple("LArOFC2Ntuple")
    LArOFC2Ntuple.ContainerKey = "LArOFC"
    LArOFC2Ntuple.AddFEBTempInfo=False
    LArOFC2Ntuple.IsMC = True
    LArOFC2Ntuple.OFCTool = theOFCTool
    topSequence+=LArOFC2Ntuple
if (doObj("SHAPE")):
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/Shape"))
    from LArCalibTools.LArCalibToolsConf import LArShape2Ntuple
    LArShape2Ntuple = LArShape2Ntuple("LArShape2Ntuple")
    LArShape2Ntuple.ContainerKey = "LArShape"
    LArShape2Ntuple.AddFEBTempInfo=False
    LArShape2Ntuple.IsMC = True
    topSequence+=LArShape2Ntuple
if doObj("RAMP"):
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/Ramp"))
    from LArCalibTools.LArCalibToolsConf import LArRamps2Ntuple
    LArRamps2Ntuple=LArRamps2Ntuple("LArRamps2Ntuple")
    LArRamps2Ntuple.NtupleName = "RAMPS"
    LArRamps2Ntuple.RawRamp = False
    LArRamps2Ntuple.IsMC = True
    LArRamps2Ntuple.AddFEBTempInfo=False
    topSequence+=LArRamps2Ntuple
if (doObj("UA2MEV")):
    print 'DAC2uA check : ',getDBFolderAndTag("/LAR/ElecCalibMC/DAC2uA")
    print 'uA2MeV check : ',getDBFolderAndTag("/LAR/ElecCalibMC/uA2MeV")
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/DAC2uA"))
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/uA2MeV"))
    from LArCalibTools.LArCalibToolsConf import LAruA2MeV2Ntuple
    LAruA2MeV2Ntuple=LAruA2MeV2Ntuple("LAruA2MeV2Ntuple")
    LAruA2MeV2Ntuple.AddFEBTempInfo=False
    topSequence+=LAruA2MeV2Ntuple
if (doObj("MPHYSOVERMCAL")):
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/MphysOverMcal"))
    from LArCalibTools.LArCalibToolsConf import LArMphysOverMcal2Ntuple
    LArMphysOverMcal2Ntuple=LArMphysOverMcal2Ntuple("LArMphysOverMcal2Ntuple")
    LArMphysOverMcal2Ntuple.AddFEBTempInfo=False
    LArMphysOverMcal2Ntuple.IsMC=True
    topSequence+=LArMphysOverMcal2Ntuple
if loadCastorCat:
    svcMgr.PoolSvc.ReadCatalog += ['xmlcatalog_file:'+'/afs/cern.ch/atlas/conditions/poolcond/catalogue/poolcond/PoolCat_comcond_castor.xml']
theApp.HistogramPersistency = "ROOT"
from GaudiSvc.GaudiSvcConf import NTupleSvc
svcMgr += NTupleSvc()
svcMgr.NTupleSvc.Output = [ "FILE1 DATAFILE='"+RootFile+"' OPT='NEW'" ]
svcMgr.MessageSvc.OutputLevel = DEBUG
svcMgr.IOVDbSvc.DBInstance="OFLP200"
| [
"[email protected]"
]
| |
b968c173c17ee7e0970eab6d6e3ee7ba4a7e8ab9 | ca77e9e45d666771c7b0897e7e3093b3d3c12f65 | /random_scripts/update_costs/update_wo_costs.py | 20ad1cd9856990dbe5c11c2b9e38c6a9d2275cf5 | []
| no_license | 2gDigitalPost/custom | 46175d3a3fc4c3be21dc20203ff0a48fb93b5639 | 6a3a804ef4ef6178044b70ad1e4bc5c56ab42d8d | refs/heads/master | 2020-04-04T07:40:17.962611 | 2016-12-28T18:35:28 | 2016-12-28T18:35:28 | 39,648,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,309 | py | import os, sys, math, hashlib, getopt, tacticenv, time
def kill_mul_spaces(origstrg):
    """Collapse every run of whitespace in origstrg to a single space.

    Note: the result carries one leading space whenever the input contains
    at least one word (callers strip it off afterwards); an input with no
    words yields the empty string.
    """
    words = origstrg.split()
    return ''.join(' ' + word for word in words)
def make_data_dict(file_name):
    """Parse a tab-separated export file into a dict keyed by its first column.

    The first line of the file holds the field names; every following line
    becomes one record where column 0 is the record's code (the outer dict
    key) and each remaining column is stored under its field name.  All
    field names and values have runs of internal whitespace collapsed and
    surrounding whitespace stripped.

    Args:
        file_name (str): path to the tab-separated input file.

    Returns:
        dict: {code: {field_name: value, ...}, ...}
    """
    fields = []
    data_dict = {}
    # "with" guarantees the handle is closed even if a parse error occurs
    # (the original left the file open on exceptions).
    with open(file_name, 'r') as the_file:
        for count, line in enumerate(the_file):
            line = line.rstrip('\r\n')
            data = line.split('\t')
            if count == 0:
                # Header row: normalise whitespace in each field name.
                # ' '.join(s.split()) == kill_mul_spaces(s).strip(' ').
                fields = [' '.join(field.split()) for field in data]
            else:
                this_code = ''
                for data_count, val in enumerate(data):
                    val = ' '.join(val.split())
                    if data_count == 0:
                        # First column is the record key.
                        data_dict[val] = {}
                        this_code = val
                    else:
                        data_dict[this_code][fields[data_count]] = val
    # print() form works under both Python 2 and Python 3 for a single arg
    # (the original py2-only "print x" statement is a SyntaxError on py3).
    print("FIELDS = %s" % fields)
    return data_dict
# Command-line setup: positional args 1-5 are paths to tab-separated exports
# of the work_order, task, login_group, login_in_group and work_hour tables.
# NOTE(review): getopt.getopt is handed a single string instead of an arg
# list, which effectively passes sys.argv[n] through unchanged — confirm
# this pass-through is intended.
opts, work_order_file = getopt.getopt(sys.argv[1], '-m')
print "work_order_file = %s" % work_order_file
opts, task_file = getopt.getopt(sys.argv[2], '-m')
print "task_file = %s" % task_file
opts, group_file = getopt.getopt(sys.argv[3], '-m')
print "group_file = %s" % group_file
opts, login_in_group_file = getopt.getopt(sys.argv[4], '-m')
print "login_in_group_file = %s" % login_in_group_file
opts, work_hour_file = getopt.getopt(sys.argv[5], '-m')
print "work_hour_file = %s" % work_hour_file
lookup_codes = {}
# Load each table into {code: {field: value}} via make_data_dict.
work_orders = make_data_dict(work_order_file)
#print "WORK ORDERS = %s" % work_orders
tasks = make_data_dict(task_file)
#print "TASKS = %s" % tasks
groups = make_data_dict(group_file)
#print "GROUPS = %s" % groups
login_in_groups = make_data_dict(login_in_group_file)
#print "LOGIN IN GROUPS = %s" % login_in_groups
work_hours = make_data_dict(work_hour_file)
#print "WORK HOURS = %s" % work_hours
work_order_codes = work_orders.keys()
task_codes = tasks.keys()
work_hour_codes = work_hours.keys()
# Accumulators: SQL UPDATE statements to emit, and human-readable problems.
out_lines = []
problem_lines = []
# For every non-retired work order: (1) recompute expected_cost from the
# estimated hours times the assigned work group's hourly rate, and
# (2) recompute actual_cost by summing each logged work-hour entry times
# the highest hourly rate among the logging user's login groups.
for woc in work_order_codes:
    #Expected first
    s_status = work_orders[woc]['s_status']
    if s_status not in ['retired','r']:
        work_group = work_orders[woc]['work_group']
        estimated_work_hours = work_orders[woc]['estimated_work_hours']
        if work_group not in [None,''] and estimated_work_hours not in [None,'',0,'0']:
            estimated_work_hours = float(estimated_work_hours)
            group_rate = groups[work_group]['hourly_rate']
            if group_rate not in [None,'']:
                group_rate = float(group_rate)
                new_expected_cost = float(estimated_work_hours * group_rate)
                out_lines.append("update work_order set expected_cost = '%s' where code = '%s';" % (new_expected_cost, woc))
            else:
                problem_lines.append("Work Order %s is incomplete. Work Group = %s, Est_WH = %s" % (woc, work_group, estimated_work_hours))
        # Actual cost: walk every work-hour record attached to this order's task.
        task_code = work_orders[woc]['task_code']
        if task_code not in [None,'']:
            summed_actual_cost = 0
            if task_code in task_codes:
                if tasks[task_code]['s_status'] not in ['retired','r']:
                    for whc in work_hour_codes:
                        if work_hours[whc]['task_code'] == task_code:
                            user = work_hours[whc]['login']
                            straight_time = work_hours[whc]['straight_time']
                            if straight_time not in [None,'',0,'0']:
                                straight_time = float(straight_time)
                                # Pick the user's highest-rate login group;
                                # the first membership seeds the choice.
                                group_chosen = ''
                                group_rate = 0
                                for lg in login_in_groups.keys():
                                    if login_in_groups[lg]['login'] == user:
                                        if group_chosen == '':
                                            group_chosen = login_in_groups[lg]['login_group']
                                            if group_chosen in groups.keys():
                                                group_rate = groups[group_chosen]['hourly_rate']
                                                if group_rate not in [None,'',0,'0.0']:
                                                    group_rate = float(group_rate)
                                                else:
                                                    group_rate = 0
                                        else:
                                            this_group = login_in_groups[lg]['login_group']
                                            if this_group in groups.keys():
                                                this_rate = groups[this_group]['hourly_rate']
                                                if this_rate not in [None,'',0,'0.0']:
                                                    this_rate = float(this_rate)
                                                else:
                                                    this_rate = 0
                                                if this_rate > group_rate:
                                                    group_rate = this_rate
                                                    group_chosen = this_group
                                if group_rate not in [None,'']:
                                    if group_rate == 0:
                                        # Rate of zero means we could not price
                                        # this entry: report, don't add 0.
                                        problem_lines.append("GROUP RATE WAS 0 for %s, user %s, group %s" % (whc, user, group_chosen))
                                    else:
                                        summed_actual_cost = summed_actual_cost + float(group_rate * straight_time)
            if summed_actual_cost not in [None,'']:
                out_lines.append("update work_order set actual_cost = '%s' where code = '%s';" % (summed_actual_cost, woc))
# Emit the generated SQL statements for review / manual application ...
out_file = open('work_order_cost_fix','w')
for ol in out_lines:
    out_file.write('%s\n' % ol)
out_file.close()
# ... and a companion report of records that could not be costed.
problem_file = open('work_order_cost_problems', 'w')
for pl in problem_lines:
    problem_file.write('%s\n' % pl)
problem_file.close()
| [
"[email protected]"
]
| |
0c0a1446e1f0184e7126eb177937b571e856de8d | 84a96dbd96e926ebb5c658e3cb897db276c32d6c | /tensorflow/python/ops/ragged/ragged_segment_op_test.py | d29708a5f5d98360502b4aef830d8d7c69c18c5c | [
"Apache-2.0"
]
| permissive | MothCreations/gavlanWheels | bc9189092847369ad291d1c7d3f4144dd2239359 | 01d8a43b45a26afec27b971f686f79c108fe08f9 | refs/heads/master | 2022-12-06T09:27:49.458800 | 2020-10-13T21:56:40 | 2020-10-13T21:56:40 | 249,206,716 | 6 | 5 | Apache-2.0 | 2022-11-21T22:39:47 | 2020-03-22T14:57:45 | C++ | UTF-8 | Python | false | false | 9,618 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ragged_range op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from absl.testing import parameterized
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_math_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import googletest
def prod(values):
  """Return the product of every element of `values` (1 for empty input)."""
  result = 1
  for factor in values:
    result = result * factor
  return result
def mean(values):
  """Return the arithmetic mean of `values` as a float."""
  return float(sum(values)) / len(values)
def sqrt_n(values):
  """Return sum(values) / sqrt(len(values)) (the "sqrt_n" combiner)."""
  return float(sum(values)) / math.sqrt(len(values))
@test_util.run_all_in_graph_and_eager_modes
class RaggedSegmentOpsTest(test_util.TensorFlowTestCase,
                           parameterized.TestCase):
  # Checks the ragged segment reductions (sum/prod/min/max/mean/sqrt_n)
  # against the pure-Python reference implementation in expected_value().
  def expected_value(self, data, segment_ids, num_segments, combiner):
    """Find the expected value for a call to ragged_segment_<aggregate>.
    Args:
      data: The input RaggedTensor, expressed as a nested python list.
      segment_ids: The segment ids, as a python list of ints.
      num_segments: The number of segments, as a python int.
      combiner: The Python function used to combine values.
    Returns:
      The expected value, as a nested Python list.
    """
    self.assertLen(data, len(segment_ids))
    # Build an empty (num_segments x ncols) "grouped" matrix
    ncols = max(len(row) for row in data)
    grouped = [[[] for _ in range(ncols)] for row in range(num_segments)]
    # Append values from data[row] to grouped[segment_ids[row]]
    for row in range(len(data)):
      for col in range(len(data[row])):
        grouped[segment_ids[row]][col].append(data[row][col])
    # Combine the values (empty cells are dropped, keeping rows ragged).
    return [[combiner(values)
             for values in grouped_row
             if values]
            for grouped_row in grouped]
  @parameterized.parameters(
      (ragged_math_ops.segment_sum, sum, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_sum, sum, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_sum, sum, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_sum, sum, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_prod, prod, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_prod, prod, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_prod, prod, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_prod, prod, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_min, min, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_min, min, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_min, min, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_min, min, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_max, max, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_max, max, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_max, max, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_max, max, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_mean, mean, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_mean, mean, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_mean, mean, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_mean, mean, [0, 0, 0, 10, 10, 10]),
  )
  def testRaggedSegment_Int(self, segment_op, combiner, segment_ids):
    # Integer inputs: exercises each op against its Python combiner for
    # contiguous, reversed, and sparse (gap-containing) segment ids.
    rt_as_list = [[0, 1, 2, 3], [4], [], [5, 6], [7], [8, 9]]
    rt = ragged_factory_ops.constant(rt_as_list)
    num_segments = max(segment_ids) + 1
    expected = self.expected_value(rt_as_list, segment_ids, num_segments,
                                   combiner)
    segmented = segment_op(rt, segment_ids, num_segments)
    self.assertAllEqual(segmented, expected)
  @parameterized.parameters(
      (ragged_math_ops.segment_sum, sum, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_sum, sum, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_sum, sum, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_sum, sum, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_prod, prod, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_prod, prod, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_prod, prod, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_prod, prod, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_min, min, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_min, min, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_min, min, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_min, min, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_max, max, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_max, max, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_max, max, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_max, max, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_mean, mean, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_mean, mean, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_mean, mean, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_mean, mean, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_sqrt_n, sqrt_n, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_sqrt_n, sqrt_n, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_sqrt_n, sqrt_n, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_sqrt_n, sqrt_n, [0, 0, 0, 10, 10, 10]),
  )
  def testRaggedSegment_Float(self, segment_op, combiner, segment_ids):
    # Float inputs: same cases as the int test plus segment_sqrt_n (which
    # is only defined for floats); uses assertAllClose for float tolerance.
    rt_as_list = [[0., 1., 2., 3.], [4.], [], [5., 6.], [7.], [8., 9.]]
    rt = ragged_factory_ops.constant(rt_as_list)
    num_segments = max(segment_ids) + 1
    expected = self.expected_value(rt_as_list, segment_ids, num_segments,
                                   combiner)
    segmented = segment_op(rt, segment_ids, num_segments)
    self.assertAllClose(segmented, expected)
  def testRaggedRankTwo(self):
    # Ragged-rank-2 data with scalar segment ids over the outermost rows.
    rt = ragged_factory_ops.constant([
        [[111, 112, 113, 114], [121],],  # row 0
        [],                              # row 1
        [[], [321, 322], [331]],         # row 2
        [[411, 412]]                     # row 3
        ])  # pyformat: disable
    segment_ids1 = [0, 2, 2, 2]
    segmented1 = ragged_math_ops.segment_sum(rt, segment_ids1, 3)
    expected1 = [[[111, 112, 113, 114], [121]],   # row 0
                 [],                              # row 1
                 [[411, 412], [321, 322], [331]]  # row 2
                ]  # pyformat: disable
    self.assertAllEqual(segmented1, expected1)
    segment_ids2 = [1, 2, 1, 1]
    segmented2 = ragged_math_ops.segment_sum(rt, segment_ids2, 3)
    expected2 = [[],
                 [[111+411, 112+412, 113, 114], [121+321, 322], [331]],
                 []]  # pyformat: disable
    self.assertAllEqual(segmented2, expected2)
  def testRaggedSegmentIds(self):
    # Segment ids can themselves be ragged, addressing inner rows.
    rt = ragged_factory_ops.constant([
        [[111, 112, 113, 114], [121],],  # row 0
        [],                              # row 1
        [[], [321, 322], [331]],         # row 2
        [[411, 412]]                     # row 3
        ])  # pyformat: disable
    segment_ids = ragged_factory_ops.constant([[1, 2], [], [1, 1, 2], [2]])
    segmented = ragged_math_ops.segment_sum(rt, segment_ids, 3)
    expected = [[],
                [111+321, 112+322, 113, 114],
                [121+331+411, 412]]  # pyformat: disable
    self.assertAllEqual(segmented, expected)
  def testShapeMismatchError1(self):
    # Ragged segment_ids with dense data is rejected outright.
    dt = constant_op.constant([1, 2, 3, 4, 5, 6])
    segment_ids = ragged_factory_ops.constant([[1, 2], []])
    self.assertRaisesRegexp(
        ValueError, 'segment_ids.shape must be a prefix of data.shape, '
        'but segment_ids is ragged and data is not.',
        ragged_math_ops.segment_sum, dt, segment_ids, 3)
    def testShapeMismatchError2(self):
      pass  # placeholder removed below; see real method definition
  def testShapeMismatchError2(self):
    rt = ragged_factory_ops.constant([
        [[111, 112, 113, 114], [121]],  # row 0
        [],                             # row 1
        [[], [321, 322], [331]],        # row 2
        [[411, 412]]                    # row 3
        ])  # pyformat: disable
    segment_ids = ragged_factory_ops.constant([[1, 2], [1], [1, 1, 2], [2]])
    # Error is raised at graph-building time if we can detect it then.
    self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        'segment_ids.shape must be a prefix of data.shape.*',
        ragged_math_ops.segment_sum, rt, segment_ids, 3)
    # Otherwise, error is raised when we run the graph.
    segment_ids2 = ragged_tensor.RaggedTensor.from_row_splits(
        array_ops.placeholder_with_default(segment_ids.values, None),
        array_ops.placeholder_with_default(segment_ids.row_splits, None))
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        'segment_ids.shape must be a prefix of data.shape.*'):
      self.evaluate(ragged_math_ops.segment_sum(rt, segment_ids2, 3))
if __name__ == '__main__':
  # Run all tests in this module under TensorFlow's test runner.
  googletest.main()
| [
"[email protected]"
]
| |
335a917f993a2444982d969d5168c22b7ae98d6d | 6d5fd2e7b9a66e17593a490a80e96e95d36436a3 | /src/profiles/signals.py | 81924a9d88eb4f0810a55822910c9384d971ed0a | []
| no_license | Husain-Jinia/Django-Report-Generator | f38276b47c263824b2f6794d793ff63872ba31fc | 277a06584e7d333d9380f213abc217e29ecafd17 | refs/heads/master | 2023-04-20T06:05:13.294836 | 2021-05-15T08:01:11 | 2021-05-15T08:01:11 | 365,412,503 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 376 | py | from .models import Profile
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
@receiver(post_save, sender=User)
def post_save_create_profile(sender, instance, created, **kwargs):
    """Create a matching Profile whenever a new User row is inserted.

    Connected to User's post_save signal, so it fires on every save; only
    the initial insert (created=True) triggers Profile creation, which
    keeps updates from producing duplicate profiles.

    Args:
        sender: the model class that sent the signal (User).
        instance: the User instance that was just saved.
        created: True when this save created the row, False on updates.
        **kwargs: remaining signal keyword arguments (unused).
    """
    # Removed the leftover debug print statements that ran on every User
    # save — they spammed stdout and served no purpose in production.
    if created:
        Profile.objects.create(user=instance)
"[email protected]"
]
| |
66a4c5f13e1dc79c7ef110ee7f36ab90411658d1 | 3a6cf9261ca5e58468622f49cfa109d65f7b4eda | /src/python/spikes/stereo.py | 3bfe579ce6e4881dbb41e9f01fcc2026dd9dddab | []
| no_license | romilly/cluster-hat | a6784f85da5287466a73ef61a0111063bcd171b1 | a872da5bfa6ab2cb666095ab6845bafa5d4badca | refs/heads/master | 2021-05-09T13:30:34.743067 | 2018-01-30T15:11:35 | 2018-01-30T15:11:35 | 119,036,856 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | import numpy as np
import cv2
from matplotlib import pyplot as plt
# Left/right views of the same scene, loaded as grayscale (imread flag 0).
imgL = cv2.imread('images/p1-image1.jpg',0)
imgR = cv2.imread('images/p2-image1.jpg',0)
# Block-matching stereo correspondence; numDisparities must be a multiple
# of 16 and blockSize is the (odd) matching-window size.
stereo = cv2.StereoBM_create(numDisparities=16, blockSize=15)
disparity = stereo.compute(imgL,imgR)
# Show the disparity map; nearer objects have larger disparity (brighter).
plt.imshow(disparity,'gray')
plt.show()
"[email protected]"
]
| |
95d13e0f751a416bc4b06580bcf2b908508684b6 | a1b8b807a389fd3971ac235e46032c0be4795ff1 | /Repo_Files/Zips/plugin.video.streamhub/resources/lib/sources/en/watchfree.py | 499eb10d07d5e83d78835d4d22adcf9be4794a51 | []
| no_license | sClarkeIsBack/StreamHub | 0cd5da4b3229592a4e2cf7ce3e857294c172aaba | 110983579645313b8b60eac08613435c033eb92d | refs/heads/master | 2020-05-23T09:09:54.898715 | 2020-02-29T12:15:32 | 2020-02-29T12:15:32 | 80,440,827 | 9 | 20 | null | 2017-10-04T07:32:52 | 2017-01-30T16:43:46 | Python | UTF-8 | Python | false | false | 8,483 | py | # -*- coding: utf-8 -*-
'''
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,base64
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import proxy
class source:
    # Scraper "source" for the watchfree.to mirrors (Python 2 codebase:
    # urllib.quote_plus / urlparse module / str.encode on results).
    def __init__(self):
        self.priority = 0
        self.language = ['en']
        self.domains = ['watchfree.to','watchfree.unblockall.org','www6-watchfree6-to.unblocked.lol']
        # NOTE(review): base_link is assigned twice; only the second mirror
        # is ever used — confirm the first assignment is dead on purpose.
        self.base_link = 'http://watchfree.unblockall.org'
        self.base_link = 'http://www6-watchfree6-to.unblocked.lol'
        self.moviesearch_link = '/?keyword=%s&search_section=1'
        self.tvsearch_link = '/?keyword=%s&search_section=2'
    def movie(self, imdb, title, localtitle, aliases, year):
        # Resolve a movie title+year to a site-relative URL, or None on
        # any failure (the bare except is the framework's convention).
        try:
            query = self.moviesearch_link % urllib.quote_plus(cleantitle.query(title))
            query = urlparse.urljoin(self.base_link, query)
            result = str(proxy.request(query, 'free movies'))
            # Pull a second results page when pagination is present.
            if 'page=2' in result or 'page%3D2' in result: result += str(proxy.request(query + '&page=2', 'free movies'))
            result = client.parseDOM(result, 'div', attrs = {'class': 'item'})
            title = 'watch' + cleantitle.get(title)
            # Accept the target year plus/minus one.
            years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
            result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title')) for i in result]
            result = [(i[0][0], i[1][0]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
            result = [i for i in result if any(x in i[1] for x in years)]
            r = [(proxy.parse(i[0]), i[1]) for i in result]
            # Exact title+year matches first; otherwise fall back to
            # probing candidate pages for the IMDB id.
            match = [i[0] for i in r if title == cleantitle.get(i[1]) and '(%s)' % str(year) in i[1]]
            match2 = [i[0] for i in r]
            match2 = [x for y,x in enumerate(match2) if x not in match2[:y]]
            if match2 == []: return
            for i in match2[:5]:
                try:
                    if len(match) > 0: url = match[0] ; break
                    r = proxy.request(urlparse.urljoin(self.base_link, i), 'free movies')
                    r = re.findall('(tt\d+)', r)
                    if imdb in r: url = i ; break
                except:
                    pass
            url = re.findall('(?://.+?|)(/.+)', url)[0]
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
    def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
        # Same matching strategy as movie(), but against the TV search
        # section; returns the show's site-relative URL or None.
        try:
            query = self.tvsearch_link % urllib.quote_plus(cleantitle.query(tvshowtitle))
            query = urlparse.urljoin(self.base_link, query)
            result = str(proxy.request(query, 'free movies'))
            if 'page=2' in result or 'page%3D2' in result: result += str(proxy.request(query + '&page=2', 'free movies'))
            result = client.parseDOM(result, 'div', attrs = {'class': 'item'})
            tvshowtitle = 'watch' + cleantitle.get(tvshowtitle)
            years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
            result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title')) for i in result]
            result = [(i[0][0], i[1][0]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
            result = [i for i in result if any(x in i[1] for x in years)]
            r = [(proxy.parse(i[0]), i[1]) for i in result]
            match = [i[0] for i in r if tvshowtitle == cleantitle.get(i[1]) and '(%s)' % str(year) in i[1]]
            match2 = [i[0] for i in r]
            match2 = [x for y,x in enumerate(match2) if x not in match2[:y]]
            if match2 == []: return
            for i in match2[:5]:
                try:
                    if len(match) > 0: url = match[0] ; break
                    r = proxy.request(urlparse.urljoin(self.base_link, i), 'free movies')
                    r = re.findall('(tt\d+)', r)
                    if imdb in r: url = i ; break
                except:
                    pass
            url = re.findall('(?://.+?|)(/.+)', url)[0]
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        # Resolve one episode page from the show page found by tvshow():
        # match by episode title, then air date, then season/episode slug.
        try:
            if url == None: return
            url = urlparse.urljoin(self.base_link, url)
            result = proxy.request(url, 'tv_episode_item')
            result = client.parseDOM(result, 'div', attrs = {'class': 'tv_episode_item'})
            title = cleantitle.get(title)
            # Reformat ISO "YYYY-MM-DD" into the site's "Month D YYYY".
            premiered = re.compile('(\d{4})-(\d{2})-(\d{2})').findall(premiered)[0]
            premiered = '%s %01d %s' % (premiered[1].replace('01','January').replace('02','February').replace('03','March').replace('04','April').replace('05','May').replace('06','June').replace('07','July').replace('08','August').replace('09','September').replace('10','October').replace('11','November').replace('12','December'), int(premiered[2]), premiered[0])
            result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'span', attrs = {'class': 'tv_episode_name'}), client.parseDOM(i, 'span', attrs = {'class': 'tv_num_versions'})) for i in result]
            result = [(i[0], i[1][0], i[2]) for i in result if len(i[1]) > 0] + [(i[0], None, i[2]) for i in result if len(i[1]) == 0]
            result = [(i[0], i[1], i[2][0]) for i in result if len(i[2]) > 0] + [(i[0], i[1], None) for i in result if len(i[2]) == 0]
            result = [(i[0][0], i[1], i[2]) for i in result if len(i[0]) > 0]
            url = [i for i in result if title == cleantitle.get(i[1]) and premiered == i[2]][:1]
            if len(url) == 0: url = [i for i in result if premiered == i[2]]
            if len(url) == 0 or len(url) > 1: url = [i for i in result if 'season-%01d-episode-%01d' % (int(season), int(episode)) in i[0]]
            url = url[0][0]
            url = proxy.parse(url)
            url = re.findall('(?://.+?|)(/.+)', url)[0]
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
    def sources(self, url, hostDict, hostprDict):
        # Scrape the hoster links table of a movie/episode page; the real
        # target URL is base64-encoded in the "gtfo" query parameter.
        try:
            sources = []
            if url == None: return sources
            url = urlparse.urljoin(self.base_link, url)
            result = proxy.request(url, 'link_ite')
            links = client.parseDOM(result, 'table', attrs = {'class': 'link_ite.+?'})
            for i in links:
                try:
                    url = client.parseDOM(i, 'a', ret='href')
                    url = [x for x in url if 'gtfo' in x][-1]
                    url = proxy.parse(url)
                    url = urlparse.parse_qs(urlparse.urlparse(url).query)['gtfo'][0]
                    url = base64.b64decode(url)
                    url = client.replaceHTMLCodes(url)
                    url = url.encode('utf-8')
                    # Only keep hosters the resolver framework knows about.
                    host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
                    if not host in hostDict: raise Exception()
                    host = host.encode('utf-8')
                    quality = client.parseDOM(i, 'div', attrs = {'class': 'quality'})
                    if any(x in ['[CAM]', '[TS]'] for x in quality): quality = 'CAM'
                    else: quality = 'SD'
                    quality = quality.encode('utf-8')
                    sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'direct': False, 'debridonly': False})
                except:
                    pass
            return sources
        except:
            return sources
    def resolve(self, url):
        # Links are already direct hoster URLs; nothing further to resolve.
        return url
| [
"[email protected]"
]
| |
a64456fc0046b8a42ad60ddaa19ba450e3c4bfac | 4a7804ee05485c345b4e3c39a0c96ed4012542ac | /editor/emacs/emacs-python/actions.py | b0523c81855ca5f5c430977b55c02381e42c60ee | []
| no_license | Erick-Pardus/Pardus | 1fef143c117c62a40e3779c3d09f5fd49b5a6f5c | 2693e89d53304a216a8822978e13f646dce9b1d3 | refs/heads/master | 2020-12-31T02:49:33.189799 | 2013-03-17T06:29:33 | 2013-03-17T06:29:33 | 17,247,989 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/copyleft/gpl.txt.
from pisi.actionsapi import pisitools
from pisi.actionsapi import get
# Directory inside the upstream tarball that the pisi actions operate in.
WorkDir = "python-mode.el-6.0.11"
def install():
    # Install the package's Emacs Lisp into the site-lisp tree.
    # presumably insinto(dest, glob, dest_name) installs the matching *.el
    # as python-mode.el — TODO confirm against the pisi actionsapi docs.
    pisitools.insinto("/usr/share/emacs/site-lisp/python", "*.el", "python-mode.el")
| [
"[email protected]"
]
| |
0653972e0dd62e235f1b6c73af6da5b96e246c6f | 1a812d520fa0788864cab3c6bbd4e2ba0e8872c2 | /employeedataandprintthatdata.py | d97719e66d1ee36ecddc97ae0f16f35d728b4462 | []
| no_license | manutdmohit/pythonprogramexamples | b6f6906a6169ad2ecd9b16d95495474d570b065e | 06ac4af8ce13872bbe843175a61d7ad77e0f92b6 | refs/heads/main | 2023-01-14T13:14:57.468947 | 2020-11-25T05:39:01 | 2020-11-25T05:39:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | eno=int(input('Enter employee number:'))
ename = input('Enter employee name:')
esal = float(input('Enter employee salary:'))
eaddr = input('Enter employee address:')
# BUG FIX: the original used bool(input(...)), which is True for ANY
# non-empty reply — even the literal text "False" — because non-empty
# strings are truthy.  Interpret the answer text explicitly instead.
married = input('Employee married?[True/False]:').strip().lower() in ('true', 'yes', 'y', '1')
# Echo the collected record back for confirmation.
print('Please confirm your provided information')
print('Employee Number:', eno)
print('Employee Name:', ename)
print('Employee Salary:', esal)
print('Employee Address:', eaddr)
print('Employee Married?:', married)
| [
"[email protected]"
]
| |
961781e9a4421f843daec46bf7d27a5b190cffc6 | 989b3499948137f57f14be8b2c77d0610d5975e6 | /python-package/daily_study/python/question_python(resolved)/chapter4_conditional_and_loops(완결)/i_is_member.py | fb8ea88f0fd87a269fb0ec00839eb849b2386979 | []
| no_license | namkiseung/python_BasicProject | 76b4c070934ad4cb9d16ce844efa05f64fb09ac0 | 460d05248b2d1431624aba960e28bece888643e4 | refs/heads/master | 2022-12-13T21:12:06.865241 | 2020-04-23T01:30:08 | 2020-04-23T01:30:08 | 142,980,920 | 1 | 1 | null | 2022-12-08T02:27:40 | 2018-07-31T07:49:17 | Python | UTF-8 | Python | false | false | 550 | py | # -*- coding: utf-8 -*-
def is_member(member_list, mem):
    """Return True when `mem` occurs in `member_list`, otherwise False.

    sample in/out:
    is_member([1, 5, 8, 3], 3) -> True
    is_member([5, 8, 3], -1) -> False
    """
    for candidate in member_list:
        if candidate == mem:
            return True
    return False
if __name__ == "__main__":
    # Smoke-test with the sample inputs (Python 2 print statements).
    print is_member([1, 5, 8, 3], 3)# -> True
    print is_member([5, 8, 3], -1) #-> False
    pass
| [
"[email protected]"
]
| |
4f2d7e9a93ccb1c73bfa12146ad9add11e573b27 | d07a26e443538c5fc6b0711aff6e233daef79611 | /LearnPythonGuessGame.py | e3a41526a4b12716d27871e2464f08f1855a7ba6 | []
| no_license | Zahidsqldba07/Python-learn | bd602d490ee53f8e5331e70f92919ca315944ff9 | ffc1608695ed6c7c3d2b6789913e34235dcf468e | refs/heads/master | 2023-03-16T02:18:19.155281 | 2020-09-19T09:12:48 | 2020-09-19T09:12:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 538 | py | secret_word = "respect"
guess = ''
guess_count = 0
guess_limit = 7
out_of_guesses = False
# Prompt until the player names the secret word or exhausts the limit.
while guess != secret_word and not (out_of_guesses):
    if guess_count < guess_limit:
        guess = input("What's the secret word?: ")
        guess_count += 1
        if guess != secret_word:
            # Reveal one more letter of the secret word per wrong guess
            # (letter index = guesses used so far - 1).
            print("Hint: " + secret_word[int(guess_count)-1])
    else:
        out_of_guesses = True
if out_of_guesses:
    print("All out of guesses, better luck next time!")
    exit()
else:
    print("Nice work!")
    exit()
"[email protected]"
]
| |
1817dddcfb6a350fe4323472755486725543c750 | d70db722710bccf7a834e8e4acdb376b151b20a1 | /apps/finances/models.py | 0f4b847dc96b1d4ee9872b62f624905c17cde98f | []
| no_license | intentaware/Vader | b0d433f640b244d592126b2713506d214dc1d287 | 54d5d799beab1fc5cef99fb90d4e50e00720bfe0 | refs/heads/master | 2021-01-20T07:07:11.393929 | 2017-12-06T19:16:53 | 2017-12-06T19:16:53 | 30,995,526 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,356 | py | import shortuuid
from django.db import models
from django.utils.text import slugify, Truncator
from django.contrib.postgres.fields import JSONField
from django_extensions.db.fields import ShortUUIDField
from apps.common.models import *
from apps.common.utils.money import convert_to_cents
from .mixins import Stripe, CURRENCY_CHOICES
class BasePaymentModel(Stripe, TimeStamped):
    """Basic Payment Model, inherits Stripe model, will be used for multiple
    Attributes:
        amount (Decimal): total amount charged to customer
        attempted_on (Time): time on which the charge was attempted
        attempts (Int): Number of times we tried to charge
        charged_on (Time): If charge was succesful, populate the field with current time
        gateway_response (Json): Response from the server
        is_paid (Bool): if charge was succesful
        service_charges (Decimal): Service charges if any, amount is inclusive of service_charges
        taxes (Decimal): Taxes if any, Note: amount is inclusive of taxes
    """
    amount = models.DecimalField(default=0.00, max_digits=20, decimal_places=4)
    # ISO-style currency code; choices come from the Stripe mixin module.
    currency = models.CharField(
        max_length=4,
        choices=CURRENCY_CHOICES,
        default='USD'
    )
    attempts = models.IntegerField(default=0)
    #service charges
    service_charges = models.DecimalField(
        default=0.00,
        max_digits=20,
        decimal_places=4
    )
    taxes = models.DecimalField(default=0.0, max_digits=20, decimal_places=4)
    #total_amount = models.DecimalField(default=0.00, max_digits=20, decimal_places=4)
    # extra timestamps
    attempted_on = models.DateTimeField(blank=True, null=True)
    charged_on = models.DateTimeField(blank=True, null=True)
    # json mapped response from stripe
    gateway_response = JSONField(default={})
    is_paid = models.BooleanField(default=False)
    class Meta:
        # Abstract base: no table of its own; concrete payment models
        # (e.g. Invoice) inherit these fields.
        abstract = True
    @property
    def line_items_total(self):
        # `amount` is inclusive of service charges and taxes, so subtract
        # both to recover the bare line-item total.
        return self.amount - self.service_charges - self.taxes
class Invoice(BasePaymentModel):
    """Concrete payment record charged to a company."""
    # Identifier returned by Stripe for this charge; blank until charged.
    stripe_id = models.CharField(
        max_length=256,
        blank=True,
        null=True,
        help_text='id obtained from stripe'
    )
    company = models.ForeignKey('companies.Company', related_name='invoices')
class Module(TimeStamped):
    """A named product feature module, grouped into one of three segments."""
    # Segment constants (stored as ints in the DB).
    [CORE, DMP, REPORTING] = range(3)
    SEGMENT_CHOICES = [
        (CORE, 'Core'),
        (DMP, 'Data Management Platform'),
        (REPORTING, 'Reporting'),
    ]
    name = models.CharField(max_length=128, help_text='The name of the module')
    segment = models.IntegerField(
        choices=SEGMENT_CHOICES,
        default=CORE,
        help_text='The segment it is part of'
    )
    def __unicode__(self):
        # Python 2 string representation (this codebase targets py2 Django).
        return self.name
class Plan(TimeStamped, Stripe):
    """A subscription plan, optionally mirrored as a recurring Stripe plan.

    Plans with ``interval == UNTIL_EXPIRY`` are one-off and are never
    pushed to Stripe (``stripe_dictionary`` returns None for them).
    """
    # Billing interval constants; indexes into INTERVAL_CHOICES.
    [UNTIL_EXPIRY, DAY, WEEK, MONTH, YEAR] = range(5)
    INTERVAL_CHOICES = [
        (UNTIL_EXPIRY, 'untill expiry'),
        (DAY, 'day'),
        (WEEK, 'week'),
        (MONTH, 'month'),
        (YEAR, 'year'),
    ]
    amount = models.DecimalField(default=0.00, max_digits=20, decimal_places=2)
    currency = models.CharField(
        max_length=4,
        choices=CURRENCY_CHOICES,
        default='USD'
    )
    name = models.CharField(max_length=128)
    interval = models.IntegerField(
        choices=INTERVAL_CHOICES,
        default=UNTIL_EXPIRY
    )
    modules = models.ManyToManyField(Module, through='finances.PlanModule')
    # For both limits, 0 means unlimited.
    limit_campaigns = models.IntegerField(
        default=0,
        help_text='0 means unlimited'
    )
    limit_impressions = models.IntegerField(
        default=0,
        help_text='0 means unlimited'
    )
    # Doubles as the Stripe plan id; regenerated in save() when the price
    # or currency changes.
    stripe_id = ShortUUIDField(blank=True, null=True)
    def __unicode__(self):
        return self.name
    def save(self, *args, **kwargs):
        """Override the default save to hook the plans with Stripe.

        If the plan exists on Stripe but its amount or currency differ
        from the local values, a brand new row (``self.id = None``) with
        a fresh ``stripe_id`` is created and pushed to Stripe.  If Stripe
        does not know the id, the plan is created remotely.

        Args:
            *args: arguments, normally plain arguments
            **kwargs: Keyword arguments

        Returns:
            name (obj): Django Plan model object
        """
        plan = None
        sd = self.stripe_dictionary
        # Only sync plans that are Stripe-eligible (sd is None for
        # UNTIL_EXPIRY plans) and already carry an id.
        if sd and self.stripe_id:
            try:
                plan = self.stripe_plan
                # Stripe stores amounts in cents; compare amount and
                # currency to detect a price change.
                if int(plan.amount) != convert_to_cents(
                    self.amount
                ) or self.currency.lower() != plan.currency:
                    print 'not equal, creating new account'
                    self.stripe_id = shortuuid.uuid()
                    self.id = None
                    self.create_stripe_plan()
            except self._stripe.error.InvalidRequestError:
                # Stripe does not know this id yet: create it remotely.
                self.create_stripe_plan()
        return super(Plan, self).save(*args, **kwargs)
    class Meta:
        ordering = ['amount']
    def create_stripe_plan(self, *args, **kwargs):
        # Create the remote plan from the local definition.
        return self._stripe.Plan.create(**self.stripe_dictionary)
    @property
    def stripe_plan(self):
        # Fetch the remote plan; raises InvalidRequestError when the id
        # is unknown to Stripe (handled in save()).
        return self._stripe.Plan.retrieve(self.stripe_id)
    def features(self):
        """Return every module grouped by segment label, each dict
        carrying an ``is_included`` flag for membership in this plan."""
        from itertools import groupby
        modules = Module.objects.all().values('id', 'name', 'segment')
        plan_modules = self.modules.all().values('id', 'name', 'segment')
        for m in modules:
            if m in plan_modules:
                m['is_included'] = True
            else:
                m['is_included'] = False
        doc = dict()
        # NOTE(review): groupby only merges *adjacent* equal keys, so this
        # assumes `modules` is ordered by segment -- confirm Module's
        # default ordering, otherwise a segment can appear split.
        for k, v in groupby(modules, lambda x: x['segment']):
            doc[Module.SEGMENT_CHOICES[k][1]] = list(v)
        return doc
    @property
    def stripe_dictionary(self):
        """Payload for Stripe's ``Plan.create``, or None for one-off
        (``interval == 0``) plans, which are never mirrored on Stripe."""
        doc = None
        if not self.interval == 0:
            doc = {
                'id': self.stripe_id,
                'name': '{name} ({currency})'.format(
                    name=self.name,
                    currency=self.currency
                ),
                'amount': convert_to_cents(self.amount),
                'currency': self.currency,
                'interval': self.INTERVAL_CHOICES[self.interval][1],
                # Truncated to 22 chars -- presumably Stripe's statement
                # descriptor length limit; confirm against Stripe docs.
                'statement_descriptor': Truncator(
                    'IA: {name}'.format(
                        name=self.name
                    )
                ).chars(22)
            }
        return doc
class PlanModule(TimeStamped):
    """Through model of the Plan <-> Module many-to-many relation."""
    plan = models.ForeignKey(Plan)
    module = models.ForeignKey(Module)
    class Meta:
        # A module can be linked to a given plan at most once.
        unique_together = ['plan', 'module']
| [
"[email protected]"
]
| |
a7a66ee6bfc9b3d26e5dbb4a0a9df8f27b2a72e3 | 4c44c593048fa4e00fb0334209632a286886efd9 | /sale_business_unit/models/product_business_unit.py | df6f50b9832b5d5adf851f1930983b0a7f67bcba | []
| no_license | treytux/trey-addons | 0c3fec43c584d46bd299b4bca47dcc334bedca60 | 1cda42c0eae702684badce769f9ec053c59d6e42 | refs/heads/12.0 | 2023-06-08T21:56:09.945084 | 2023-05-29T10:05:53 | 2023-05-29T10:05:53 | 114,281,765 | 19 | 49 | null | 2023-05-29T10:05:55 | 2017-12-14T18:10:39 | Python | UTF-8 | Python | false | false | 8,333 | py | ###############################################################################
# For copyright and license notices, see __manifest__.py file in root directory
###############################################################################
from datetime import date
from odoo import _, fields, models
class ProductBusinessUnit(models.Model):
    """Extend product business units with sales and invoicing KPIs.

    All figures are derived from the sale order lines / invoice lines of
    the products assigned to the unit (``product_id.unit_id``), and feed
    the unit's kanban dashboard (counters, monthly target, graph).
    """
    _inherit = 'product.business.unit'
    # --- Sales KPIs, computed from sale.order.line (see _compute_sales) ---
    quotation_count = fields.Integer(
        compute='_compute_sales',
        string='Quotations',
        readonly=True,
    )
    quotation_order_count = fields.Integer(
        compute='_compute_sales',
        string='Quotation Orders',
        readonly=True,
    )
    quotation_amount = fields.Float(
        compute='_compute_sales',
        string='Quotations Revenues',
        readonly=True,
    )
    sale_count = fields.Integer(
        compute='_compute_sales',
        string='Sales',
        readonly=True,
    )
    sale_order_count = fields.Integer(
        compute='_compute_sales',
        string='Sale Orders',
        readonly=True,
    )
    sale_amount = fields.Float(
        compute='_compute_sales',
        string='Sales Revenues',
        readonly=True,
    )
    # --- Invoicing KPIs, computed from account.invoice.line ---
    invoice_count = fields.Integer(
        compute='_compute_invoices',
        string='Sales',
        readonly=True,
    )
    invoice_order_count = fields.Integer(
        compute='_compute_invoices',
        string='Sale Orders',
        readonly=True,
    )
    invoice_amount = fields.Float(
        compute='_compute_invoices',
        string='Sales Revenues',
        readonly=True,
    )
    # Extend the dashboard graph data sources with sales and invoices.
    dashboard_graph_model = fields.Selection(
        selection_add=[
            ('sale.report', 'Sales'),
            ('account.invoice.report', 'Invoices'),
        ],
    )
    invoiced = fields.Integer(
        compute='_compute_invoices',
        string='Invoiced This Month',
        readonly=True,
        help=(
            'Invoice revenue for the current month. This is the amount the '
            'sales unit has invoiced this month. It is used to compute the '
            'progression ratio of the current and target revenue on the '
            'kanban view.'
        ),
    )
    invoiced_target = fields.Integer(
        string='Invoicing Target',
        help=(
            'Target of invoice revenue for the current month. This is the '
            'amount the sales unit estimates to be able to invoice this '
            'month.'
        ),
    )
    def _compute_sales(self):
        """Split the unit's sale order lines into quotations
        (draft/sent) and confirmed sales (sale/done) and derive line
        counts, distinct order counts and untaxed revenue for each."""
        for unit in self:
            lines = self.env['sale.order.line'].search([
                ('product_id', '!=', False),
                ('product_id.unit_id', '=', unit.id)])
            quotation_lines = lines.filtered(
                lambda l: l.order_id.state in ['draft', 'sent'])
            sale_lines = lines.filtered(
                lambda l: l.order_id.state in ['sale', 'done'])
            unit.quotation_count = len(quotation_lines)
            unit.quotation_order_count = len(
                quotation_lines.mapped('order_id'))
            unit.quotation_amount = sum(
                quotation_lines.mapped('price_subtotal'))
            unit.sale_count = len(sale_lines)
            unit.sale_order_count = len(
                sale_lines.mapped('order_id'))
            unit.sale_amount = sum(sale_lines.mapped('price_subtotal'))
    def _compute_invoices(self):
        """Derive invoice line/document counts and untaxed totals from
        posted invoice lines (state not draft/cancel), plus the
        current-month invoiced total used against invoiced_target."""
        for unit in self:
            lines = self.env['account.invoice.line'].search([
                ('invoice_id.state', 'not in', ['cancel', 'draft']),
                ('product_id', '!=', False),
                ('product_id.unit_id', '=', unit.id)])
            unit.invoice_count = len(lines)
            unit.invoice_amount = sum(lines.mapped('price_subtotal'))
            invoices = lines.mapped('invoice_id')
            unit.invoice_order_count = len(invoices)
            # Invoices dated between the 1st of this month and today.
            month_invoices = invoices.filtered(
                lambda i:
                i.date <= date.today()
                and i.date >= date.today().replace(day=1)
            )
            unit.invoiced = sum(month_invoices.mapped('amount_untaxed_signed'))
    def update_invoiced_target(self, value):
        # Called from the kanban view; persist the monthly target as int.
        return self.write({'invoiced_target': round(float(value or 0))})
    def action_view_quotation_lines(self):
        # Open the list of quotation (draft/sent) lines for this unit.
        self.ensure_one()
        lines = self.env['sale.order.line'].search([
            ('order_id.state', 'in', ['draft', 'sent']),
            ('product_id', '!=', False),
            ('product_id.unit_id', '=', self.id)])
        action = self.env.ref(
            'sale_business_unit.sale_order_line_quotation_action').read()[0]
        action['domain'] = [('id', 'in', lines.ids)]
        return action
    def action_view_sale_lines(self):
        # Open the list of confirmed sale lines for this unit.
        self.ensure_one()
        lines = self.env['sale.order.line'].search([
            ('order_id.state', 'in', ['sale', 'done']),
            ('product_id', '!=', False),
            ('product_id.unit_id', '=', self.id)])
        action = self.env.ref(
            'sale_business_unit.sale_order_line_sale_action').read()[0]
        action['domain'] = [('id', 'in', lines.ids)]
        return action
    def action_view_invoice_lines(self):
        # Open the list of posted invoice lines for this unit.
        self.ensure_one()
        lines = self.env['account.invoice.line'].search([
            ('invoice_id.state', 'not in', ['cancel', 'draft']),
            ('product_id', '!=', False),
            ('product_id.unit_id', '=', self.id)])
        action = self.env.ref(
            'sale_business_unit.account_invoice_line_action').read()[0]
        action['domain'] = [('id', 'in', lines.ids)]
        return action
    def action_view_quotation(self):
        # Open the quotations (documents, not lines) touching this unit.
        self.ensure_one()
        lines = self.env['sale.order.line'].search([
            ('order_id.state', 'in', ['draft', 'sent']),
            ('product_id', '!=', False),
            ('product_id.unit_id', '=', self.id)])
        action = self.env.ref('sale.action_quotations').read()[0]
        action.update({
            'domain': [('id', 'in', lines.mapped('order_id').ids)],
            'context': {},
        })
        return action
    def action_view_sale(self):
        # Open the confirmed sale orders touching this unit.
        self.ensure_one()
        lines = self.env['sale.order.line'].search([
            ('product_id', '!=', False),
            ('product_id.unit_id', '=', self.id)])
        sale_lines = lines.filtered(
            lambda l: l.order_id.state in ['sale', 'done'])
        action = self.env.ref('sale.action_orders').read()[0]
        action.update({
            'domain': [('id', 'in', sale_lines.mapped('order_id').ids)],
            'context': {},
        })
        return action
    def action_view_invoice(self):
        # Open the posted invoices touching this unit.
        self.ensure_one()
        lines = self.env['account.invoice.line'].search([
            ('product_id', '!=', False),
            ('product_id.unit_id', '=', self.id)])
        invoice_lines = lines.filtered(
            lambda l: l.invoice_id.state not in ['cancel', 'draft'])
        action = self.env.ref('account.action_invoice_tree1').read()[0]
        action.update({
            'domain': [('id', 'in', invoice_lines.mapped('invoice_id').ids)],
            'context': {},
        })
        return action
    # --- Dashboard graph hooks, keyed on dashboard_graph_model ---
    def _graph_date_column(self):
        if self.dashboard_graph_model == 'sale.report':
            return 'confirmation_date'
        elif self.dashboard_graph_model == 'account.invoice.report':
            return 'date'
        return super()._graph_date_column()
    def _graph_y_query(self):
        if self.dashboard_graph_model == 'sale.report':
            return 'SUM(price_subtotal)'
        elif self.dashboard_graph_model == 'account.invoice.report':
            return 'SUM(price_total)'
        return super()._graph_y_query()
    def _extra_sql_conditions(self):
        if self.dashboard_graph_model == 'sale.report':
            return "AND state in ('sale', 'done')"
        elif self.dashboard_graph_model == 'account.invoice.report':
            return "AND state in ('open', 'in_payment', 'paid')"
        return super()._extra_sql_conditions()
    def _graph_title_and_key(self):
        if self.dashboard_graph_model == 'sale.report':
            return ['', _('Sales: Untaxed Total')]
        elif self.dashboard_graph_model == 'account.invoice.report':
            return ['', _('Invoices: Untaxed Total')]
        return super()._graph_title_and_key()
| [
"[email protected]"
]
| |
cba4f7f7a86bbd2c06e4b92208fe3e95d44f31ac | 99b84337ae66ad2877544fd158f20e7f4cd96520 | /day01-10/day04/晚间作业/4_求分数和.py | 88cd4c266f76603fc47dbe64b8a9ae5e47a016b0 | []
| no_license | jiajiabin/python_study | cf145d54cabce2cb98914b3448ed7d0e5c1c146c | b4faaff26ee9728af2e80942ba6a7c7f6a8b0f86 | refs/heads/master | 2020-06-21T21:31:26.034978 | 2019-08-26T11:39:34 | 2019-08-26T11:39:34 | 197,556,254 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 93 | py | #求1/1 + 1/2 + 1/3 + 1/10 的值
# Sum of the harmonic series 1/1 + 1/2 + ... + 1/10.
a = 10
# sum() over a generator replaces the manual accumulation loop.  True
# division is required here (under Python 2, 1 / i would truncate to 0
# for every i > 1).
b = sum(1 / i for i in range(1, a + 1))
print(b)
"[email protected]"
]
| |
19261cb62700033a9cef08d8687bae4821b6f92d | 21569b68b510b55bdc2acb1ff5ae521b31d44a79 | /bin/pyrsa-encrypt-bigfile | 9afaf7317207ef369910d93588778e7aefc825d6 | []
| no_license | howarder3/Rpi3_study | a99faef434ae4f751d4d9f339aca918186f7cb3e | 533ba60ae4d11b5e3cebc12283e067ccee5a5cfd | refs/heads/master | 2020-03-18T18:11:01.030936 | 2018-05-27T20:46:40 | 2018-05-27T20:46:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 242 | #!/home/pi/myenv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from rsa.cli import encrypt_bigfile
# Console-script entry point generated for the ``rsa`` package.
if __name__ == '__main__':
    # Normalise argv[0]: strip setuptools wrapper suffixes ('-script.py',
    # '-script.pyw', '.exe') so usage/help output shows the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    # Delegate to rsa's CLI; its return value becomes the process exit status.
    sys.exit(encrypt_bigfile())
| [
"[email protected]"
]
| ||
667907153fb3690183536d53d10538fd0e5ee2f8 | bfc25f1ad7bfe061b57cfab82aba9d0af1453491 | /data/external/repositories_2to3/197978/Grasp-and-lift-EEG-challenge-master/genInfos.py | 3fe287f7ae615d7d863ba13934411a5cad7ad2b9 | [
"MIT"
]
| permissive | Keesiu/meta-kaggle | 77d134620ebce530d183467202cf45639d9c6ff2 | 87de739aba2399fd31072ee81b391f9b7a63f540 | refs/heads/master | 2020-03-28T00:23:10.584151 | 2018-12-20T19:09:50 | 2018-12-20T19:09:50 | 147,406,338 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,710 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Aug 13 21:35:28 2015.
@author: fornax
"""
import numpy as np
import pandas as pd
from glob import glob
from mne import concatenate_raws
from preprocessing.aux import creat_mne_raw_object
# #### define lists #####
# 12 subjects in the dataset; per-subject results are accumulated in the
# *_tot lists and concatenated at the end.
subjects = list(range(1, 13))
lbls_tot = []
subjects_val_tot = []
series_val_tot = []
ids_tot = []
subjects_test_tot = []
series_test_tot = []
# #### generate predictions #####
for subject in subjects:
    print('Loading data for subject %d...' % subject)
    # ############### READ DATA ###############################################
    fnames = glob('data/train/subj%d_series*_data.csv' % (subject))
    fnames.sort()
    # The last two training series (7 & 8) are held out for validation.
    fnames_val = fnames[-2:]
    fnames_test = glob('data/test/subj%d_series*_data.csv' % (subject))
    fnames_test.sort()
    raw_val = concatenate_raws([creat_mne_raw_object(fname, read_events=True)
                                for fname in fnames_val])
    raw_test = concatenate_raws([creat_mne_raw_object(fname, read_events=False)
                                for fname in fnames_test])
    # extract labels for series 7&8
    # Channels 0-31 are EEG data; everything from index 32 on holds the
    # event labels appended by creat_mne_raw_object(read_events=True).
    labels = raw_val._data[32:]
    lbls_tot.append(labels.transpose())
    # aggregate infos for validation (series 7&8)
    raw_series7 = creat_mne_raw_object(fnames_val[0])
    raw_series8 = creat_mne_raw_object(fnames_val[1])
    series = np.array([7] * raw_series7.n_times +
                      [8] * raw_series8.n_times)
    series_val_tot.append(series)
    subjs = np.array([subject]*labels.shape[1])
    subjects_val_tot.append(subjs)
    # aggregate infos for test (series 9&10)
    ids = np.concatenate([np.array(pd.read_csv(fname)['id'])
                          for fname in fnames_test])
    ids_tot.append(ids)
    # NOTE(review): lexicographic sort puts 'series10' before 'series9',
    # hence index [0] -> series 10 and [1] -> series 9 -- confirm the
    # file naming before changing this.
    raw_series9 = creat_mne_raw_object(fnames_test[1], read_events=False)
    raw_series10 = creat_mne_raw_object(fnames_test[0], read_events=False)
    series = np.array([10] * raw_series10.n_times +
                      [9] * raw_series9.n_times)
    series_test_tot.append(series)
    subjs = np.array([subject]*raw_test.n_times)
    subjects_test_tot.append(subjs)
# save validation infos
# Columns: one column per event label, then subject id, then series number.
subjects_val_tot = np.concatenate(subjects_val_tot)
series_val_tot = np.concatenate(series_val_tot)
lbls_tot = np.concatenate(lbls_tot)
toSave = np.c_[lbls_tot, subjects_val_tot, series_val_tot]
np.save('infos_val.npy', toSave)
# save test infos
# Columns: sample id, subject id, series number.
subjects_test_tot = np.concatenate(subjects_test_tot)
series_test_tot = np.concatenate(series_test_tot)
ids_tot = np.concatenate(ids_tot)
toSave = np.c_[ids_tot, subjects_test_tot, series_test_tot]
np.save('infos_test.npy', toSave)
| [
"[email protected]"
]
| |
7d61b22340803854812ce2fb50445f429aebeeb0 | df44affab179c2546fb3e0d1dc29eebcfdf51c1c | /toughradius/common/smsapi.py | 06534cfab472a0cec1e3f4ef54e6c8980f14269e | []
| no_license | sailorhdx/taurusradius | 121c508e7faffaddcd5326d2b6d3710eaf0ed08e | 92d30820611a0c9102ae41713ea3c35437a3c6ee | refs/heads/master | 2021-01-22T02:28:31.543338 | 2017-06-17T02:15:33 | 2017-06-17T02:15:33 | 92,362,551 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,021 | py | #!/usr/bin/env python
# coding=utf-8
import time
import json
import base64
from urllib import urlencode
from toughradius.toughlib import apiutils
from toughradius.toughlib import logger
from toughradius.toughlib import utils
from toughradius.toughlib.smsutils import smscn
from toughradius.toughlib.smsutils import qcloud
from toughradius.toughlib.smsutils import sendcloud
from toughradius.toughlib.smsutils import toughcloud
from toughradius.toughlib.btforms import rules
from cyclone import httpclient
from twisted.internet import defer
class SmsApi(object):
    """Dispatch SMS sending to one of the supported gateway backends.

    Gateway client instances are created lazily and cached per gateway
    name, so the apikey/apisecret of the *first* call for a gateway are
    the ones a cached client keeps using.
    """

    def __init__(self):
        # Supported gateway identifiers.
        self.gateways = ['toughcloud',
         'smscn',
         'qcloud',
         'sendcloud']
        # Cache of instantiated gateway clients, keyed by gateway name.
        self.smscalls = {}

    def get_instance(self, gateway, apikey, apisecret):
        """Return (and cache) the client instance for *gateway*.

        Returns None for unknown gateway names (send_sms validates the
        name before calling this).
        """
        if gateway in self.smscalls:
            return self.smscalls[gateway]
        # Dispatch table instead of the original if/elif chain.
        factories = {
            'smscn': smscn.SmsApi,
            'qcloud': qcloud.SmsApi,
            'sendcloud': sendcloud.SmsApi,
            'toughcloud': toughcloud.SmsApi,
        }
        factory = factories.get(gateway)
        if factory is not None:
            self.smscalls[gateway] = factory(apikey, apisecret)
        return self.smscalls.get(gateway)

    @defer.inlineCallbacks
    def send_sms(self, gateway, apikey, apisecret, sendphone, tplid, args = None, kwargs = None):
        """Send an SMS through *gateway* (Twisted deferred).

        Returns the gateway response, or False when sending failed
        (errors are logged, never propagated past validation).

        Raises:
            ValueError: unsupported gateway or malformed mobile number.
        """
        # Bug fix: the original used mutable defaults (args=[], kwargs={})
        # which are shared across calls; use None sentinels instead.
        args = [] if args is None else args
        kwargs = {} if kwargs is None else kwargs
        if gateway not in self.gateways:
            raise ValueError(u'gateway [%s] not support' % gateway)
        if not rules.is_mobile.valid(sendphone):
            raise ValueError(u'sendsms: %s mobile format error' % sendphone)
        try:
            api = self.get_instance(gateway, apikey, apisecret)
            resp = yield api.send_sms(sendphone, tplid, args=args, kwargs=kwargs)
            defer.returnValue(resp)
        except Exception as err:
            # Best-effort: log and signal failure with False rather than
            # failing the deferred.
            logger.exception(err)
            defer.returnValue(False)
# Module-level singleton plus a bound-method alias so callers can simply
# do ``from toughradius.common.smsapi import send_sms``.
_smsapi = SmsApi()
send_sms = _smsapi.send_sms
"[email protected]"
]
| |
b8a0233512848689eab5dea8d359062c641e2a1d | 6a2bda031f53b057e7aac3aeebd070151f5923f1 | /zmqpy/zmqpy.py | d012bf64ed8ad12196b425161f1ed1cec45fec26 | [
"BSD-2-Clause"
]
| permissive | pfw/zmqpy | ab34b9f9f7e662e5d056a5a35078c27f4c9b5d9b | 185758349176709da43327e1f9b7c7c04d4ca850 | refs/heads/master | 2020-12-24T20:14:41.117019 | 2012-12-10T18:43:17 | 2012-12-10T18:43:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,024 | py | # coding: utf-8
from ._cffi import C, ffi, zmq_version, new_uint64_pointer, \
new_int64_pointer, \
new_int_pointer, \
new_binary_data, \
value_uint64_pointer, \
value_int64_pointer, \
value_int_pointer, \
value_binary_data
from .constants import *
from .error import *
from .utils import jsonapi
class Context(object):
    """0MQ context wrapper.

    Uses the Borg pattern: every instance assigns the class-level
    ``_state`` dict as its ``__dict__``, so all Context objects share
    sockets, options and the underlying zmq context.
    NOTE(review): ``__init__`` still calls ``zmq_init`` on every
    construction, replacing the shared ``zmq_ctx`` without terminating
    the previous one -- confirm this is intended.
    """
    _state = {}
    def __init__(self, iothreads=1):
        # libzmq requires at least one I/O thread.
        if not iothreads > 0:
            raise ZMQError(EINVAL)
        # Borg: share state across all Context instances.
        self.__dict__ = self._state
        self.zmq_ctx = C.zmq_init(iothreads)
        self.iothreads = iothreads
        self._closed = False
        self.n_sockets = 0
        self.max_sockets = 32
        self._sockets = {}
        # Options applied to every socket created via socket(); LINGER=1
        # keeps term() from blocking on unsent messages.
        self.sockopts = {LINGER: 1}
        self.linger = 1
    def term(self):
        """Close all tracked sockets and terminate the zmq context."""
        if self.closed:
            return
        # NOTE(review): deletes from _sockets while iterating items();
        # safe on Python 2 (items() is a list) -- confirm if porting.
        for k, s in self._sockets.items():
            if not s.closed:
                s.close()
            del self._sockets[k]
        C.zmq_term(self.zmq_ctx)
        self.zmq_ctx = None
        self._closed = True
        self.n_sockets = 0
    @property
    def closed(self):
        return self._closed
    def _add_socket(self, socket):
        # Register a socket and return the post-increment counter.
        # NOTE(review): the socket is stored under the *pre*-increment
        # key, so the value returned here does not match its dict key
        # (relevant if _rm_socket is ever called with it) -- confirm.
        self._sockets[self.n_sockets] = socket
        self.n_sockets += 1
        return self.n_sockets
    def _rm_socket(self, n):
        del self._sockets[n]
    def socket(self, sock_type):
        """Create a Socket of *sock_type*, applying the context-wide
        socket options."""
        if self._closed:
            raise ZMQError(ENOTSUP)
        socket = Socket(self, sock_type)
        for option, option_value in self.sockopts.items():
            socket.setsockopt(option, option_value)
        return socket
    def set_linger(self, value):
        # Affects sockets created *after* this call (and mirrors the
        # value on the context for inspection).
        self.sockopts[LINGER] = value
        self.linger = value
def new_pointer_from_opt(option, length=0):
    """Allocate a fresh ffi value holder matching *option*'s C type.

    Returns the (pointer, size) pair produced by the ``new_*`` helpers
    from ``_cffi``; *length* is only used for binary options.

    Raises:
        ValueError: when *option* belongs to no known option set.
    """
    if option in uint64_opts:
        return new_uint64_pointer()
    if option in int64_opts:
        return new_int64_pointer()
    if option in int_opts:
        return new_int_pointer()
    if option in binary_opts:
        return new_binary_data(length)
    raise ValueError('Invalid option')
def value_from_opt_pointer(option, opt_pointer, length=0):
    """Convert the C value behind *opt_pointer* to a Python object.

    Integer-typed options (uint64/int64/int) all collapse to a plain
    Python int; binary options are read as a C string.

    Raises:
        ValueError: when *option* belongs to no known option set.
    """
    if option in uint64_opts or option in int64_opts or option in int_opts:
        return int(opt_pointer[0])
    if option in binary_opts:
        return ffi.string(opt_pointer)
    raise ValueError('Invalid option')
def initialize_opt_pointer(option, value, length=0):
    """Build an ffi (pointer, size) pair initialised from *value*.

    Counterpart of :func:`new_pointer_from_opt` for writing options;
    *length* is only used for binary options.

    Raises:
        ValueError: when *option* belongs to no known option set.
    """
    if option in uint64_opts:
        return value_uint64_pointer(value)
    if option in int64_opts:
        return value_int64_pointer(value)
    if option in int_opts:
        return value_int_pointer(value)
    if option in binary_opts:
        return value_binary_data(value, length)
    raise ValueError('Invalid option')
class Socket(object):
    """Thin wrapper over a libzmq socket handle.

    Most methods mirror the C API and return its integer return codes;
    only ``recv`` raises on failure, while ``getsockopt`` returns -1 and
    stores the errno in ``last_errno``.
    """
    def __init__(self, context, sock_type):
        self.context = context
        self.sock_type = sock_type
        self.zmq_socket = C.zmq_socket(context.zmq_ctx, sock_type)
        if not self.zmq_socket:
            raise ZMQError()
        self._closed = False
        self._attrs = {}
        # Registration index handed out by the owning context.
        self.n = self.context._add_socket(self)
        self.last_errno = None
    @property
    def closed(self):
        return self._closed
    def close(self):
        # Idempotent: the underlying handle is closed only once.
        if not self._closed:
            C.zmq_close(self.zmq_socket)
            self._closed = True
    def bind(self, address):
        """Bind to *address*; returns libzmq's return code (0 on success)."""
        ret = C.zmq_bind(self.zmq_socket, address)
        return ret
    def connect(self, address):
        """Connect to *address*; returns libzmq's return code (0 on success)."""
        ret = C.zmq_connect(self.zmq_socket, address)
        return ret
    def setsockopt(self, option, value):
        """Set a socket option, marshalling *value* to the matching C type."""
        length = None
        if isinstance(value, str):
            length = len(value)
        low_level_data = initialize_opt_pointer(option, value, length)
        low_level_value_pointer = low_level_data[0]
        low_level_sizet = low_level_data[1]
        ret = C.zmq_setsockopt(self.zmq_socket,
                               option,
                               ffi.cast('void*', low_level_value_pointer),
                               low_level_sizet)
        return ret
    def getsockopt(self, option, length=0):
        """Read a socket option.

        Returns the converted Python value, or -1 on failure with the
        errno recorded in ``last_errno`` (kept as-is for backward
        compatibility; unlike ``recv`` it does not raise).
        """
        low_level_data = new_pointer_from_opt(option, length=length)
        low_level_value_pointer = low_level_data[0]
        low_level_sizet_pointer = low_level_data[1]
        ret = C.zmq_getsockopt(self.zmq_socket,
                               option,
                               low_level_value_pointer,
                               low_level_sizet_pointer)
        if ret < 0:
            self.last_errno = C.zmq_errno()
            return -1
        return value_from_opt_pointer(option, low_level_value_pointer)
    def send(self, message, flags=0, copy=False):
        """Send *message* (a byte string); returns libzmq's return code.

        ``copy`` is accepted for API compatibility but ignored: the
        payload is always copied into a fresh zmq_msg_t.
        """
        zmq_msg = ffi.new('zmq_msg_t*')
        c_message = ffi.new('char[]', message)
        C.zmq_msg_init_size(zmq_msg, len(message))
        C.memcpy(C.zmq_msg_data(zmq_msg), c_message, len(message))
        # zmq 2.x named the call zmq_send; 3.x+ renamed it zmq_sendmsg.
        if zmq_version == 2:
            ret = C.zmq_send(self.zmq_socket, zmq_msg, flags)
        else:
            ret = C.zmq_sendmsg(self.zmq_socket, zmq_msg, flags)
        C.zmq_msg_close(zmq_msg)
        if ret < 0:
            self.last_errno = C.zmq_errno()
        return ret
    def recv(self, flags=0):
        """Receive one message and return its payload as a byte string.

        Raises:
            ZMQError: with libzmq's errno when the receive fails.
        """
        zmq_msg = ffi.new('zmq_msg_t*')
        C.zmq_msg_init(zmq_msg)
        if zmq_version == 2:
            ret = C.zmq_recv(self.zmq_socket, zmq_msg, flags)
        else:
            ret = C.zmq_recvmsg(self.zmq_socket, zmq_msg, flags)
        if ret < 0:
            C.zmq_msg_close(zmq_msg)
            # Bug fix: this raised ``zmqpy.ZMQError`` but no ``zmqpy``
            # name exists inside this module, so every failed recv died
            # with a NameError instead.  ZMQError is already in scope via
            # ``from .error import *``.
            raise ZMQError(_errno=C.zmq_errno())
        value = ffi.buffer(C.zmq_msg_data(zmq_msg), int(C.zmq_msg_size(zmq_msg)))[:]
        C.zmq_msg_close(zmq_msg)
        return value
def make_zmq_pollitem(socket, flags):
    """Build a ``zmq_pollitem_t`` struct watching *socket* for *flags*."""
    item = ffi.new('zmq_pollitem_t*')
    item.socket = socket.zmq_socket
    item.fd = 0
    item.events = flags
    item.revents = 0
    # Dereference: callers collect plain structs, not pointers.
    return item[0]
def _poll(zmq_pollitem_list, poller, timeout=-1):
    """Run ``zmq_poll`` over *zmq_pollitem_list* and map the results
    back to the Python Socket objects registered on *poller*.

    Returns a list of (Socket, revents) pairs for every item that
    reported at least one event.
    """
    if zmq_version == 2:
        # Presumably converts milliseconds to the microseconds that
        # zmq 2.x expects for its poll timeout -- confirm against the
        # zmq_poll(3) man page for the bundled libzmq version.
        timeout = timeout * 1000
    items = ffi.new('zmq_pollitem_t[]', zmq_pollitem_list)
    list_length = ffi.cast('int', len(zmq_pollitem_list))
    c_timeout = ffi.cast('long', timeout)
    C.zmq_poll(items, list_length, c_timeout)
    result = []
    for index in range(len(items)):
        if items[index].revents > 0:
            # poller._sockets maps the raw zmq handle back to the Socket.
            result.append((poller._sockets[items[index].socket],
                           items[index].revents))
    return result
# Code From PyZMQ
class Poller(object):
    """Poll a set of registered sockets for I/O readiness.

    Keeps three parallel mappings: ``sockets`` (Socket -> flags),
    ``_sockets`` (raw zmq handle -> Socket, used by ``_poll`` to map
    results back) and ``c_sockets`` (Socket -> zmq_pollitem_t struct).
    """
    def __init__(self):
        self.sockets = {}
        self._sockets = {}
        self.c_sockets = {}
    def register(self, socket, flags=POLLIN|POLLOUT):
        """Watch *socket* for *flags*; zero flags unregisters it."""
        if not flags:
            if socket in self.sockets:
                # re-registering with no events drops the socket
                self.unregister(socket)
            # new sockets with no events are simply ignored
            return
        self.sockets[socket] = flags
        self._sockets[socket.zmq_socket] = socket
        self.c_sockets[socket] = make_zmq_pollitem(socket, flags)
    def modify(self, socket, flags=POLLIN|POLLOUT):
        """Change the event mask of *socket* (alias for register)."""
        self.register(socket, flags)
    def unregister(self, socket):
        """Stop watching *socket* entirely."""
        del self.sockets[socket]
        del self._sockets[socket.zmq_socket]
        del self.c_sockets[socket]
    def poll(self, timeout=None):
        """Poll all registered sockets; *timeout* in ms, None/negative
        means block forever.  Returns [(Socket, revents), ...]."""
        timeout = -1 if timeout is None else int(timeout)
        if timeout < 0:
            timeout = -1
        return _poll(self.c_sockets.values(),
                     self,
                     timeout=timeout)
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.