id (stringlengths 1–265) | text (stringlengths 6–5.19M) | dataset_id (stringclasses, 7 values) |
---|---|---|
1693888 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function

import unittest
import numpy as np
from paddlerec.core.metrics import PrecisionRecall
import paddle
import paddle.fluid as fluid


def calc_precision(tp_count, fp_count):
    if tp_count > 0.0 or fp_count > 0.0:
        return tp_count / (tp_count + fp_count)
    return 1.0


def calc_recall(tp_count, fn_count):
    if tp_count > 0.0 or fn_count > 0.0:
        return tp_count / (tp_count + fn_count)
    return 1.0


def calc_f1_score(precision, recall):
    if precision > 0.0 or recall > 0.0:
        return 2 * precision * recall / (precision + recall)
    return 0.0


def get_states(idxs, labels, cls_num, weights=None, batch_nums=1):
    ins_num = idxs.shape[0]
    # TP FP TN FN
    states = np.zeros((cls_num, 4)).astype('float32')
    for i in range(ins_num):
        w = weights[i] if weights is not None else 1.0
        idx = idxs[i][0]
        label = labels[i][0]
        if idx == label:
            states[idx][0] += w
            for j in range(cls_num):
                states[j][2] += w
            states[idx][2] -= w
        else:
            states[label][3] += w
            states[idx][1] += w
            for j in range(cls_num):
                states[j][2] += w
            states[label][2] -= w
            states[idx][2] -= w
    return states


def compute_metrics(states, cls_num):
    total_tp_count = 0.0
    total_fp_count = 0.0
    total_fn_count = 0.0
    macro_avg_precision = 0.0
    macro_avg_recall = 0.0
    for i in range(cls_num):
        total_tp_count += states[i][0]
        total_fp_count += states[i][1]
        total_fn_count += states[i][3]
        macro_avg_precision += calc_precision(states[i][0], states[i][1])
        macro_avg_recall += calc_recall(states[i][0], states[i][3])
    metrics = []
    macro_avg_precision /= cls_num
    macro_avg_recall /= cls_num
    metrics.append(macro_avg_precision)
    metrics.append(macro_avg_recall)
    metrics.append(calc_f1_score(macro_avg_precision, macro_avg_recall))
    micro_avg_precision = calc_precision(total_tp_count, total_fp_count)
    metrics.append(micro_avg_precision)
    micro_avg_recall = calc_recall(total_tp_count, total_fn_count)
    metrics.append(micro_avg_recall)
    metrics.append(calc_f1_score(micro_avg_precision, micro_avg_recall))
    return np.array(metrics).astype('float32')
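
# --- Added illustration (not part of the original PaddleRec test) ---
# A minimal sketch of how the reference helpers above fit together on a tiny
# hand-made batch. The `_demo_` name is hypothetical and the function is never
# called by the test suite; it only documents the expected data shapes.
def _demo_reference_metrics():
    # Three predictions over two classes; the third one mistakes class 1 for class 0.
    idxs = np.array([[0], [1], [0]], dtype='int32')     # predicted class ids, shape (N, 1)
    labels = np.array([[0], [1], [1]], dtype='int32')   # ground-truth class ids, shape (N, 1)
    states = get_states(idxs, labels, cls_num=2)        # per-class [TP, FP, TN, FN]
    metrics = compute_metrics(states, cls_num=2)
    # `metrics` holds, in order: macro precision, macro recall, macro F1,
    # micro precision, micro recall, micro F1.
    return states, metrics
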
class TestPrecisionRecall(unittest.TestCase):
    def setUp(self):
        self.ins_num = 64
        self.cls_num = 10
        self.batch_nums = 3

        self.datas = []
        self.states = np.zeros((self.cls_num, 4)).astype('float32')
        for i in range(self.batch_nums):
            probs = np.random.uniform(0, 1.0, (self.ins_num,
                                               self.cls_num)).astype('float32')
            idxs = np.array(np.argmax(
                probs, axis=1)).reshape(self.ins_num, 1).astype('int32')
            labels = np.random.choice(range(self.cls_num),
                                      self.ins_num).reshape(
                                          (self.ins_num, 1)).astype('int32')
            self.datas.append((probs, labels))
            states = get_states(idxs, labels, self.cls_num)
            self.states = np.add(self.states, states)
        self.metrics = compute_metrics(self.states, self.cls_num)
        self.place = fluid.core.CPUPlace()

    def build_network(self):
        predict = fluid.data(
            name="predict",
            shape=[-1, self.cls_num],
            dtype='float32',
            lod_level=0)
        label = fluid.data(
            name="label", shape=[-1, 1], dtype='int32', lod_level=0)
        precision_recall = PrecisionRecall(
            input=predict, label=label, class_num=self.cls_num)
        return precision_recall

    def test_forward(self):
        precision_recall = self.build_network()
        metrics = precision_recall.get_result()
        fetch_vars = []
        metric_keys = []
        for item in metrics.items():
            fetch_vars.append(item[1])
            metric_keys.append(item[0])
        exe = fluid.Executor(self.place)
        exe.run(fluid.default_startup_program())
        for i in range(self.batch_nums):
            outs = exe.run(
                fluid.default_main_program(),
                feed={'predict': self.datas[i][0],
                      'label': self.datas[i][1]},
                fetch_list=fetch_vars,
                return_numpy=True)
        outs = dict(zip(metric_keys, outs))
        self.assertTrue(np.allclose(outs['[TP FP TN FN]'], self.states))
        self.assertTrue(np.allclose(outs['precision_recall_f1'], self.metrics))

    def test_exception(self):
        self.assertRaises(Exception, PrecisionRecall)
        self.assertRaises(
            Exception,
            PrecisionRecall,
            input=self.datas[0][0],
            label=self.datas[0][1],
            class_num=self.cls_num)


if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
3308765 | <reponame>modera-manyrepo-packages/mcloud
import json

from twisted.internet import reactor, defer
import txredisapi as redis
from twisted.python import log

from mcloud.util import txtimeout


class EventBus(object):
    redis = None
    protocol = None

    def __init__(self, redis_connection):
        super(EventBus, self).__init__()
        self.redis = redis_connection

    def fire_event(self, event_name, data=None, *args, **kwargs):
        if not data:
            if kwargs:
                data = kwargs
            elif args:
                data = args

        if not isinstance(data, basestring):
            data = 'j:' + json.dumps(data)
        else:
            data = 'b:' + str(data)

        return self.redis.publish(event_name, data)

    def connect(self, host="127.0.0.1", port=6379):
        log.msg('Event bus connected')
        d = defer.Deferred()
        reactor.connectTCP(host, port, EventBusFactory(d, self))
        return d

    def on(self, pattern, callback):
        if not self.protocol:
            raise Exception('Event bus is not connected yet!')
        self.protocol.on(pattern, callback)
        log.msg('Registered %s for channel: %s' % (callback, pattern))

    def cancel(self, pattern, callback):
        if not self.protocol:
            raise Exception('Event bus is not connected yet!')
        self.protocol.cancel(pattern, callback)
        log.msg('unRegistered %s for channel: %s' % (callback, pattern))

    def once(self, pattern, callback):
        if not self.protocol:
            raise Exception('Event bus is not connected yet!')

        def _once_and_remove(*args, **kwargs):
            self.protocol.cancel(pattern, _once_and_remove)
            callback(*args, **kwargs)

        self.protocol.on(pattern, _once_and_remove)
        log.msg('Registered %s for single invocation on channel: %s' % (callback, pattern))

    def wait_for_event(self, pattern, timeout=False):
        d = defer.Deferred()

        def _on_message(channel, message):
            if not d.called:
                d.callback(message)

        self.on(pattern, _on_message)

        if not timeout == 0:
            return txtimeout(d, timeout, lambda: d.callback(None))
        else:
            return d
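
# --- Added illustration (not part of the original mcloud module) ---
# A hypothetical wiring of the EventBus above, assuming a Redis server on the
# default host/port and an existing txredisapi connection `redis_connection`.
# connect() opens the subscriber side used by on()/once(); fire_event() publishes
# through the plain connection handed to the constructor.
#
#     @defer.inlineCallbacks
#     def demo(redis_connection):
#         bus = EventBus(redis_connection)
#         yield bus.connect()                      # subscriber protocol is now ready
#         bus.on('containers.*', lambda channel, message: log.msg(message))
#         yield bus.fire_event('containers.started', {'name': 'web'})
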
class EventBusProtocol(redis.SubscriberProtocol):
    callbacks = {}

    def on(self, pattern, callback):
        if not pattern in self.callbacks:
            self.callbacks[pattern] = []

            if '*' in pattern:
                self.psubscribe(pattern)
            else:
                self.subscribe(pattern)

        self.callbacks[pattern].append(callback)

    def cancel(self, pattern, callback):
        if pattern in self.callbacks:
            self.callbacks[pattern].remove(callback)

    def connectionMade(self):
        self.factory.eb.protocol = self

        if self.factory.on_connect:
            self.factory.on_connect.callback(self)
            self.factory.on_connect = None

        #
        #print "waiting for messages..."
        #print "use the redis client to send messages:"
        #print "$ redis-cli publish zz test"
        #print "$ redis-cli publish foo.bar hello world"
        #self.subscribe("zz")
        #self.psubscribe("foo.*")
        #reactor.callLater(10, self.unsubscribe, "zz")
        #reactor.callLater(15, self.punsubscribe, "foo.*")
        # self.continueTrying = False
        # self.transport.loseConnection()

    def messageReceived(self, pattern, channel, message):
        if message.startswith('j:'):
            message = json.loads(message[2:])
        else:
            message = message[2:]

        callbacks = []
        if pattern and pattern in self.callbacks:
            callbacks = self.callbacks[pattern]
        elif channel and channel in self.callbacks:
            callbacks = self.callbacks[channel]

        for clb in callbacks:
            clb(channel, message)

    def connectionLost(self, reason):
        log.msg("Connection lost: %s" % reason)


class EventBusFactory(redis.SubscriberFactory):
    maxDelay = 120
    continueTrying = True
    protocol = EventBusProtocol

    def __init__(self, on_connect, eb):
        redis.SubscriberFactory.__init__(self)
        self.on_connect = on_connect
        self.eb = eb
| StarcoderdataPython |
88235 | import unittest
import uuid

import requests

SERVICE_URL = 'http://localhost:8080'


class MyTestCase(unittest.TestCase):
    def test_status_returns_ok(self):
        r = requests.get(f'{SERVICE_URL}/status')
        self.assertEqual('OK', r.json()['status'])

    def test_version_returns_some_version(self):
        r = requests.get(f'{SERVICE_URL}/version')
        self.assertEqual(200, r.status_code)
        json = r.json()
        self.assertIsNotNone(json['version'])

    def test_get_without_bearer_returns_401(self):
        r = requests.get(f'{SERVICE_URL}/user')
        self.assertEqual(401, r.status_code)

    def test_login_without_correct_user_returns_401(self):
        auth = {'email': 'none', 'password': '<PASSWORD>'}
        r = requests.post(f'{SERVICE_URL}/user/login', json=auth)
        self.assertEqual(401, r.status_code)

    def test_registration_unique_user(self):
        user = {'first_name': 'John', 'last_name': 'Snow', 'email': f'<EMAIL>-{str(uuid.uuid4())}<EMAIL>',
                'password': '<PASSWORD>'}
        r = requests.put(f'{SERVICE_URL}/user/register', json=user)
        self.assertEqual(200, r.status_code)
        j = r.json()
        self.assertIsNotNone(j['auth_token'])
        self.assertEqual('Successfully registered.', j['message'])
        self.assertEqual('success', j['status'])

    def test_registration_duplicated_user(self):
        user = {'first_name': 'John', 'last_name': 'Snow', 'email': f'<EMAIL>-{str(uuid.uuid4())}<EMAIL>',
                'password': '<PASSWORD>'}
        r = requests.put(f'{SERVICE_URL}/user/register', json=user)
        self.assertEqual(200, r.status_code)
        r = requests.put(f'{SERVICE_URL}/user/register', json=user)
        self.assertEqual(409, r.status_code)

    def test_login_self(self):
        user = {'first_name': 'John', 'last_name': 'Snow', 'email': f'<EMAIL>(uuid.<EMAIL>',
                'password': '<PASSWORD>'}
        r = requests.put(f'{SERVICE_URL}/user/register', json=user)
        self.assertEqual(200, r.status_code)
        auth = {'email': user['email'], 'password': user['password']}
        r = requests.post(f'{SERVICE_URL}/user/login', json=auth)
        self.assertEqual(200, r.status_code)
        self.assertIsNotNone(r.json()['auth_token'])

    def test_get_self(self):
        user = {'first_name': 'John', 'last_name': 'Snow', 'email': f'<EMAIL>(<EMAIL>',
                'password': '<PASSWORD>'}
        r = requests.put(f'{SERVICE_URL}/user/register', json=user)
        self.assertEqual(200, r.status_code)
        token = r.json()['auth_token']
        r = requests.get(f'{SERVICE_URL}/user', headers={"Authorization": f"Bearer {token}"})
        self.assertEqual(200, r.status_code)
        j = r.json()
        self.assertEqual(user['email'], j['data']['email'])


if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
1685836 | # -*- coding: utf-8 -*-
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the auto_forensicate script."""
from __future__ import unicode_literals
import argparse
import logging
import os
import sys
import tempfile
import unittest
from six import StringIO
import mock
from auto_forensicate import auto_acquire
from auto_forensicate import errors
from auto_forensicate import uploader
from auto_forensicate.recipes import base
DEFAULT_ARTIFACT_CONTENT = os.urandom(1000)
# pylint: disable=missing-docstring
# pylint: disable=protected-access
class BytesIORecipe(base.BaseRecipe):
"""A Recipe returning 1 artifact with a BytesIO."""
def __init__(self, name, options=None):
super(BytesIORecipe, self).__init__(name, options=options)
self.ran_collection = False
def GetArtifacts(self):
return [base.StringArtifact('fake/path', DEFAULT_ARTIFACT_CONTENT)]
class FailingRecipe(base.BaseRecipe):
"""A Recipe raising an IOError when running GetArtifact."""
def GetArtifacts(self):
raise errors.RecipeException('Everything is terrible.')
class FileCopyUploader(object):
"""Test implementation of an Uploader object that copies content to a file."""
def __init__(self, destination_file):
self._origin_dir = os.getcwd()
self.destination_file = destination_file
def UploadArtifact(self, artifact, update_callback=None):
data = artifact._GetStream().read()
self.destination_file.write(data)
if update_callback:
update_callback(len(data), len(data))
class BarTest(unittest.TestCase):
"""Tests for the progress bar classes."""
def testHumanReadableSpeed(self):
"""Tests _HumanReadableSpeed."""
progressbar = auto_acquire.BaBar()
self.assertEqual(progressbar._HumanReadableSpeed(0.0), '0.0 B/s')
expected = [
'1.2 B/s', '12.3 B/s', '123.0 B/s',
'1.2 KB/s', '12.3 KB/s', '123.0 KB/s',
'1.2 MB/s', '12.3 MB/s', '123.0 MB/s',
'1.2 GB/s', '12.3 GB/s', '123.0 GB/s',
'1.2 TB/s', '12.3 TB/s', '123.0 TB/s',
'1.2 PB/s', '12.3 PB/s', '123.0 PB/s',
'1230.0 PB/s', '12300.0 PB/s', '123000.0 PB/s',
]
for index, value in enumerate(expected):
self.assertEqual(
progressbar._HumanReadableSpeed(1.23 * (10 ** index)), value)
class AutoForensicateTest(unittest.TestCase):
"""Tests for the AutoForensicate class.
TODO(romaing): Add tests for Main(), by setting sys.argv and testing
the proper recipes ran.
"""
def FakeBadParseGCSJSON(self, _):
return None
def FakeParseGCSJSON(self, _):
return {'client_id': 'fake_client_id'}
def FakeMakeProgressBar(self, max_size, name, message=None): # pylint: disable=unused-argument
return mock.create_autospec(auto_acquire.BaBar, spec_set=True)
def testParseDestination(self):
recipes = {
'test1': None,
'test2': None
}
af = auto_acquire.AutoForensicate(recipes=recipes)
test_args = ['--acquire', 'all', 'destination_url']
options = af.ParseArguments(test_args)
self.assertEqual(options.destination, 'destination_url')
def testParseArgsRequiredJson(self):
recipes = {
'test1': None,
'test2': None
}
af = auto_acquire.AutoForensicate(recipes=recipes)
test_args = ['--acquire', 'test1', '--logging', 'stackdriver']
with self.assertRaises(SystemExit):
prev_stderr = sys.stderr
sys.stderr = StringIO()
af.ParseArguments(test_args)
sys.stderr = prev_stderr
def testParseArgsRequiredURL(self):
recipes = {
'test1': None,
'test2': None
}
af = auto_acquire.AutoForensicate(recipes=recipes)
test_args = ['--acquire', 'test1', '--gs_keyfile=null']
prev_stderr = sys.stderr
sys.stderr = StringIO()
with self.assertRaises(SystemExit):
af.ParseArguments(test_args)
sys.stderr = prev_stderr
def testParseAcquireOneRecipe(self):
recipes = {
'test1': None,
'test2': None
}
test_args = ['--acquire', 'test1', 'nfs://destination']
af = auto_acquire.AutoForensicate(recipes=recipes)
parser = af._CreateParser()
options = parser.parse_args(test_args)
expected_recipes = ['test1']
self.assertEqual(options.acquire, expected_recipes)
def testParseAcquireBad(self):
recipes = {
'test1': None,
'test2': None
}
af = auto_acquire.AutoForensicate(recipes=recipes)
test_args = [
'--acquire', 'test4', '--acquire', 'all',
'--gs_keyfile=file', 'gs://bucket']
prev_stderr = sys.stderr
sys.stderr = StringIO()
with self.assertRaises(SystemExit):
af.ParseArguments(test_args)
sys.stderr = prev_stderr
def testParseAcquireAll(self):
recipes = {
'test1': None,
'test2': None
}
af = auto_acquire.AutoForensicate(recipes=recipes)
test_args = ['--acquire', 'test1', '--acquire', 'all', 'gs://bucket']
options = af.ParseArguments(test_args)
expected_recipes = ['test1', 'test2']
self.assertEqual(options.acquire, expected_recipes)
def testMakeUploader(self):
af = auto_acquire.AutoForensicate(recipes={'test': None})
options = af.ParseArguments(['--acquire', 'all', 'destination'])
uploader_object = af._MakeUploader(options)
self.assertEqual(uploader_object, None)
options = af.ParseArguments(['--acquire', 'all', 'gs://destination'])
with self.assertRaises(errors.BadConfigOption):
# We need a --gs_keyfile option for gs:// URLs
uploader_object = af._MakeUploader(options)
af._ParseGCSJSON = self.FakeBadParseGCSJSON
options = af.ParseArguments(
['--acquire', 'all', '--gs_keyfile', 'keyfile', 'gs://destination'])
with self.assertRaises(errors.BadConfigOption):
# Invalid gs_keyfile
uploader_object = af._MakeUploader(options)
af._ParseGCSJSON = self.FakeParseGCSJSON
options = af.ParseArguments(
['--acquire', 'all', '--gs_keyfile', 'keyfile', 'gs://destination'])
uploader_object = af._MakeUploader(options)
self.assertIsInstance(uploader_object, uploader.GCSUploader)
def testFailDo(self):
af = auto_acquire.AutoForensicate(recipes={})
recipe = FailingRecipe('fail')
with tempfile.TemporaryFile() as destination:
uploader_object = FileCopyUploader(destination)
af._uploader = uploader_object
with self.assertRaises(errors.RecipeException):
af.Do(recipe)
def testDo(self):
af = auto_acquire.AutoForensicate(recipes={})
parser = argparse.ArgumentParser()
parser.add_argument('--fake', action='store_true')
options = parser.parse_args(['--fake'])
af._logger = logging.getLogger(self.__class__.__name__)
af._MakeProgressBar = self.FakeMakeProgressBar
recipe = BytesIORecipe('stringio', options=options)
self.assertTrue(recipe._options.fake)
with tempfile.TemporaryFile() as destination:
uploader_object = FileCopyUploader(destination)
af._uploader = uploader_object
af.Do(recipe)
destination.seek(0)
copied_data = destination.read()
self.assertEqual(copied_data, DEFAULT_ARTIFACT_CONTENT)
if __name__ == '__main__':
unittest.main()
| StarcoderdataPython |
1645207 | from lib.gui_controls import Controls
import time
import win32api
import win32gui
import win32con  # import the win32api-related modules
from lib.utils import win_key_dc


def get_xy(x, y):
    x_list = [win_key_dc[xx] for xx in str(x)]
    y_list = [win_key_dc[yy] for yy in str(y)]
    return x_list, y_list
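
# Added note (not in the original script): get_xy() maps each decimal digit of a map
# coordinate to its Windows virtual-key code via the win_key_dc lookup table, so the
# digits can later be replayed with PostMessage. For example (hypothetical values),
#     x_keys, y_keys = get_xy(123, 456)
# yields the key codes for "1", "2", "3" and "4", "5", "6", which move_to_pos() below
# types into the in-game coordinate boxes.
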
def move_to_pos(hwnd, x, y):
    # open the in-game map with the 'm' key
    Controls.activate_hwnd(hwnd)
    # zoom the camera distance all the way in with the scroll wheel
    Controls.win_gunlun_qian(hwnd)
    Controls.key_post(hwnd, 0x4D)
    # x,y = get_hwnd_offset(hwnd,115,34)
    Controls.win_mouse_click(hwnd, 115, 34)
    input_hwnd(hwnd, x)
    Controls.win_mouse_click(hwnd, 185, 40)
    input_hwnd(hwnd, y)
    Controls.win_mouse_click(hwnd, 223, 33)
    time.sleep(1)
    Controls.win_mouse_move(hwnd, 316, 281, 0.5)
    Controls.win_mouse_click(hwnd, 316, 281)
    # Luoyang
    # Controls.win_mouse_click(hwnd,328,290)
    # Yanjing
    # Controls.win_mouse_click(hwnd,331,290)
    # close the map
    Controls.key_post(hwnd, 0x4D)
    # # move to the escort-run pickup point
    # time.sleep(2)
    # box = check(hwnd)
    # print(box)
    # Controls.win_mouse_move(hwnd,700,93)
    # time.sleep(1)
    # Controls.win_mouse_click(hwnd,700,93)
    # time.sleep(1)
    # Controls.win_mouse_click(hwnd,150,221)
    # Controls.get_screen(hwnd)
    # box = Controls.locate2("D:\project\python\jiuyin_robot\image\la_jiebiao.png")
    # if box:
    #     print(box)
    #     Controls.win_mouse_move(hwnd,244,479)
    #     Controls.win_mouse_click(hwnd,244,479)


def input_hwnd(hwnd, input_list):
    # clear the input field first
    for _ in range(5):
        win32api.PostMessage(hwnd, win32con.WM_KEYDOWN, 0x27, 0x1F0001)
        time.sleep(0.2)
        win32api.PostMessage(hwnd, win32con.WM_KEYUP, 0x27, 0x1F0001)
    for _ in range(5):
        win32api.PostMessage(hwnd, win32con.WM_KEYDOWN, 0x8, 0x1F0001)
        time.sleep(0.2)
        win32api.PostMessage(hwnd, win32con.WM_KEYUP, 0x8, 0x1F0001)
    for input in input_list:
        win32api.PostMessage(hwnd, win32con.WM_CHAR, input, 0x1F0001)


def check(hwnd):
    while True:
        for x in range(695, 700):
            for y in range(88, 93):
                Controls.win_mouse_move(hwnd, x, y)
                time.sleep(0.2)
                Controls.get_screen(hwnd)
                box = Controls.locate2("D:\project\python\jiuyin_robot\image\lb_liaocanghai.png", 0.5)
                if box:
                    return box | StarcoderdataPython |
81180 | <filename>setup.py
from setuptools import find_packages, setup

setup(
    name='src',
    packages=find_packages(),
    version='0.1.0',
    description='An applied Data Science project on analyzing the COVID-19 data.',
    author='<NAME>',
    license='MIT',
)
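
# Added note (not part of the original setup.py): with this file in place the package
# is typically installed for development with `pip install -e .` from the project root;
# the `src` name and the find_packages() call above are taken as-is from the original.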
| StarcoderdataPython |
1718108 | '''
Point-processing based image enhancement using:
1. Log and inverse-log transformations
2. Gamma correction (power-law transformation) with +ve and -ve values
'''
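
# Added illustration (not part of the original script): the loops below apply the
# transforms pixel by pixel on the uint8 image returned by cv2.imread, so large
# intermediate values wrap around. A common, hedged alternative is to work in float
# and rescale, e.g. for gamma correction:
#
#     img_f = img.astype(np.float32) / 255.0             # normalise to [0, 1]
#     gamma_img = np.uint8(255 * np.power(img_f, 2.2))    # gamma = 2.2 as an example
#
# The per-pixel versions are kept unchanged below to match the original text.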
import cv2
import numpy as np

############## log transform ############
img = cv2.imread('transformation.png', -1)
prop = img.shape
c = 20
for i in range(0, prop[0]):
    for j in range(0, prop[1]):
        img[i][j] = c * np.log(img[i][j] + 1)
cv2.imshow('log_img', img)
cv2.waitKey(0)
cv2.imwrite('log_img.png', img)

############### Inverse Log ################
img = cv2.imread('transformation.png', -1)
prop = img.shape
for i in range(0, prop[0]):
    for j in range(0, prop[1]):
        img[i][j] = np.exp(img[i][j]) * (np.log(255) / (255 - 1) - 1)
cv2.imshow('invlog_img', img)
cv2.waitKey(0)
cv2.imwrite('invlog_img.png', img)

############ Power law ############
img = cv2.imread('transformation.png', -1)
prop = img.shape
c = 20
gamma = 8
for i in range(0, prop[0]):
    for j in range(0, prop[1]):
        img[i][j] = c * img[i][j] ** gamma
cv2.imshow('powlaw_img', img)
cv2.waitKey(0)
cv2.imwrite('powlaw_img.png', img)
| StarcoderdataPython |
3294080 | <reponame>OpenIdentityPlatform/forgerock-persistit<gh_stars>1-10
#!/usr/bin/env python
# coding: utf-8
#
# Copyright 2011-2012 Akiban Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import os
import optparse
import commands
import shutil
import time
usage = """%prog [options]"""
parser = optparse.OptionParser(usage=usage)
parser.add_option(
"--xmx",
default = "2G",
help = "Maximum heap size for the JVM when running stress tests. [default: %default]"
)
parser.add_option(
"--jvm-opts",
default = "-ea -Dcom.sun.management.jmxremote.port=8082 -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Xrunjdwp:transport=dt_socket,address=8000,suspend=n,server=y",
help = "Extra options to pass to the JVM (e.g. JMX, debug). [default: %default]"
)
parser.add_option(
"--test-dir",
default = "./src/test/resources/tscripts",
help = "Directory where test scripts are located. [default: %default]"
)
parser.add_option(
"--test-run-dir",
default = "./target/stress_test_runs",
help = "Directory where all artifacts from stress tests are placed. [default: %default]"
)
parser.add_option(
"--tests",
default = "",
help = "Comma separated list of tests to run. (By default all tests are run)"
)
parser.add_option(
"--jar-file",
default = "target",
help = "Path to JAR file to use. [default: %default]"
)
(options, args) = parser.parse_args()
if not os.path.exists(options.test_dir):
print "Test script directory is not valid."
sys.exit(1)
# we create the test run directory
# if it already exists, we blow it away
if not os.path.exists(options.test_run_dir):
os.makedirs(options.test_run_dir)
else:
shutil.rmtree(options.test_run_dir)
os.makedirs(options.test_run_dir)
tests = []
if options.tests != "":
tests = [test for test in options.tests.split(",")]
# if the list of tests specified by the user is empty then
# we populate this list based on the *.plan files in the
# test_dir
if not tests:
for root, dirs, files in os.walk(options.test_dir):
for filename in files:
# ignore any of the stress tests with 10 in them for now
if filename.find("10") == -1:
tests.append(filename)
jar_file = options.jar_file
# pick up the default jar file generated if no jar file is specified
if jar_file == "target":
cmd = "grep version pom.xml | grep SNAPSHOT"
(retcode, output) = commands.getstatusoutput(cmd)
version = output.lstrip()[9:output.lstrip().find('SNAPSHOT')-1]
jar_file = "target/akiban-persistit-%s-SNAPSHOT-jar-with-dependencies-and-tests.jar" % version
if not os.path.isfile(jar_file):
print "Persistit JAR file does not exist! Did you run mvn install?"
sys.exit(1)
test_failures = 0
print "starting test run at: %s\n\n" % time.asctime()
for test in tests:
full_test_path = options.test_dir + "/" + test
test_data_path = options.test_run_dir + "/" + test
os.makedirs(test_data_path)
run_cmd = "java %s -Xmx%s -cp %s com.persistit.test.TestRunner script=%s datapath=%s logpath=%s" % (options.jvm_opts, options.xmx, jar_file, full_test_path, test_data_path, test_data_path)
print "%s\t\t\t" % test,
(retcode, output) = commands.getstatusoutput(run_cmd)
if retcode:
print "[FAIL]"
test_failures = test_failures + 1
else:
print "[PASS]"
print "\n\nfinished test run at: %s\n" % time.asctime()
print "tests run : %d" % len(tests)
print "test failures: %d" % test_failures
if test_failures:
sys.exit(1)
| StarcoderdataPython |
1692457 | from .core import pre_compute
from ..dispatch import dispatch
from ..expr import Expr
from odo.backends.json import JSON, JSONLines
from odo import into
from collections import Iterator
from odo.utils import records_to_tuples


@dispatch(Expr, JSON)
def pre_compute(expr, data, **kwargs):
    seq = into(list, data, **kwargs)
    leaf = expr._leaves()[0]
    return list(records_to_tuples(leaf.dshape, seq))


@dispatch(Expr, JSONLines)
def pre_compute(expr, data, **kwargs):
    seq = into(Iterator, data, **kwargs)
    leaf = expr._leaves()[0]
    return records_to_tuples(leaf.dshape, seq)
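
# Added note (not part of the original blaze module): the two overloads differ only in
# materialisation -- the JSON variant loads the whole file into a list of tuples, while
# the JSONLines variant keeps an Iterator so line-delimited files can be streamed
# through records_to_tuples lazily.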
| StarcoderdataPython |
3202643 | #!/usr/bin/env python
# coding: utf-8

# info
__version__ = "0.1"
__author__ = "<NAME>"
__date__ = "04/10/19"

from gpiozero import Button
import time

BUCKET_SIZE = 0.2794
rain_count = 0
rain_interval = 5


def bucket_tipped():
    global rain_count
    rain_count += 1


def reset_rainfall():
    global rain_count
    rain_count = 0


def calculate_rainfall():
    global rain_count
    rainfall = rain_count * BUCKET_SIZE
    return rainfall
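
# Added note (not part of the original script): each bucket tip adds BUCKET_SIZE
# (0.2794 mm) to the running total, so a rainfall *rate* over the polling window
# can be derived as, for example:
#
#     rate_mm_per_hour = calculate_rainfall() * 3600 / rain_interval
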
rain_sensor = Button(6)
rain_sensor.when_pressed = bucket_tipped

while True:
    rain_count = 0
    time.sleep(rain_interval)
    print(calculate_rainfall(), "mm") | StarcoderdataPython |
1676889 | <reponame>heldermarchetto/ElmitecReadUviewFileFormat<gh_stars>0
# -*- coding: utf-8 -*-
"""
Copyright 2019 Dr. <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import struct
import numpy as np
import bisect as bs
class readUviewClass():
def __init__(self) -> None:
"""Initializes the readUview object by reading the file contents"""
def __repr__(self):
try:
return str("Filename = %s\nimgSize = (%i,%i)" %(self.fn, self.imageWidth, self.imageHeight))
except AttributeError:
return str("Object not defined")
def getImage(self, fn) -> np.ndarray:
"""Returns the uview images as numpy array"""
self.fn = fn
with open(self.fn, mode='rb') as file: # b is important -> binary
self.fc = file.read()
self.fh = self.fileHeader()
self.ih = self.imageHeader()
self.lp = self.leemParameters()
totalHeaderSize = self.headerSize + self.imageHeadersize + self.attachedMarkupSize + self.LEEMDataVersion
return np.reshape(struct.unpack(str(self.imageWidth*self.imageHeight)+'H',self.fc[totalHeaderSize:]), (self.imageHeight, self.imageWidth))
def fileHeader(self, verbose=False) -> None:
"""Loads the contents of the file header"""
#must supply the file contents (fc). These are read with the open command in 'rb' mode
self.UK_id = "".join(map(chr, self.fc[0:self.fc.index(0)]))
self.UK_size = int.from_bytes(self.fc[20:22], byteorder='little')
self.UK_version = int.from_bytes(self.fc[22:24], byteorder='little')
self.UK_bitsPerPixel = int.from_bytes(self.fc[24:26], byteorder='little')
if self.UK_version >= 8:
self.UK_cameraBitsPerPixel = int.from_bytes(self.fc[26:28], byteorder='little')
self.MCPDiameterInPixels = int.from_bytes(self.fc[28:30], byteorder='little')
self.hBinning = int.from_bytes(self.fc[30:31], byteorder='little')
self.vBinning = int.from_bytes(self.fc[31:32], byteorder='little')
else:
voidNr = int(0)
self.UK_cameraBitsPerPixel = voidNr
self.MCPDiameterInPixels = voidNr
self.hBinning = voidNr
self.vBinning = voidNr
if self.UK_version >= 2:
self.imageWidth = int.from_bytes(self.fc[40:42], byteorder='little')
self.imageHeight = int.from_bytes(self.fc[42:44], byteorder='little')
self.nrImages = int.from_bytes(self.fc[44:46], byteorder='little')
else:
voidNr = int(0)
self.imageWidth = voidNr
self.imageHeight = voidNr
self.nrImages = voidNr
if self.UK_version >= 7:
self.attachedRecipeSize = int.from_bytes(self.fc[46:48], byteorder='little')
else:
self.attachedRecipeSize = int(0)
self.hasRecipe = self.attachedRecipeSize > 0
self.fixedRecipeSize = 128
if self.hasRecipe:
self.headerSize = 104+128
else:
self.headerSize = 104
if verbose:
print('headerSize = %i' %self.headerSize)
print('UK_id = '+self.UK_id)
print('UK_size = '+str(self.UK_size))
print('UK_version = '+str(self.UK_version))
print('UK_bitsPerPixel = '+str(self.UK_bitsPerPixel))
print('UK_cameraBitsPerPixel = '+str(self.UK_cameraBitsPerPixel))
print('MCPDiameterInPixels = '+str(self.MCPDiameterInPixels))
print('hBinning = '+str(self.hBinning))
print('vBinning = '+str(self.vBinning))
print('imageWidth = '+str(self.imageWidth))
print('imageHeight = '+str(self.imageHeight))
print('nrImages = '+str(self.nrImages))
print('attachedRecipeSize = '+str(self.attachedRecipeSize))
print('hasRecipe = '+str(self.hasRecipe))
def imageHeader(self, verbose=False) -> None:
"""Loads the contents of the image header"""
fp = self.headerSize #file pointer
self.imageHeadersize = int.from_bytes(self.fc[fp :fp+ 2], byteorder='little')
self.version = int.from_bytes(self.fc[fp+ 2:fp+ 4], byteorder='little')
self.colorScaleLow = int.from_bytes(self.fc[fp+ 4:fp+ 6], byteorder='little')
self.colorScaleHigh = int.from_bytes(self.fc[fp+ 6:fp+ 8], byteorder='little')
self.imageTime = int.from_bytes(self.fc[fp+ 8:fp+16], byteorder='little')
self.maskXShift = int.from_bytes(self.fc[fp+16:fp+18], byteorder='little')
self.maskYShift = int.from_bytes(self.fc[fp+18:fp+20], byteorder='little')
self.rotateMask = int.from_bytes(self.fc[fp+20:fp+22], byteorder='little', signed=False)
self.attachedMarkupSize = int.from_bytes(self.fc[fp+22:fp+24], byteorder='little')
self.hasAttachedMarkup = self.attachedMarkupSize != 0
if self.hasAttachedMarkup:
self.attachedMarkupSize = 128*((self.attachedMarkupSize//128)+1)
self.spin = int.from_bytes(self.fc[fp+24:fp+26], byteorder='little')
self.LEEMDataVersion = int.from_bytes(self.fc[fp+26:fp+28], byteorder='little')
fp = fp+28
if self.version > 5:
self.hasLEEMData = True
self.LEEMDataStartPos = fp
self.LEEMData = struct.unpack('240c',self.fc[fp:fp+240])
fp = fp+240
self.appliedProcessing = self.fc[fp]
self.grayAdjustZone = self.fc[fp+1]
self.backgroundvalue = int.from_bytes(self.fc[fp+2:fp+4], byteorder='little', signed=False)
self.desiredRendering = self.fc[fp+4]
self.desired_rotation_fraction = self.fc[fp+5]
self.rendering_argShort = int.from_bytes(self.fc[fp+6:fp+8], byteorder='little')
self.rendering_argFloat = struct.unpack('f',self.fc[fp+8:fp+12])[0]
self.desired_rotation = int.from_bytes(self.fc[fp+12:fp+14], byteorder='little')
self.rotaion_offset = int.from_bytes(self.fc[fp+14:fp+16], byteorder='little')
#spare 4
else:
self.hasLEEMData = False
voidNr = int(0)
self.LEEMData = struct.unpack('240c','\x00'*240)
self.appliedProcessing = b''
self.grayAdjustZone = b''
self.backgroundvalue = voidNr
self.desiredRendering = b''
#spare 1
self.rendering_argShort = voidNr
self.rendering_argFloat = 0.0
self.desired_rotation = voidNr
self.rotaion_offset = voidNr
if verbose:
print('imageHeadersize = '+str(self.imageHeadersize))
print('version = '+str(self.version))
print('colorScaleLow = '+str(self.colorScaleLow))
print('colorScaleHigh = '+str(self.colorScaleHigh))
print('imageTime = '+str(self.imageTime))
print('maskXShift = '+str(self.maskXShift))
print('maskYShift = '+str(self.maskYShift))
print('rotateMask = '+str(self.rotateMask))
print('attachedMarkupSize = '+str(self.attachedMarkupSize))
print('hasAttachedMarkup = '+str(self.hasAttachedMarkup))
print('spin = '+str(self.spin))
print('LEEMDataVersion = '+str(self.LEEMDataVersion))
def leemParameters(self, verbose=False) -> None:
"""Loads the image parameters from header"""
startPos = self.headerSize + self.imageHeadersize + self.attachedMarkupSize
endPos = startPos+self.LEEMDataVersion+1
self.leemData = self.fc[startPos:endPos]
endLeemData = endPos-startPos
zeroPos = (i for i, e in enumerate(self.leemData) if e == 0)
self.zeroList = []
nx = next(zeroPos,-1)
while nx > 0:
self.zeroList.append(nx)
nx = next(zeroPos,-1)
endLeemData = self.zeroList[-1]
self.paramList = []
endPos = 0
while endPos <= endLeemData:
res, endPos = self.extractLeemParam(endPos)
self.paramList.append(res)
def getUnits(self, val) -> str:
"""Returns the units according to page 10 of the "FileFormats 2017" specifications"""
return {
'0': 'none',
'1': 'V',
'2': 'mA',
'3': 'A',
'4': 'C',
'5': 'K',
'6': 'mV',
'7': 'pA',
'8': 'nA',
'9': 'uA',
}.get(val, 'none')
def extractLeemParam(self, startPos) -> dict:
"""Extracts the dictionary of a specific parameter starting at startPos"""
#device = {'number':0, 'name':'', 'units':'', 'value':0}
devNr = self.leemData[startPos] & 0x7f #removed the first bit!
if devNr < 100:
#normal device
endName = self.zeroList[bs.bisect_left(self.zeroList, startPos+1)]
device = {'number':0, 'name':'', 'units':'', 'value':0}
device['number'] = int(devNr)
device['name'] = bytes(self.leemData[startPos+1:endName-1]).decode("utf-8")
device['units'] = self.getUnits(bytes(self.leemData[endName-1:endName]).decode("utf-8"))
endVal = endName+5
device['value'] = struct.unpack('f',self.leemData[endName+1:endName+5])[0]
return device, endVal
elif devNr == 100:
#Mitutoyo micrometer readout
device = {'number':0, 'name':'Mitutoyo', 'units':'mm', 'value':0}
device['number'] = int(devNr)
valX = struct.unpack('f',self.leemData[startPos+1:startPos+5])[0]
valY = struct.unpack('f',self.leemData[startPos+1:startPos+5])[0]
device['value'] = str(valX)+','+str(valY)
endPos = startPos+9
return device, endPos
elif devNr == 101:
#FOV
print("Old FoV")
print("############################################")
print("### NOT TESTED - device 101 - NOT TESTED ###")
print("############################################")
endName = self.zeroList[bs.bisect_left(self.zeroList, startPos+1)]
device = {'number':0, 'name':'', 'units':'', 'value':0}
device['number'] = int(devNr)
device['name'] = 'FOV='+bytes(self.leemData[startPos+1:endName-1]).decode("utf-8")
endVal = endName+4
device['value'] = struct.unpack('f',self.leemData[endName+1:endName+5])[0]
return device, endVal
elif devNr == 102:
#Varian controller #1
print("Varian controller #1")
print("############################################")
print("### NOT TESTED - device 102 - NOT TESTED ###")
print("############################################")
elif devNr == 103:
#Varian controller #2
print("Varian controller #2")
print("############################################")
print("### NOT TESTED - device 103 - NOT TESTED ###")
print("############################################")
elif devNr == 104:
#Camera exposure
device = {'number':0, 'name':'', 'units':'', 'value':0}
device['number'] = int(devNr)
device['units'] = 'seconds'
device['value'] = struct.unpack('f',self.leemData[startPos+1:startPos+5])[0]
b1 = self.leemData[startPos+5]
b2 = self.leemData[startPos+6]
if b1 == 255:
avgType = '(sliding average)'
elif b1 == 0:
avgType = '(average off)'
else:
avgType = '(average '+str(b2)+' images)'
device['name'] = 'Camera exposure '+avgType
endPos = startPos+7
return device, endPos
elif devNr == 105:
#Title
endName = self.zeroList[bs.bisect_left(self.zeroList, startPos+1)]
if endName == startPos+1:
device = {'number':int(devNr), 'name':'Title', 'units':'none', 'value':''}
endVal = startPos+2
else:
device = {'number':int(devNr), 'name':'Title', 'units':'none', 'value':bytes(self.leemData[startPos+1:endName]).decode("utf-8")}
endVal = endName
return device, endVal
elif (devNr >= 106) and (devNr <= 109):
device = {'number':0, 'name':'', 'units':'', 'value':0}
device['number'] = int(devNr)
endName =self.zeroList[bs.bisect_left(self.zeroList, startPos)]
device['name'] = bytes(self.leemData[startPos+1:endName]).decode("utf-8")
endUnits = self.zeroList[bs.bisect_left(self.zeroList, endName+1)]
device['units'] = bytes(self.leemData[endName+1:endUnits]).decode("utf-8")
device['value'] = struct.unpack('f',self.leemData[endUnits+1:endUnits+5])[0]
endPos = endUnits+5
return device, endPos
elif devNr == 110:
#FoV
endName = self.zeroList[bs.bisect_left(self.zeroList, startPos+1)]
device = {'number':0, 'name':'', 'units':'', 'value':0}
device['number'] = int(devNr)
fov = self.leemData[startPos+1:endName]
if 181 in fov:
pos = fov.index(181)
device['name'] = 'FOV='+bytes(fov[0:pos]).decode("utf-8")+'\u03BC'+bytes(fov[pos+1:-1]).decode("utf-8")
else:
device['name'] = 'FOV='+bytes(fov).decode("utf-8")
endVal = endName+5
device['value'] = struct.unpack('f',self.leemData[endName+1:endName+5])[0]
return device, endVal
elif devNr == 111:
#PhiTheta
print("Phi/Theta")
print("############################################")
print("### NOT TESTED - device 111 - NOT TESTED ###")
print("############################################")
elif devNr == 112:
#Spin
print("Spin")
print("############################################")
print("### NOT TESTED - device 112 - NOT TESTED ###")
print("############################################")
elif devNr == 113:
#FoV Rotation (from LEEM presets)
print("FoV Rotation (from LEEM presets)")
print("############################################")
print("### NOT TESTED - device 113 - NOT TESTED ###")
print("############################################")
elif devNr == 114:
#Mirror state
print("Mirror state")
print("############################################")
print("### NOT TESTED - device 114 - NOT TESTED ###")
print("############################################")
elif devNr == 115:
#MCP screen voltage in kV
print("MCP screen voltage in kV")
print("############################################")
print("### NOT TESTED - device 115 - NOT TESTED ###")
print("############################################")
elif devNr == 116:
#MCP channelplate voltage in kV
print("MCP channelplate voltage in kV")
print("############################################")
print("### NOT TESTED - device 116 - NOT TESTED ###")
print("############################################")
elif (devNr >= 120) and (devNr <= 130):
#additional gauges
print("additional gauges")
return 0,startPos
#Usage:
#
#self.fn = r'K:\Data\SMART-2\2019\0507_HM_MP_TS_FU-Berlin\20190507a001.dat'
#for p in lp.paramList:
# print(p)
#
#img = readUview(fileContent, fh, ih)
#import matplotlib.pyplot as plt
#plt.imshow(img, cmap=plt.cm.gray) | StarcoderdataPython |
148953 | <filename>astroquery/simbad/__init__.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The SIMBAD query tool creates a `script query
<http://simbad.u-strasbg.fr/simbad/sim-fscript>`__ that returns VOTable XML
data that is then parsed into a :class:`~astroquery.simbad.core.SimbadResult` object.
This object then parses the data and returns a table parsed with `astropy.io.votable.parse`.
"""
from astropy.config import ConfigurationItem

SIMBAD_SERVER = ConfigurationItem('simbad_server',
                                  ['simbad.u-strasbg.fr', 'simbad.harvard.edu'],
                                  'Name of the SIMBAD mirror to use.')
SIMBAD_TIMEOUT = ConfigurationItem('timeout', 60, 'time limit for connecting to Simbad server')

# 0 defaults to the maximum limit
ROW_LIMIT = ConfigurationItem('row_limit', 0, 'maximum number of rows that will be fetched from the result.')

from .core import Simbad

__all__ = ['Simbad']
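
# Added illustration (not part of the original __init__.py): a typical query with the
# class exported above -- Simbad.query_object returns an astropy Table parsed from the
# VOTable response (the object name "M31" is just an example):
#
#     from astroquery.simbad import Simbad
#     result_table = Simbad.query_object("M31")
#     print(result_table)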
| StarcoderdataPython |
1641880 | <gh_stars>0
# Natural Language Toolkit utilities used in classifier module, should be migrated to main utilities later
# Author: <NAME> <sumukh dot ghodke at gmail dot com>
#
# URL: <http://nltk.sf.net>
# This software is distributed under GPL, for license information see LICENSE.TXT
import UserList, math


class StatList(UserList.UserList):
    def __init__(self, values=None):
        UserList.UserList.__init__(self, values)

    def mean(self):
        if len(self.data) == 0: return 0
        return float(sum([each for each in self.data])) / len(self.data)

    def variance(self):
        _mean = self.mean()
        if len(self.data) < 2: return 0
        return float(sum([pow((each - _mean), 2) for each in self.data])) / (len(self.data) - 1)

    def std_dev(self):
        return math.sqrt(self.variance())
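
# Added illustration (not part of the original NLTK utility): the variance uses a
# sample (n - 1) denominator, so for a small worked example:
#     StatList([1, 2, 3]).mean()      -> 2.0
#     StatList([1, 2, 3]).variance()  -> 1.0   # (1 + 0 + 1) / 2
#     StatList([1, 2, 3]).std_dev()   -> 1.0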


def int_array_to_string(int_array):
    return ','.join([str(each) for each in int_array])
| StarcoderdataPython |
3203994 | <filename>collector_v2.py
# If you fully understand this code, you are already halfway to building the bot.
print("The collector program has started!")

# Ctrl + Alt + left/right arrow key: jump back to the previous cursor position.
# The next three lines -> the packages needed to use the MySQL database -> no need to
# memorize them! Whenever you want to use MySQL later, just copy and paste these three lines.
from sqlalchemy import create_engine
import pymysql
pymysql.install_as_MySQLdb()

# -> Use this form if you only want to pull in the Openapi class from the openapi.py
#    source file -> i.e. you do not want the libraries imported inside openapi!
# from openapi import Openapi
# To bring in every function, library and class from the openapi.py source file, do it like this!
from openapi import *
# get_item: module that fetches the list of stock items
from get_item import StockItem


class Collector:
    def __init__(self):
        print("Entered the __init__ function.")
        self.engine_bot = None
        # Create the self.api object (an instance of the Openapi class).
        # Creating self.api through the Openapi() class launches the OpenAPI program
        # and links it to the brokerage account.
        self.api = Openapi()
        # Create the self.item object (an instance of the StockItem class).
        # It fetches the KOSPI and KOSDAQ stock lists.
        self.item = StockItem()

    def db_setting(self, db_name, db_id, db_passwd, db_ip, db_port):
        print("Entered the db_setting function.")
        # This is how we connect to the MySQL database.
        self.engine_bot = create_engine("mysql+mysqldb://" + db_id + ":" + db_passwd + "@"
                                        + db_ip + ":" + db_port + "/" + db_name, encoding='utf-8')
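
    # Added note (not part of the original tutorial): with placeholder values, the
    # string assembled above becomes a standard SQLAlchemy URL such as
    #     mysql+mysqldb://user:password@127.0.0.1:3306/mydb
    # (the user, password, host, port and database name here are hypothetical examples).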
    def print_stock_data(self):
        print("Entered the get_stock_data function.")
        # self.item: an instance of the StockItem class
        # self.item.code_df_kospi: an attribute of the self.item object
        print("KOSPI stock list !!!")
        print(self.item.code_df_kospi)
        # self.item.code_df_kosdaq: an attribute of the self.item object
        print("KOSDAQ stock list !!!")
        print(self.item.code_df_kosdaq)
        # Ctrl + click into the api functions below to see how they are used.
        # self.api: an instance of the Openapi class
        # get_total_data: a method of the Openapi class
        total_data = self.api.get_total_data('005930', '20200424')
        print("total_data: !!!")
        print(total_data)
        # get_one_day_option_data: a method of the Openapi class
        one_data = self.api.get_one_day_option_data('005930', '20200424', 'close')
        print("one_data: !!!")
        print(one_data)


# __name__? => a built-in variable that holds the current module's name. We run the
# collector file directly, so this file's __name__ holds "__main__".
# When openapi.py is only referenced via `import openapi`, that module's __name__ holds "openapi".
# If you run the openapi file itself, it is not being imported, so its __name__ holds "__main__".
print("collector_v2.py __name__ is?: ", __name__)

if __name__ == "__main__":
    print("Entered __main__.")
    # The line below is the one-liner needed to use the Kiwoom Securities OpenAPI!
    app = QApplication(sys.argv)
    # c = Collector() creates c, an instance of the Collector class.
    # As soon as the class is instantiated, its __init__ function runs.
    c = Collector()
    c.print_stock_data()
| StarcoderdataPython |
129899 | ##########################################################################
#
# Copyright (c) 2015, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of <NAME> nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import shutil
import unittest
import six
import imath
import IECore
import Gaffer
import GafferTest
import GafferImage
import GafferImageTest
class ImageReaderTest( GafferImageTest.ImageTestCase ) :
fileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/circles.exr" )
colorSpaceFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/circles_as_cineon.exr" )
offsetDataWindowFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/rgb.100x100.exr" )
jpgFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/circles.jpg" )
largeFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/colorbars_max_clamp.exr" )
def setUp( self ) :
GafferImageTest.ImageTestCase.setUp( self )
self.__defaultColorSpaceFunction = GafferImage.ImageReader.getDefaultColorSpaceFunction()
def tearDown( self ) :
GafferImageTest.ImageTestCase.tearDown( self )
GafferImage.ImageReader.setDefaultColorSpaceFunction( self.__defaultColorSpaceFunction )
def test( self ) :
n = GafferImage.ImageReader()
n["fileName"].setValue( self.fileName )
oiio = GafferImage.OpenImageIOReader()
oiio["fileName"].setValue( self.fileName )
self.assertEqual( n["out"]["format"].getValue(), oiio["out"]["format"].getValue() )
self.assertEqual( n["out"]["dataWindow"].getValue(), oiio["out"]["dataWindow"].getValue() )
self.assertEqual( n["out"]["metadata"].getValue(), oiio["out"]["metadata"].getValue() )
self.assertEqual( n["out"]["deep"].getValue(), oiio["out"]["deep"].getValue() )
self.assertEqual( n["out"]["channelNames"].getValue(), oiio["out"]["channelNames"].getValue() )
self.assertEqual( n["out"].sampleOffsets( imath.V2i( 0 ) ), oiio["out"].sampleOffsets( imath.V2i( 0 ) ) )
self.assertEqual( n["out"].channelData( "R", imath.V2i( 0 ) ), oiio["out"].channelData( "R", imath.V2i( 0 ) ) )
self.assertImagesEqual( n["out"], oiio["out"] )
def testUnspecifiedFilename( self ) :
n = GafferImage.ImageReader()
n["out"]["channelNames"].getValue()
self.assertTrue( GafferImage.BufferAlgo.empty( n["out"]['dataWindow'].getValue() ) )
def testChannelDataHashes( self ) :
# Test that two tiles within the same image have different hashes.
n = GafferImage.ImageReader()
n["fileName"].setValue( self.largeFileName )
h1 = n["out"].channelData( "R", imath.V2i( 0 ) ).hash()
h2 = n["out"].channelData( "R", imath.V2i( GafferImage.ImagePlug().tileSize() ) ).hash()
self.assertNotEqual( h1, h2 )
def testColorSpaceOverride( self ) :
exrReader = GafferImage.ImageReader()
exrReader["fileName"].setValue( self.fileName )
exrReader["colorSpace"].setValue( "Cineon" )
colorSpaceOverrideReader = GafferImage.ImageReader()
colorSpaceOverrideReader["fileName"].setValue( self.colorSpaceFileName )
exrImage = exrReader["out"]
colorSpaceOverrideImage = colorSpaceOverrideReader["out"]
self.assertImagesEqual( colorSpaceOverrideImage, exrImage, ignoreMetadata = True, maxDifference = 0.005 )
def testJpgRead( self ) :
exrReader = GafferImage.ImageReader()
exrReader["fileName"].setValue( self.fileName )
jpgReader = GafferImage.ImageReader()
jpgReader["fileName"].setValue( self.jpgFileName )
self.assertImagesEqual( exrReader["out"], jpgReader["out"], ignoreMetadata = True, maxDifference = 0.001 )
def testSupportedExtensions( self ) :
self.assertEqual( GafferImage.ImageReader.supportedExtensions(), GafferImage.OpenImageIOReader.supportedExtensions() )
def testFileRefresh( self ) :
testFile = self.temporaryDirectory() + "/refresh.exr"
shutil.copyfile( self.fileName, testFile )
reader = GafferImage.ImageReader()
reader["fileName"].setValue( testFile )
image1 = GafferImage.ImageAlgo.image( reader["out"] )
# even though we've change the image on disk, gaffer will
# still have the old one in its cache.
shutil.copyfile( self.jpgFileName, testFile )
self.assertEqual( GafferImage.ImageAlgo.image( reader["out"] ), image1 )
# until we force a refresh
reader["refreshCount"].setValue( reader["refreshCount"].getValue() + 1 )
self.assertNotEqual( GafferImage.ImageAlgo.image( reader["out"] ), image1 )
def testNonexistentFiles( self ) :
reader = GafferImage.ImageReader()
reader["fileName"].setValue( "wellIDontExist.exr" )
six.assertRaisesRegex( self, RuntimeError, ".*wellIDontExist.exr.*", reader["out"]["format"].getValue )
six.assertRaisesRegex( self, RuntimeError, ".*wellIDontExist.exr.*", reader["out"]["dataWindow"].getValue )
six.assertRaisesRegex( self, RuntimeError, ".*wellIDontExist.exr.*", reader["out"]["metadata"].getValue )
six.assertRaisesRegex( self, RuntimeError, ".*wellIDontExist.exr.*", reader["out"]["channelNames"].getValue )
six.assertRaisesRegex( self, RuntimeError, ".*wellIDontExist.exr.*", reader["out"].channelData, "R", imath.V2i( 0 ) )
six.assertRaisesRegex( self, RuntimeError, ".*wellIDontExist.exr.*", GafferImage.ImageAlgo.image, reader["out"] )
def testMissingFrameMode( self ) :
testSequence = IECore.FileSequence( self.temporaryDirectory() + "/incompleteSequence.####.exr" )
shutil.copyfile( self.fileName, testSequence.fileNameForFrame( 1 ) )
shutil.copyfile( self.offsetDataWindowFileName, testSequence.fileNameForFrame( 3 ) )
reader = GafferImage.ImageReader()
reader["fileName"].setValue( testSequence.fileName )
oiio = GafferImage.OpenImageIOReader()
oiio["fileName"].setValue( testSequence.fileName )
def assertMatch() :
self.assertEqual( reader["out"]["format"].getValue(), oiio["out"]["format"].getValue() )
self.assertEqual( reader["out"]["dataWindow"].getValue(), oiio["out"]["dataWindow"].getValue() )
self.assertEqual( reader["out"]["metadata"].getValue(), oiio["out"]["metadata"].getValue() )
self.assertEqual( reader["out"]["channelNames"].getValue(), oiio["out"]["channelNames"].getValue() )
# It is only valid to query the data inside the data window
if not GafferImage.BufferAlgo.empty( reader["out"]["dataWindow"].getValue() ):
self.assertEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), oiio["out"].channelData( "R", imath.V2i( 0 ) ) )
self.assertImagesEqual( reader["out"], oiio["out"] )
context = Gaffer.Context()
# set to a missing frame
context.setFrame( 2 )
# everything throws
reader["missingFrameMode"].setValue( GafferImage.ImageReader.MissingFrameMode.Error )
with context :
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["format"].getValue )
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["dataWindow"].getValue )
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["metadata"].getValue )
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["channelNames"].getValue )
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"].channelData, "R", imath.V2i( 0 ) )
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", GafferImage.ImageAlgo.image, reader["out"] )
# Hold mode matches OpenImageIOReader
reader["missingFrameMode"].setValue( GafferImage.ImageReader.MissingFrameMode.Hold )
oiio["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Hold )
with context :
assertMatch()
# Black mode matches OpenImageIOReader
reader["missingFrameMode"].setValue( GafferImage.ImageReader.MissingFrameMode.Black )
oiio["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Black )
with context :
assertMatch()
# set to a different missing frame
context.setFrame( 4 )
# Hold mode matches OpenImageIOReader
reader["missingFrameMode"].setValue( GafferImage.ImageReader.MissingFrameMode.Hold )
oiio["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Hold )
with context :
assertMatch()
# Black mode matches OpenImageIOReader
reader["missingFrameMode"].setValue( GafferImage.ImageReader.MissingFrameMode.Black )
oiio["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Black )
with context :
assertMatch()
# set to a missing frame before the start of the sequence
context.setFrame( 0 )
# Hold mode matches OpenImageIOReader
reader["missingFrameMode"].setValue( GafferImage.ImageReader.MissingFrameMode.Hold )
oiio["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Hold )
with context :
assertMatch()
# Black mode matches OpenImageIOReader
reader["missingFrameMode"].setValue( GafferImage.ImageReader.MissingFrameMode.Black )
oiio["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Black )
with context :
assertMatch()
# explicit fileNames do not support MissingFrameMode
reader["fileName"].setValue( testSequence.fileNameForFrame( 0 ) )
reader["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Hold )
with context :
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", GafferImage.ImageAlgo.image, reader["out"] )
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["format"].getValue )
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["dataWindow"].getValue )
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["metadata"].getValue )
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["channelNames"].getValue )
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"].channelData, "R", imath.V2i( 0 ) )
reader["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Black )
oiio["missingFrameMode"].setValue( GafferImage.OpenImageIOReader.MissingFrameMode.Black )
with context :
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", GafferImage.ImageAlgo.image, reader["out"] )
six.assertRaisesRegex( self, RuntimeError, ".*incompleteSequence.*.exr.*", reader["out"]["format"].getValue )
self.assertEqual( reader["out"]["dataWindow"].getValue(), oiio["out"]["dataWindow"].getValue() )
self.assertEqual( reader["out"]["metadata"].getValue(), oiio["out"]["metadata"].getValue() )
self.assertEqual( reader["out"]["channelNames"].getValue(), oiio["out"]["channelNames"].getValue() )
self.assertTrue( GafferImage.BufferAlgo.empty( reader["out"]['dataWindow'].getValue() ) )
self.assertTrue( GafferImage.BufferAlgo.empty( oiio["out"]['dataWindow'].getValue() ) )
def testFrameRangeMask( self ) :
testSequence = IECore.FileSequence( self.temporaryDirectory() + "/incompleteSequence.####.exr" )
shutil.copyfile( self.fileName, testSequence.fileNameForFrame( 1 ) )
shutil.copyfile( self.fileName, testSequence.fileNameForFrame( 3 ) )
shutil.copyfile( self.offsetDataWindowFileName, testSequence.fileNameForFrame( 5 ) )
shutil.copyfile( self.offsetDataWindowFileName, testSequence.fileNameForFrame( 7 ) )
reader = GafferImage.ImageReader()
reader["fileName"].setValue( testSequence.fileName )
reader["missingFrameMode"].setValue( GafferImage.ImageReader.MissingFrameMode.Hold )
oiio = GafferImage.OpenImageIOReader()
oiio["fileName"].setValue( testSequence.fileName )
oiio["missingFrameMode"].setValue( GafferImage.ImageReader.MissingFrameMode.Hold )
context = Gaffer.Context()
# make sure the tile we're comparing isn't black
# so we can tell if BlackOutside is working.
blackTile = IECore.FloatVectorData( [ 0 ] * GafferImage.ImagePlug.tileSize() * GafferImage.ImagePlug.tileSize() )
with context :
for i in range( 1, 11 ) :
context.setFrame( i )
self.assertNotEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), blackTile )
def assertBlack() :
# format and data window still match
self.assertEqual( reader["out"]["format"].getValue(), oiio["out"]["format"].getValue() )
self.assertEqual( reader["out"]["dataWindow"].getValue(), oiio["out"]["dataWindow"].getValue() )
self.assertNotEqual( GafferImage.ImageAlgo.image( reader["out"] ), GafferImage.ImageAlgo.image( oiio["out"] ) )
# the metadata and channel names are at the defaults
self.assertEqual( reader["out"]["metadata"].getValue(), reader["out"]["metadata"].defaultValue() )
self.assertEqual( reader["out"]["channelNames"].getValue(), reader["out"]["channelNames"].defaultValue() )
# channel data is black
self.assertEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), blackTile )
def assertMatch() :
self.assertEqual( reader["out"]["format"].getValue(), oiio["out"]["format"].getValue() )
self.assertEqual( reader["out"]["dataWindow"].getValue(), oiio["out"]["dataWindow"].getValue() )
self.assertEqual( reader["out"]["metadata"].getValue(), oiio["out"]["metadata"].getValue() )
self.assertEqual( reader["out"]["channelNames"].getValue(), oiio["out"]["channelNames"].getValue() )
self.assertEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), oiio["out"].channelData( "R", imath.V2i( 0 ) ) )
self.assertImagesEqual( reader["out"], oiio["out"] )
def assertHold( holdFrame ) :
context = Gaffer.Context()
context.setFrame( holdFrame )
with context :
holdImage = GafferImage.ImageAlgo.image( reader["out"] )
holdFormat = reader["out"]["format"].getValue()
holdDataWindow = reader["out"]["dataWindow"].getValue()
holdMetadata = reader["out"]["metadata"].getValue()
holdChannelNames = reader["out"]["channelNames"].getValue()
holdTile = reader["out"].channelData( "R", imath.V2i( 0 ) )
self.assertEqual( reader["out"]["format"].getValue(), holdFormat )
self.assertEqual( reader["out"]["dataWindow"].getValue(), holdDataWindow )
self.assertEqual( reader["out"]["metadata"].getValue(), holdMetadata )
self.assertEqual( reader["out"]["channelNames"].getValue(), holdChannelNames )
self.assertEqual( reader["out"].channelData( "R", imath.V2i( 0 ) ), holdTile )
self.assertEqual( GafferImage.ImageAlgo.image( reader["out"] ), holdImage )
reader["start"]["frame"].setValue( 4 )
reader["end"]["frame"].setValue( 7 )
# frame 0 errors, match from 1-10
reader["start"]["mode"].setValue( GafferImage.ImageReader.FrameMaskMode.None_ )
reader["end"]["mode"].setValue( GafferImage.ImageReader.FrameMaskMode.None_ )
with context :
for i in range( 0, 11 ) :
context.setFrame( i )
assertMatch()
# black from 0-3, match from 4-10
reader["start"]["mode"].setValue( GafferImage.ImageReader.FrameMaskMode.BlackOutside )
with context :
for i in range( 0, 4 ) :
context.setFrame( i )
assertBlack()
for i in range( 4, 11 ) :
context.setFrame( i )
assertMatch()
# black from 0-3, match from 4-7, black from 8-10
reader["end"]["mode"].setValue( GafferImage.ImageReader.FrameMaskMode.BlackOutside )
with context :
for i in range( 0, 4 ) :
context.setFrame( i )
assertBlack()
for i in range( 4, 8 ) :
context.setFrame( i )
assertMatch()
for i in range( 8, 11 ) :
context.setFrame( i )
assertBlack()
# hold frame 4 from 0-3, match from 4-7, black from 8-10
reader["start"]["mode"].setValue( GafferImage.ImageReader.FrameMaskMode.ClampToFrame )
with context :
for i in range( 0, 4 ) :
context.setFrame( i )
assertHold( 4 )
for i in range( 4, 8 ) :
context.setFrame( i )
assertMatch()
for i in range( 8, 11 ) :
context.setFrame( i )
assertBlack()
# hold frame 4 from 0-3, match from 4-7, hold frame 7 from 8-10
reader["end"]["mode"].setValue( GafferImage.ImageReader.FrameMaskMode.ClampToFrame )
with context :
for i in range( 0, 4 ) :
context.setFrame( i )
assertHold( 4 )
for i in range( 4, 8 ) :
context.setFrame( i )
assertMatch()
for i in range( 8, 11 ) :
context.setFrame( i )
assertHold( 7 )
def testDefaultColorSpaceFunctionArguments( self ) :
# Make a network to write and read an image
# in various formats.
c = GafferImage.Constant()
c["format"].setValue( GafferImage.Format( 64, 64 ) )
w = GafferImage.ImageWriter()
w["in"].setInput( c["out"] )
r = GafferImage.ImageReader()
r["fileName"].setInput( w["fileName"] )
# Register a custom colorspace function that
# just captures its arguments.
capturedArguments = {}
def f( fileName, fileFormat, dataType, metadata ) :
capturedArguments.update(
{
"fileName" : fileName,
"fileFormat" : fileFormat,
"dataType" : dataType,
"metadata" : metadata,
}
)
return "linear"
GafferImage.ImageReader.setDefaultColorSpaceFunction( f )
# Verify that the correct arguments are passed for
# a variety of fileNames and dataTypes.
for ext, fileFormat, dataType in [
( "exr", "openexr", "half" ),
( "dpx", "dpx", "uint12" ),
( "TIFF", "tiff", "float" ),
( "tif", "tiff", "uint32" ),
] :
w["fileName"].setValue( "{0}/{1}.{2}".format( self.temporaryDirectory(), dataType, ext ) )
w[fileFormat]["dataType"].setValue( dataType )
w.execute()
capturedArguments.clear()
r["out"].channelData( "R", imath.V2i( 0 ) ) # Triggers call to color space function
self.assertEqual( len( capturedArguments ), 4 )
self.assertEqual( capturedArguments["fileName"], w["fileName"].getValue() )
self.assertEqual( capturedArguments["fileFormat"], fileFormat )
self.assertEqual( capturedArguments["dataType"], dataType )
self.assertEqual( capturedArguments["metadata"], r["out"]["metadata"].getValue() )
def testDisabling( self ) :
reader = GafferImage.ImageReader()
reader["fileName"].setValue( self.fileName )
reader["enabled"].setValue( False )
constant = GafferImage.Constant()
constant["enabled"].setValue( False )
self.assertImagesEqual( reader["out"], constant["out"] )
if __name__ == "__main__":
unittest.main()
| StarcoderdataPython |
1679659 | <gh_stars>0
import pyperclip
import unittest # Importing the unittest module
from socialAccounts import SocialAccounts
class TestSocialAccounts(unittest.TestCase):
'''
Test class that we define and test methods to use in the SocialAccount class
'''
def tearDown(self):
'''
this is to ensure each time the app is executed the list is clean
'''
SocialAccounts.social_accounts_list=[]
def setUp(self):
'''
this setup method is run before each test case is executed
'''
self.new_social_account= SocialAccounts("facebook", "phoebe", "facebookPassword", "4") #creating the social account object
def test_init(self):
'''
testing to ensure the social account object is initialized correctly
'''
self.assertEqual(self.new_social_account.social_account,"facebook")
self.assertEqual(self.new_social_account.social_account_username,"phoebe")
self.assertEqual(self.new_social_account.social_account_password,"<PASSWORD>")
self.assertEqual(self.new_social_account.password_length,"4")
def test_save_social_account(self):
'''
test case to see if the social account details are being saved to the list
'''
self.new_social_account.save_social_account() #saving the social account
self.assertEqual(len(SocialAccounts.social_accounts_list),1)
def test_save_multiple_social_account(self):
'''
test to check if we can save multiple social account details
'''
self.new_social_account.save_social_account()
test_social_account = SocialAccounts("video","phoebe", "pass<PASSWORD>", "4") #new account to save
test_social_account.save_social_account()
self.assertEqual(len(SocialAccounts.social_accounts_list),2)
def test_delete_social_account(self):
'''
this is to ensure we can remove a social account from our list
'''
self.new_social_account.save_social_account()
test_social_account = SocialAccounts("video","phoebe", "passvideo", "4") #new account to save
test_social_account.save_social_account()
self.new_social_account.delete_social_account()
self.assertEqual(len(SocialAccounts.social_accounts_list),1)
def test_find_account_by_name(self):
'''
        this is to enable users to search their credentials by a social account name, e.g. twitter, and display the logins
        '''
self.new_social_account.save_social_account()
test_social_account = SocialAccounts("video","phoebe", "<PASSWORD>", "4") #new account to save
test_social_account.save_social_account()
found_social_account= SocialAccounts.find_account_by_name('video')
self.assertEqual(found_social_account.social_account_username, test_social_account.social_account_username)
def test_social_account_exists(self):
'''
        we want to return a boolean indicating whether the social account exists
'''
self.new_social_account.save_social_account()
test_social_account = SocialAccounts("video","phoebe", "<PASSWORD>", "4") #new account to save
test_social_account.save_social_account()
account_exists= SocialAccounts.social_account_exists("video")
self.assertTrue(account_exists)
def test_display_all_social_accounts(self):
'''
method to show all the saved social accounts
'''
self.assertEqual(SocialAccounts.display_social_accounts(), SocialAccounts.social_accounts_list)
# def test_copy_username(self):
# '''
# Test to confirm we are copying the username from a found account
# '''
# self.new_social_account.save_social_account()
# SocialAccounts.copy_username("facebook")
#
# self.assertEqual(self.new_social_account.social_account_username,pyperclip.paste())
if __name__=='__main__':
unittest.main()
| StarcoderdataPython |
1721267 | # GENERATED BY KOMAND SDK - DO NOT EDIT
from .execute_script.action import ExecuteScript
from .powershell_string.action import PowershellString
| StarcoderdataPython |
3354983 | <filename>sdk/python/pulumi_google_native/bigquerydatatransfer/v1/outputs.py
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'EmailPreferencesResponse',
'ScheduleOptionsResponse',
'UserInfoResponse',
]
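# Note on the wrapper classes below: each *Response type subclasses dict and
# exposes its fields through snake_case property getters (e.g.
# enable_failure_email). The __key_warning helpers intercept dict-style access
# that still uses the original camelCase API keys (e.g. "enableFailureEmail")
# and log a warning pointing callers at the corresponding property getter.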
@pulumi.output_type
class EmailPreferencesResponse(dict):
"""
Represents preferences for sending email notifications for transfer run events.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "enableFailureEmail":
suggest = "enable_failure_email"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in EmailPreferencesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
EmailPreferencesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
EmailPreferencesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
enable_failure_email: bool):
"""
Represents preferences for sending email notifications for transfer run events.
:param bool enable_failure_email: If true, email notifications will be sent on transfer run failures.
"""
pulumi.set(__self__, "enable_failure_email", enable_failure_email)
@property
@pulumi.getter(name="enableFailureEmail")
def enable_failure_email(self) -> bool:
"""
If true, email notifications will be sent on transfer run failures.
"""
return pulumi.get(self, "enable_failure_email")
@pulumi.output_type
class ScheduleOptionsResponse(dict):
"""
Options customizing the data transfer schedule.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "disableAutoScheduling":
suggest = "disable_auto_scheduling"
elif key == "endTime":
suggest = "end_time"
elif key == "startTime":
suggest = "start_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ScheduleOptionsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ScheduleOptionsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ScheduleOptionsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
disable_auto_scheduling: bool,
end_time: str,
start_time: str):
"""
Options customizing the data transfer schedule.
        :param bool disable_auto_scheduling: If true, automatic scheduling of data transfer runs for this configuration will be disabled. The runs can be started on an ad-hoc basis using the StartManualTransferRuns API. When automatic scheduling is disabled, the TransferConfig.schedule field will be ignored.
        :param str end_time: Defines time to stop scheduling transfer runs. A transfer run cannot be scheduled at or after the end time. The end time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
        :param str start_time: Specifies time to start scheduling transfer runs. The first run will be scheduled at or after the start time according to a recurrence pattern defined in the schedule string. The start time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
"""
pulumi.set(__self__, "disable_auto_scheduling", disable_auto_scheduling)
pulumi.set(__self__, "end_time", end_time)
pulumi.set(__self__, "start_time", start_time)
@property
@pulumi.getter(name="disableAutoScheduling")
def disable_auto_scheduling(self) -> bool:
"""
        If true, automatic scheduling of data transfer runs for this configuration will be disabled. The runs can be started on an ad-hoc basis using the StartManualTransferRuns API. When automatic scheduling is disabled, the TransferConfig.schedule field will be ignored.
"""
return pulumi.get(self, "disable_auto_scheduling")
@property
@pulumi.getter(name="endTime")
def end_time(self) -> str:
"""
        Defines time to stop scheduling transfer runs. A transfer run cannot be scheduled at or after the end time. The end time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
"""
return pulumi.get(self, "end_time")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> str:
"""
        Specifies time to start scheduling transfer runs. The first run will be scheduled at or after the start time according to a recurrence pattern defined in the schedule string. The start time can be changed at any moment. The time when a data transfer can be triggered manually is not limited by this option.
"""
return pulumi.get(self, "start_time")
@pulumi.output_type
class UserInfoResponse(dict):
"""
Information about a user.
"""
def __init__(__self__, *,
email: str):
"""
Information about a user.
:param str email: E-mail address of the user.
"""
pulumi.set(__self__, "email", email)
@property
@pulumi.getter
def email(self) -> str:
"""
E-mail address of the user.
"""
return pulumi.get(self, "email")
| StarcoderdataPython |
1647373 | <reponame>unistra/schedulesy<filename>schedulesy/apps/ade_api/migrations/0007_resource_parent.py
# Generated by Django 2.1.12 on 2019-09-26 15:32
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ade_api', '0006_auto_20190926_1502'),
]
operations = [
migrations.AddField(
model_name='resource',
name='parent',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='ade_api.Resource'),
),
]
| StarcoderdataPython |
3265548 | <reponame>shersonb/codecfactory<filename>codecfactory/exc.py
class DecodeError(BaseException):
def __init__(self, codec, message, offset=None, exc=None):
self.codec = codec
self.offset = offset
self.exc = exc
super(DecodeError, self).__init__(message)
class NoMatch(DecodeError):
"""
Used by a decoder only if it immediately decides the beginning of data is not what the decoder expects.
Should NOT be used after a decoder has already started decoding any data structure with any sort of complexity.
"""
def __init__(self, codec):
super(NoMatch, self).__init__(codec, "No Match.")
class UnexpectedEndOfData(DecodeError):
def __init__(self, codec, message="Unexpected end of data."):
super(UnexpectedEndOfData, self).__init__(codec, message)
class ExcessData(DecodeError):
def __init__(self, codec, offset):
super(ExcessData, self).__init__(codec, "Data continues past expected end.", offset)
class EncodeError(BaseException):
def __init__(self, codec, obj, message):
self.codec = codec
self.obj = obj
super(EncodeError, self).__init__(message)
class EncodeMatchError(EncodeError):
def __init__(self, codec, obj, message):
super(EncodeMatchError, self).__init__(codec, obj, message)
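# --- Illustrative usage sketch (added example; PrefixCodec is hypothetical and
# not part of codecfactory) --- NoMatch is raised when the data clearly is not
# meant for this codec, while UnexpectedEndOfData is raised once decoding has
# begun but the input runs out.
if __name__ == "__main__":
    class PrefixCodec(object):
        """Toy codec: expects b'P' followed by exactly one payload byte."""
        def decode(self, data):
            if not data or data[:1] != b"P":
                raise NoMatch(self)              # wrong data for this codec
            if len(data) < 2:
                raise UnexpectedEndOfData(self)  # started decoding, input ran out
            return data[1:2]
    codec = PrefixCodec()
    print(codec.decode(b"Px"))                   # -> b'x'
    try:
        codec.decode(b"Q")
    except NoMatch as exc:
        print("no match: %s" % exc)
    try:
        codec.decode(b"P")
    except UnexpectedEndOfData as exc:
        print("truncated: %s" % exc)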
| StarcoderdataPython |
1708993 | <reponame>gardar/ahvl<gh_stars>1-10
#
# import modules
#
from ahvl.options.base import OptionsBase
# assumed import: validate() below calls msg.fail(), but no msg object is defined
# in this module; AhvlMsg from ahvl.helper is presumed to provide it
from ahvl.helper import AhvlMsg
msg = AhvlMsg()
#
# OptionsGenerateSalt
#
class OptionsGenerateSalt(OptionsBase):
# set option prefix
def get_prefix(self):
# return option prefix
return "ahvl_generate_salt"
# set path
# useable variables:
# - {find}
# - {hostname}
def get_path(self):
return None
# set default options
def get_defaults(self):
# set default option values - dict
return {
'salt_chars' : 'itoa64', # salt charset
}
# calculate any remaining options
def get_appended(self):
# set shorthand
o = self.options
# return list of overide options or calculated options
return {}
# set required options
def get_required(self):
# return required options - list
return ['salt_chars',
]
def validate(self):
# set shorthand
o = self.options
#
# check allowed salt chars
#
allowed = ['itoa64', 'alnum']
if o['salt_chars'] not in allowed:
msg.fail("option [salt_chars] invalid; [{}] given, but expected one of {}".format(o['salt_chars'], allowed))
| StarcoderdataPython |
1777572 | <reponame>zfang-slim/PysitForPython3<filename>pysit/solvers/constant_density_acoustic/time/scalar/setup.py
from distutils.core import setup, Extension
import numpy
from Cython.Distutils import build_ext
import os
import os.path
#
os.environ["CC"] = "gcc-8"
os.environ["CXX"] = "g++-8"
setup(
cmdclass={'build_ext': build_ext},
ext_modules=[Extension("_constant_density_acoustic_time_scalar_cpp",
library_dirs=['./', '/usr/local/Cellar/gcc/8.2.0/lib/gcc/8'], #lib for MacOS lib64 for Linux , '/usr/lib'
language="c++",
sources=["cython_wrapper.pyx"],
include_dirs=[numpy.get_include()],
extra_compile_args=["-O3","-fopenmp","-ffast-math"],
                           libraries=["gomp"])]
)
| StarcoderdataPython |
3314881 | <gh_stars>0
from django_tables2 import RequestConfig
from django.shortcuts import render
from django.forms import modelformset_factory
from django.views.decorators.csrf import csrf_protect
from django.contrib.auth.decorators import login_required
from .tables import CashUpReportTable
from .commons import *
from .models import Invoice, PaymentType, CashUpReport, CashUpReportPaymentType, Branch, Payment, PaymentInvoice, Terminal
from django.contrib.auth.models import User
from django.db import models
from django.db.models import Sum
from django.http import HttpResponseRedirect, HttpResponse
from reportlab.pdfgen import canvas
from reportlab.lib.pagesizes import letter, A4
from reportlab.lib import colors
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
import os
import reportlab
import ctypes
from io import BytesIO
from decimal import Decimal
from textwrap import wrap
from django.utils import timezone
from num2words import num2words
CONST_branchid = 'branchid'
CONST_terminalid = 'terminalid'
CONST_font = 'Helvetica'
CONST_fontbold = CONST_font + '-Bold'
#method to retrieve Courier statementofacc list
@login_required
def cashuplist(request):
loggedusers = userselection(request)
branchselectlist = branchselection(request)
terminallist = terminalselection(request)
menubar = navbar(request)
branchid = request.session.get(CONST_branchid)
loguser = User.objects.get(id=request.session.get('userid'))
terminalid = request.session.get(CONST_terminalid)
if branchid == '-1':
cashupreport_list = CashUpReport.objects.all()
else:
if terminalid:
if terminalid == '-1':
cashupreport_list = CashUpReport.objects.filter(branch__id=branchid)
else:
cashupreport_list = CashUpReport.objects.filter(branch__id=branchid, terminal__id = terminalid)
else:
cashupreport_list = CashUpReport.objects.filter(branch__id=branchid)
final_CashUpReport_table = CashUpReportTable(cashupreport_list)
RequestConfig(request, paginate={'per_page': 25}).configure(final_CashUpReport_table)
context = {
'cashupreport': final_CashUpReport_table,
'nav_bar' : sorted(menubar.items()),
'branchselection': branchselectlist,
'terminalselection': terminallist,
'loggedusers' : loggedusers,
'title': "Cash up report",
'isedit' : True,
'issuperuser' : loguser.is_superuser,
'isall': branchid == '-1',
'statusmsg' : request.GET.get('msg'),
'header': "Cash up report",
}
return render(request, 'cashup.html', context)
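# viewcashupreport builds (or re-renders) a cash-up report as a PDF: when no
# existing report id is supplied it gathers every cash invoice and payment for
# the current branch/terminal since the previous report (or since the first
# recorded invoice/payment when none exists), adds the terminal float to the
# grand total, stores per-payment-type subtotals, percentages and counts as
# CashUpReportPaymentType rows, and then streams the generated PDF back.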
@login_required
def viewcashupreport(request):
branchid = request.session.get(CONST_branchid)
selectedbranch = Branch.objects.get(id=branchid)
loguser = User.objects.get(id=request.session.get('userid'))
terminalid = request.session.get(CONST_terminalid)
terminal = Terminal.objects.filter(id=terminalid).first()
try:
curid = request.GET.get('curid')
cureport = CashUpReport.objects.get(id=curid)
except:
cureport = None;
if cureport:
pass
elif request.method == "POST":
latestcashupreport = CashUpReport.objects.filter(branch__id=branchid, terminal__id = terminalid).order_by('-createtimestamp').first()
if latestcashupreport:
invoicelist = Invoice.objects.filter(branch__id = branchid, createtimestamp__gte = latestcashupreport.createtimestamp, invoicetype__name="Cash")
paymentlist = Payment.objects.filter(customer__branch__id = branchid, terminal__id = terminalid, createtimestamp__gte = latestcashupreport.createtimestamp)
paymentinvoicelist = PaymentInvoice.objects.filter(payment__in=paymentlist)
sessionstart = latestcashupreport.createtimestamp
else:
invoicelist = Invoice.objects.filter(branch__id = branchid, terminal__id = terminalid, invoicetype__name="Cash")
paymentlist = Payment.objects.filter(customer__branch__id = branchid, terminal__id = terminalid)
paymentinvoicelist = PaymentInvoice.objects.filter(payment__in=paymentlist)
if invoicelist:
sessionstart = invoicelist.order_by('createtimestamp').first().createtimestamp
else:
paymentinvoice = paymentinvoicelist.first()
sessionstart = paymentinvoice.invoice.createtimestamp
if invoicelist or paymentinvoicelist:
if invoicelist:
earliestinvoice = invoicelist.order_by('invoiceno').first().invoiceno
else:
firstpayment = paymentinvoicelist.order_by('invoice__invoiceno').first()
earliestinvoice = firstpayment.invoice.invoiceno
try:
epaymentinvoiceid = paymentinvoicelist.order_by('invoice').first().invoice.invoiceno
if epaymentinvoiceid < earliestinvoice:
earliestinvoice = epaymentinvoiceid
except:
pass
if invoicelist:
latestinvoice = invoicelist.order_by('-invoiceno').first().invoiceno
else:
lastpayment = paymentinvoicelist.order_by('-invoice__invoiceno').first()
latestinvoice = firstpayment.invoice.invoiceno
try:
lpaymentinvoiceid = paymentinvoicelist.order_by('-invoice').first().invoice.invoiceno
if lpaymentinvoiceid > latestinvoice:
latestinvoice = lpaymentinvoiceid
except:
pass
totalpaymentinvoice = invoicelist.aggregate(Sum('total')).get('total__sum', 0.00)
totalpaymentpayment = paymentlist.aggregate(Sum('total')).get('total__sum', 0.00)
totalamt = 0;
if totalpaymentinvoice:
totalamt = totalamt + totalpaymentinvoice
if totalpaymentpayment:
totalamt = totalamt + totalpaymentpayment
if terminal and terminal.float:
totalamt = totalamt + terminal.float
cureport = CashUpReport(branch=selectedbranch,
terminal = terminal,
created_by = loguser,
sessiontimestamp = sessionstart, createtimestamp = timezone.now(),
invoicenofrom=earliestinvoice, invoicenoto=latestinvoice,
total = totalamt)
cureport.id = str(branchid) +'_' + cureport.createtimestamp.strftime("%d/%m/%Y %H:%M%p")
cureport.save()
paymenttypes = PaymentType.objects.all()
totalfrominvoice = totalamt - terminal.float
for paymenttype in paymenttypes:
invoicepaymenttype = invoicelist.filter(payment_type=paymenttype)
paymentpaymenttype = paymentlist.filter(payment_paymenttype=paymenttype)
if invoicepaymenttype or paymentpaymenttype:
totalpayment = 0
if invoicepaymenttype:
totalpayment = totalpayment + invoicepaymenttype.aggregate(Sum('total')).get('total__sum', 0.00)
if paymentpaymenttype:
totalpayment = totalpayment + paymentpaymenttype.aggregate(Sum('total')).get('total__sum', 0.00)
totalcount = invoicepaymenttype.count() + paymentpaymenttype.count()
percentagept = (float(totalpayment) / float(totalfrominvoice)) * 100
cashupreportpt = CashUpReportPaymentType(cashupreport=cureport,
payment_type=paymenttype,
total = totalpayment,
percentage = percentagept,
count = totalcount)
cashupreportpt.id = cureport.id + '_' + paymenttype.name
cashupreportpt.save()
else:
cureport = CashUpReport(branch=selectedbranch, terminal = terminal, created_by = loguser,
sessiontimestamp = timezone.now(), createtimestamp = timezone.now(),
total = terminal.float)
cureport.id = str(branchid) +'_' + cureport.createtimestamp.strftime("%d/%m/%Y %H:%M%p")
cureport.save()
cur_pdf = cashup_pdf(request, cureport)
return HttpResponse(cur_pdf, content_type='application/pdf')
def cashup_pdf(request, cashupreport):
response = HttpResponse(content_type='application/pdf')
filename = 'CashUp_' + cashupreport.sessiontimestamp.strftime('%Y-%m-%d') + '_' + cashupreport.createtimestamp.strftime('%Y-%m-%d')
response['Content-Disposition'] = 'attachment; filename="'+filename+'.pdf"'
cashupreportpaymenttype = CashUpReportPaymentType.objects.filter(cashupreport=cashupreport).order_by('payment_type__legend')
margin = 25;
totalwidth = 590;
totalheight = 820;
center = totalwidth / 2.0
buffer = BytesIO()
p = canvas.Canvas(buffer, pagesize=A4)
p.setFont(CONST_fontbold, 14)
# header
branch = cashupreport.branch
topmargin = 20;
headerstring = branch.owner.upper()
linecount = 12;
p.drawCentredString(center, totalheight - topmargin, headerstring)
p.setFont(CONST_font, 8)
datestring = cashupreport.createtimestamp.strftime('%d/%m/%Y')
datewidth = p.stringWidth(datestring, CONST_font, 8)
p.drawString(totalwidth-margin-datewidth, totalheight - topmargin, datestring)
p.setLineWidth(1)
lineheight = totalheight - topmargin- (linecount * 0.5)
p.line(margin, lineheight, totalwidth-margin, lineheight)
if branch.hasgst and branch.gstno:
line2string = 'B/N: ' + branch.registrationno + ' GST No:' + branch.gstno + ' ' + branch.address
else:
line2string = branch.address
p.drawCentredString(center, totalheight - topmargin - (linecount * 1.2), line2string)
branchcontact = '-'
branchfax = '-'
if branch.contact:
branchcontact = branch.contact
if branch.fax:
branchfax = branch.fax
contactinfo = "Tel: " + branchcontact + " Fax: " + branchfax
p.drawCentredString(center, totalheight - topmargin- (linecount * 2), contactinfo)
p.setFont(CONST_fontbold, 14)
p.drawCentredString(center, totalheight - topmargin- (linecount * 4) - topmargin, "Cash Up Report")
p.setFont(CONST_fontbold, 9)
p.drawString(margin, totalheight - topmargin- (linecount * 9), "Drawer")
p.drawString(margin, totalheight - topmargin- (linecount * 10), "Transactions")
p.drawString(margin, totalheight - topmargin- (linecount * 11), "Session Opened")
p.drawString(margin, totalheight - topmargin- (linecount * 12), "Report Run")
terminal = cashupreport.terminal
p.setFont(CONST_font, 9)
p.drawString(margin+ 100, totalheight - topmargin- (linecount * 9), terminal.name)
invoicerange = ''
if cashupreport.invoicenofrom and cashupreport.invoicenoto:
invoicefrom = cashupreport.invoicenofrom.split(branch.branch_code)[-1]
invoiceto = cashupreport.invoicenoto.split(branch.branch_code)[-1]
invoicerange = branch.branch_code + ' ' + str(int(invoicefrom)) + ' - ' + str(int(invoiceto))
p.drawString(margin+100, totalheight - topmargin- (linecount * 10), invoicerange)
p.drawString(margin+100, totalheight - topmargin- (linecount * 11), cashupreport.sessiontimestamp.strftime('%d/%m/%Y @%H:%M:%S%p'))
p.drawString(margin+100, totalheight - topmargin- (linecount * 12), cashupreport.createtimestamp.strftime('%d/%m/%Y @%H:%M:%S%p'))
p.setLineWidth(1)
linecount = 15
boxheight = linecount * (cashupreportpaymenttype.count() + 2 )
p.rect(margin, totalheight - topmargin- (linecount * 12) - boxheight, totalwidth-(margin*2), boxheight, stroke=True, fill=False )
p.line(margin, totalheight - topmargin- linecount * 13, totalwidth-margin, totalheight - topmargin- linecount * 13)
p.setFont(CONST_fontbold, 9)
tableheaderheight = totalheight - topmargin- (linecount * 12.7)
p.drawString(margin*2, tableheaderheight, "Legend")
p.drawString((margin*2)+60, tableheaderheight, "Payment Type")
p.drawString((margin*2)+340, tableheaderheight, "Expected RM")
p.drawString((margin*2)+430, tableheaderheight, "%")
p.drawString((margin*2)+470, tableheaderheight, "Count")
p.setFont(CONST_font, 9)
count = 1;
for cashup in cashupreportpaymenttype:
legend = cashup.payment_type.legend
paymenttype = cashup.payment_type.name
p.drawCentredString((margin*2)+15, tableheaderheight - (linecount * count), legend)
p.drawString((margin*2)+60, tableheaderheight - (linecount * count), paymenttype)
totalstring = "%.2f" % cashup.total
totalstringwidth = p.stringWidth(totalstring, CONST_font, 9)
p.drawString((margin*2)+395-totalstringwidth, tableheaderheight - (linecount * count), totalstring)
percentagestring = "%.2f" % cashup.percentage
p.drawCentredString((margin*2)+435, tableheaderheight - (linecount * count), percentagestring)
p.drawCentredString((margin*2)+485, tableheaderheight - (linecount * count), str(cashup.count))
count += 1;
p.drawCentredString((margin*2)+15, tableheaderheight - (linecount * count), '00')
p.drawString((margin*2)+60, tableheaderheight - (linecount * count), 'Float')
totalstring = "%.2f" % terminal.float
totalstringwidth = p.stringWidth(totalstring, CONST_font, 9)
p.drawString((margin*2)+395-totalstringwidth, tableheaderheight - (linecount * count), totalstring)
p.setFont(CONST_fontbold, 9)
p.drawString((margin*2)+60, tableheaderheight - (linecount * (count+1)), 'Total')
totalstring = "%.2f" % cashupreport.total
totalstringwidth = p.stringWidth(totalstring, CONST_font, 9)
p.drawString((margin*2)+395-totalstringwidth, tableheaderheight - (linecount * (count+1)), totalstring)
p.showPage()
# Get the value of the BytesIO buffer and write it to the response.
pdf = p.getpdfdata()
buffer.close()
return pdf
@login_required
def deletecashupreport(request, dcurid ):
dcurid = request.GET.get('dcurid')
cashupreport = CashUpReport.objects.filter(id = dcurid )
    if cashupreport:
        # build the status message before deleting so the timestamps are still available
        msg = 'Cash up report from %s to %s has been deleted successfully.' % (cashupreport.first().sessiontimestamp.strftime('%d/%m/%Y @%H:%M:%S%p'), cashupreport.first().createtimestamp.strftime('%d/%m/%Y @%H:%M:%S%p'))
        cashupreport.delete()
    else:
        msg = 'Cash up report not found.'
    return HttpResponseRedirect("/parcelhubPOS/cashupreport/?msg=%s" % msg)
| StarcoderdataPython |
3311593 | <filename>projects/dccon-downloader/app.py
import sys
import tkinter as tk
from dcinside.downloader import Downloader
title = '흔한 찐따의 디시콘 다운로더'
icon = 'icon.ico'
if __name__ == '__main__':
root = tk.Tk()
downloader = Downloader(root, title=title, icon=icon)
downloader.mainloop()
sys.exit(0)
| StarcoderdataPython |
131472 | import traceback, sys, string
import win32com.server.util
from win32com.util import IIDToInterfaceName
from win32com.client.util import Enumerator
from win32com.server.exception import COMException
import pythoncom
from framework import trace
from win32com.axdebug import axdebug, gateways, contexts, stackframe, documents, adb
from win32com.axdebug.codecontainer import SourceCodeContainer
from win32com.axdebug.util import _wrap, _wrap_remove
import win32com.client.connect
import win32api, winerror
import os
try:
os.environ["DEBUG_AXDEBUG"]
debuggingTrace = 1 # Should we print "trace" output?
except KeyError:
debuggingTrace = 0
def trace(*args):
"""A function used instead of "print" for debugging output.
"""
if not debuggingTrace:
return
print win32api.GetCurrentThreadId(),
for arg in args:
print arg,
print
# Note that the DebugManager is not a COM gateway class for the
# debugger - but it does create and manage them.
class DebugManager:
_debugger_interfaces_ = [axdebug.IID_IActiveScriptDebug]
def __init__(self, scriptEngine):
self.scriptEngine = scriptEngine
self.adb = adb.Debugger()
self.rootNode = None
self.debugApplication = None
self.ccProvider = documents.CodeContainerProvider()
try:
self.scriptSiteDebug = scriptEngine.GetScriptSite(axdebug.IID_IActiveScriptSiteDebug)
except pythoncom.com_error:
# No debugger interface (ie, dumb host). Do the extra work.
trace("Scripting site has no debugger interface")
self.scriptSiteDebug = None
# Get the debug application object.
self.debugApplication = None
if self.scriptSiteDebug is not None:
# Spec says that we should test for this, and if it fails revert to
# PDM application.
try:
self.debugApplication = self.scriptSiteDebug.GetApplication()
self.rootNode = self.scriptSiteDebug.GetRootApplicationNode()
except pythoncom.com_error:
self.debugApplication = None
if self.debugApplication is None:
# Try to get/create the default one
# NOTE - Dont catch exceptions here - let the parent do it,
# so it knows debug support is available.
pdm=pythoncom.CoCreateInstance(axdebug.CLSID_ProcessDebugManager,None,pythoncom.CLSCTX_ALL, axdebug.IID_IProcessDebugManager)
self.debugApplication = pdm.GetDefaultApplication()
self.rootNode = self.debugApplication.GetRootNode()
assert self.debugApplication is not None, "Need to have a DebugApplication object by now!"
self.activeScriptDebug = None
if self.debugApplication is not None:
self.adb.AttachApp(self.debugApplication, self.ccProvider)
self.codeContainers = {}
self.activeScriptDebug = _wrap(ActiveScriptDebug(self, self.codeContainers), axdebug.IID_IActiveScriptDebug)
def Close(self):
# Called by the language engine when it receives a close request
if self.activeScriptDebug is not None:
_wrap_remove(self.activeScriptDebug)
self.activeScriptDebug = None
self.scriptEngine = None
self.rootNode = None
self.debugApplication = None
self.scriptSiteDebug = None
if self.ccProvider is not None:
self.ccProvider.Close()
self.ccProvider = None
self.codeContainers = {}
if self.adb:
self.adb.CloseApp()
self.adb = None
# print "Close complete"
def IsAnyHost(self):
"Do we have _any_ debugging interfaces installed?"
return self.debugApplication is not None
def IsSimpleHost(self):
return self.scriptSiteDebug is None
def HandleRuntimeError( self ):
"""Called by the engine when a runtime error occurs. If we have a debugger,
we let it know.
The result is a boolean which indicates if the error handler should call
IActiveScriptSite::OnScriptError()
"""
# if self.IsAnyHost:
# site = _wrap(self, axdebug.IID_IActiveScriptSite)
# breakResume, errorResume, fCallOnError = self.debugApplication(activeScriptErrorDebug, site)
# Do something with these!
# else:
trace("HandleRuntimeError")
fCallOnError = 1
return fCallOnError
def _query_interface_for_debugger_(self, iid):
if iid in self._debugger_interfaces_:
return self.activeScriptDebug
trace("DebugManager QI - unknown IID", iid)
return 0
def OnEnterScript(self):
trace("OnEnterScript")
try:
1/0
except:
# Bit of a hack - reach into engine.
baseFrame = sys.exc_info()[2].tb_frame.f_back
self.adb.SetupAXDebugging(baseFrame)
def OnLeaveScript(self):
trace("OnLeaveScript")
self.adb.ResetAXDebugging()
def AddScriptBlock(self, codeBlock):
# If we dont have debugging support, dont bother.
cc = DebugCodeBlockContainer(codeBlock, self.scriptSiteDebug)
if self.IsSimpleHost():
document = documents.DebugDocumentText(cc)
document = _wrap(document, axdebug.IID_IDebugDocument)
provider = documents.DebugDocumentProvider(document)
provider = _wrap(provider, axdebug.IID_IDebugDocumentProvider)
cc.debugDocument = document
newNode = self.debugApplication.CreateApplicationNode()
newNode.SetDocumentProvider(provider)
newNode.Attach(self.rootNode)
else:
newNode = None # Managed by smart host.
self.codeContainers[cc.sourceContext] = cc
self.ccProvider.AddCodeContainer(cc, newNode)
class DebugCodeBlockContainer(SourceCodeContainer):
def __init__(self, codeBlock, site):
self.codeBlock = codeBlock
SourceCodeContainer.__init__(self, codeBlock.codeText, codeBlock.GetFileName(), codeBlock.sourceContextCookie, codeBlock.startLineNumber, site)
def GetName(self, dnt):
if dnt==axdebug.DOCUMENTNAMETYPE_APPNODE:
return self.codeBlock.GetDisplayName()
elif dnt==axdebug.DOCUMENTNAMETYPE_TITLE:
return self.codeBlock.GetDisplayName()
# elif dnt==axdebug.DOCUMENTNAMETYPE_FILE_TAIL:
# elif dnt==axdebug.DOCUMENTNAMETYPE_URL:
else:
raise COMException(scode=winerror.S_FALSE)
class EnumDebugCodeContexts(gateways.EnumDebugCodeContexts):
def _wrap(self, ob):
return ob
class ActiveScriptDebug:
"""The class which implements the IActiveScriptDebug interface for the Active Script engine.
Only ever used by smart hosts.
"""
_public_methods_ = ["GetScriptTextAttributes", "GetScriptletTextAttributes", "EnumCodeContextsOfPosition"]
_com_interfaces_ = [axdebug.IID_IActiveScriptDebug]
def __init__(self, debugMgr, codeContainers):
self.debugMgr = debugMgr
self.scriptSiteDebug = debugMgr.scriptSiteDebug
self.codeContainers = codeContainers
def _Close(self):
self.debugMgr = None
self.scriptSiteDebug = None
self.codeContainers = {}
def _query_interface_(self, iid):
trace("DebuggerQI with", iid)
return _wrap(self.debugMgr.scriptEngine, iid)
def GetScriptTextAttributes(self, code, delim, flags):
container = SourceCodeContainer(code, "<Temp Code Block>")
return container.GetSyntaxColorAttributes()
def GetScriptletTextAttributes(self, code, delim, flags):
trace ("GetScriptletTextAttributes", code, delim, flags)
container = SourceCodeContainer(code, "<Temp Code Block>")
return container.GetSyntaxColorAttributes()
def EnumCodeContextsOfPosition(self, context, charOffset, numChars):
trace("EnumCodeContextsOfPosition", context, charOffset, numChars)
try:
context = self.codeContainers[context].GetCodeContextAtPosition(charOffset)
except KeyError:
raise COMException(scode=winerror.E_UNEXPECTED)
enum = EnumDebugCodeContexts([context])
        return _wrap(enum, axdebug.IID_IEnumDebugCodeContexts)
| StarcoderdataPython |
3214109 | <filename>manual_exercises/ex4_5/dtft.py
import numpy as np
import matplotlib.pyplot as plt
def H(z):
num = np.polyval([1,0,1],z**(-1))
den = np.polyval([0.5,1],z**(-1))
H = num/den
return H
#Input and Output
omega = np.linspace(-3*np.pi,3*np.pi,int(10e2))
#subplots
plt.plot(omega,abs(H(np.exp(1j*omega))))
plt.title('Filter Frequency Response')
plt.xlabel('$\omega$')
plt.ylabel('$|H(e^{\jmath\omega})| $')
plt.grid()# minor
plt.savefig('dtft.png')
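# Quick analytical sanity check of the response plotted above (added example):
# with z^-1 = e^{-jw}, H(e^{j0}) = (1+1)/(0.5+1) = 4/3, H(e^{j*pi}) = 2/0.5 = 4,
# and the numerator 1 + z^-2 vanishes at w = pi/2, giving a null there.
print("|H| at w=0   :", abs(H(np.exp(1j*0))))        # expected ~1.333
print("|H| at w=pi/2:", abs(H(np.exp(1j*np.pi/2))))  # expected ~0 (null)
print("|H| at w=pi  :", abs(H(np.exp(1j*np.pi))))    # expected 4.0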
| StarcoderdataPython |
1615245 | <reponame>predatell/satchmo
from django.contrib import admin
from payment.models import CreditCardDetail
from payment.forms import CreditCardDetailAdminForm
class CreditCardDetail_Inline(admin.StackedInline):
model = CreditCardDetail
form = CreditCardDetailAdminForm
extra = 1
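# Illustrative usage (added sketch, not part of the original module): inlines like
# this are attached to another model's ModelAdmin, e.g.
#
#   class SomePaymentAdmin(admin.ModelAdmin):   # hypothetical admin class
#       inlines = [CreditCardDetail_Inline]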
| StarcoderdataPython |
120261 | import os
if os.getenv("LEVEL") == "PRODUCTION":
print('RUN PRODUCTION MODE')
from .production import *
else:
print('RUN LOCAL MODE')
from .local import *
from .base import *
| StarcoderdataPython |
199390 | from pkgcheck.checks import header
from .. import misc
class TestEbuildHeaderCheck(misc.ReportTestCase):
check_kls = header.EbuildHeaderCheck
def mk_pkg(self, **kwargs):
return misc.FakePkg("dev-util/diffball-0.5", **kwargs)
def test_empty_file(self):
fake_pkg = self.mk_pkg(lines=())
self.assertNoReport(self.check_kls(None), fake_pkg)
def test_good_copyright(self):
good_copyrights = [
'# Copyright 1999-2019 Gentoo Authors\n',
'# Copyright 2019 Gentoo Authors\n',
'# Copyright 2010-2017 Gentoo Authors\n',
]
for line in good_copyrights:
fake_src = [line, self.check_kls.license_header]
fake_pkg = self.mk_pkg(lines=fake_src)
self.assertNoReport(self.check_kls(None), fake_pkg)
def test_invalid_copyright(self):
bad_copyrights = [
'# Copyright (c) 1999-2019 Gentoo Authors\n',
'# Copyright Gentoo Authors\n',
'# Gentoo Authors\n',
'# Here is entirely random text\n',
'\n',
]
for line in bad_copyrights:
fake_src = [line, self.check_kls.license_header]
fake_pkg = self.mk_pkg(lines=fake_src)
r = self.assertReport(self.check_kls(None), fake_pkg)
assert isinstance(r, header.EbuildInvalidCopyright)
assert line.strip() in str(r)
def test_new_foundation_copyright(self):
"""Foundation copyright on new ebuilds triggers the report."""
bad_copyrights = [
'# Copyright 1999-2019 Gentoo Foundation\n',
'# Copyright 2019 Gentoo Foundation\n',
'# Copyright 3125 Gentoo Foundation\n',
'# Copyright 2010-2021 Gentoo Foundation\n',
]
for line in bad_copyrights:
fake_src = [line, self.check_kls.license_header]
fake_pkg = self.mk_pkg(lines=fake_src)
r = self.assertReport(self.check_kls(None), fake_pkg)
assert isinstance(r, header.EbuildOldGentooCopyright)
assert line.strip() in str(r)
def test_old_foundation_copyright(self):
"""Foundation copyright on old ebuilds does not trigger false positives."""
good_copyrights = [
'# Copyright 1999-2018 Gentoo Foundation\n',
'# Copyright 2016 Gentoo Foundation\n',
'# Copyright 2010-2017 Gentoo Foundation\n',
]
for line in good_copyrights:
fake_src = [line, self.check_kls.license_header]
fake_pkg = self.mk_pkg(lines=fake_src)
self.assertNoReport(self.check_kls(None), fake_pkg)
def test_non_gentoo_authors_copyright_in_gentoo(self):
"""Ebuilds in the gentoo repo must use 'Gentoo Authors'."""
bad_copyrights = [
'# Copyright 1999-2019 <NAME>\n',
'# Copyright 2019 辣鸡汤\n',
]
for line in bad_copyrights:
fake_src = [line, self.check_kls.license_header]
fake_pkg = self.mk_pkg(lines=fake_src)
r = self.assertReport(self.check_kls(None), fake_pkg)
assert isinstance(r, header.EbuildNonGentooAuthorsCopyright)
assert line.strip() in str(r)
def test_license_headers(self):
copyright = '# Copyright 1999-2019 Gentoo Authors\n'
fake_src = [copyright, self.check_kls.license_header]
fake_pkg = self.mk_pkg(lines=fake_src)
self.assertNoReport(self.check_kls(None), fake_pkg)
bad_license_headers = [
'',
'\n',
f'{self.check_kls.license_header} ',
f' {self.check_kls.license_header}',
'# Distributed under the terms of the GNU General Public License v3'
]
for line in bad_license_headers:
fake_src = [copyright, line]
fake_pkg = self.mk_pkg(lines=fake_src)
r = self.assertReport(self.check_kls(None), fake_pkg)
assert isinstance(r, header.EbuildInvalidLicenseHeader)
assert line.strip() in str(r)
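    # For reference (added note): a fake_src that passes both checks pairs a
    # "Gentoo Authors" copyright line with self.check_kls.license_header unchanged,
    # exactly as test_good_copyright does above; for Gentoo ebuilds that header is
    # the standard GPL-2 "Distributed under the terms of..." line (the v3 variant
    # listed in bad_license_headers is rejected).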
| StarcoderdataPython |
61501 | from copy import deepcopy
import torch
from torch import nn
import torch.nn.functional as F
import numpy as np
from apex import amp
from torch.cuda.amp import autocast as autocast
from transformers import BertModel, BertTokenizer
from util import text_processing
from collections import OrderedDict
from . import ops as ops
from .config import cfg
from .lcgn import LCGN, SemanLCGN
from .input_unit import Encoder
from .output_unit import Classifier
from .optimization import *
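# SingleHop: a single attention "hop" used as the readout. It projects the
# question vector into the context dimension, scores every object feature in kb
# against it, masks padded objects, softmax-normalises the scores, and returns
# the attention-weighted sum of object features: with batch B and N objects,
# kb is B x N x CTX_DIM, vecQuestions is B x ENC_DIM, and x_att is B x CTX_DIM.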
class SingleHop(nn.Module):
def __init__(self):
super().__init__()
self.proj_q = ops.Linear(cfg.ENC_DIM, cfg.CTX_DIM)
self.inter2att = ops.Linear(cfg.CTX_DIM, 1)
def forward(self, kb, vecQuestions, imagesObjectNum):
proj_q = self.proj_q(vecQuestions)
interactions = F.normalize(kb * proj_q[:, None, :], dim=-1)
raw_att = self.inter2att(interactions).squeeze(-1)# 128 * 49
raw_att = ops.apply_mask1d(raw_att, imagesObjectNum)
att = F.softmax(raw_att, dim=-1)
x_att = torch.bmm(att[:, None, :], kb).squeeze(1)
return x_att
class LCGNnet(nn.Module):
def __init__(self, num_vocab, num_choices):
super().__init__()
if cfg.INIT_WRD_EMB_FROM_FILE:
embeddingsInit = np.load(cfg.WRD_EMB_INIT_FILE) # 2956 * 300
assert embeddingsInit.shape == (num_vocab-1, cfg.WRD_EMB_DIM)
else:
embeddingsInit = np.random.randn(num_vocab-1, cfg.WRD_EMB_DIM)
self.num_vocab = num_vocab # 2957
self.num_choices = num_choices # 1845
self.tokenizer = BertTokenizer.from_pretrained('/home/xdjf/bert_config/bert-base-uncased')
self.model = BertModel.from_pretrained('/home/xdjf/bert_config/bert-base-uncased')
self.name_dict = text_processing.VocabDict(cfg.VOCAB_NAME_FILE)
name_embedding = self.reset_name_embedding()
self.encoder = Encoder(embeddingsInit, name_embedding)
self.lcgn = LCGN()
#self.sema_lcgn = SemanLCGN()
self.single_hop = SingleHop()
self.classifier = Classifier(num_choices)
#self.seman_encoder = ops.Linear(cfg.WRD_EMB_DIM, cfg.CMD_DIM)
def reset_name_embedding(self):
weight = torch.zeros(self.name_dict.num_vocab - 1, 768)
for word in self.name_dict.word_list:
if word == '<unk>':
continue
temp_embedding = self.extract_name_embedding(word)
weight[self.name_dict.word2idx(word) - 1] = temp_embedding
return weight
def extract_name_embedding(self, name):
token_name = self.tokenizer.encode(name, add_special_tokens=False)
input_ids = torch.tensor([token_name])
with torch.no_grad():
_, out = self.model(input_ids)
return out # 1* 768
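    # Forward pipeline: encode the question / semantic / name streams with the
    # encoder, run LCGN message passing over the object features conditioned on
    # the question, pool the refined object features with a single attention hop,
    # classify, and return predictions plus loss/accuracy bookkeeping for the batch.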
def forward(self, batch):
#batchSize = len(batch['image_feat_batch'])
questionIndices = batch[0]
questionLengths = batch[1]
semanIndices = batch[2]
semanLengths = batch[3]
answerIndices = batch[4]
nameIndices = batch[5]
nameLengths = batch[6]
images = batch[7]
imagesObjectNum = batch[8]
batchSize = images.size(0)
# LSTM
questionCntxWords, vecQuestions, word_seman, encode_seman, name_embed = self.encoder(
questionIndices, questionLengths, # 128 * 30 * 512 128 * 512
semanIndices, semanLengths,
nameIndices, nameLengths)
encode_seman = encode_seman.permute(1, 0, 2)
#encode_seman = self.seman_encoder(encode_seman)
# semanCnt = semanCnt[:, 0, :]
# LCGN
x_out = self.lcgn(
images=images, q_encoding=vecQuestions,
lstm_outputs=questionCntxWords, word_seman=word_seman, encode_seman=encode_seman, semanIndices=semanIndices, batch_size=batchSize,
q_length=questionLengths, entity_num=imagesObjectNum, name_embed=name_embed, nameLengths=nameLengths)
# x_out_seman = self.sema_lcgn(
# images=images, seman_outputs=semanCnt,
# batch_size=batchSize, entity_num=imagesObjectNum)
# x_out = self.tensor_inter_graph_propagation(x_out, x_out_seman)
# Single-Hop
x_att = self.single_hop(x_out, vecQuestions, imagesObjectNum)
logits = self.classifier(x_att, vecQuestions) # 128 * 1845
predictions, num_correct = self.add_pred_op(logits, answerIndices)
loss = self.add_answer_loss_op(logits, answerIndices)
return {"predictions": predictions,
"batch_size": int(batchSize),
"num_correct": int(num_correct),
"loss": loss,
"accuracy": float(num_correct * 1. / batchSize)}
def tensor_inter_graph_propagation(self, x_out_1, x_out_2):
bsz, imageNum, dModel= x_out_1.size(0), x_out_1.size(1), x_out_1.size(2)
x_sum_1 = torch.sum(x_out_1, dim=1)
x_sum_2 = torch.sum(x_out_2, dim=1)
x_expand_1 = x_sum_1.repeat(1, 2)
x_expand_2 = x_sum_2.repeat(1, 2)
x_sum = torch.cat([x_expand_1, x_expand_2], -1)
x_sum = x_sum.unsqueeze(1)
x_sum = x_sum.repeat(1, imageNum, 1)
x_union = torch.cat([x_out_1, x_out_2], dim=-1)
x_union_expand = x_union.repeat(1, 1, 2)
x_kr = torch.mul(x_union_expand, x_sum)
x_kr = x_kr.view(bsz * imageNum, 4, dModel)
x_kr = x_kr.permute(0, 2, 1)
x_out = self.conv1d(x_kr)
x_out = x_out.squeeze(-1)
x_out = x_out.view(bsz, imageNum, dModel)
return x_out
def add_pred_op(self, logits, answers):
if cfg.MASK_PADUNK_IN_LOGITS:
logits = logits.clone()
logits[..., :2] += -1e30 # mask <pad> and <unk>
preds = torch.argmax(logits, dim=-1).detach() # 128
corrects = (preds == answers)
correctNum = torch.sum(corrects).item()
preds = preds.cpu()#.numpy()
return preds, correctNum
def add_answer_loss_op(self, logits, answers):
if cfg.TRAIN.LOSS_TYPE == "softmax":
loss = F.cross_entropy(logits, answers)
elif cfg.TRAIN.LOSS_TYPE == "sigmoid":
answerDist = F.one_hot(answers, self.num_choices).float() # 128 * 1845
loss = F.binary_cross_entropy_with_logits(
logits, answerDist) * self.num_choices
else:
raise Exception("non-identified loss")
return loss
class LCGNwrapper():
def __init__(self, num_vocab, num_choices, cfg=None, rank=-1, gpu=0):
self.no_decay = ['bias', 'norm']
torch.cuda.set_device(gpu)
self.model = LCGNnet(num_vocab, num_choices).cuda(gpu)
self.trainable_params = [
{
"params": [p for n, p in self.model.named_parameters() if p.requires_grad and not any(nd in n for nd in self.no_decay)],
"weight_decay": cfg.weight_decay,
},
{
"params": [p for n, p in self.model.named_parameters() if p.requires_grad and any(nd in n for nd in self.no_decay)],
"weight_decay": 0.0
}
]
self.optimizer = torch.optim.AdamW(
self.trainable_params, lr=cfg.TRAIN.SOLVER.LR)
#self.optimizer = AdamW(self.trainable_params, lr=cfg.TRAIN.SOLVER.LR, eps=cfg.adam_epsilon)
total_step = int(943000 / cfg.n_gpus // cfg.TRAIN.BATCH_SIZE + 1) * cfg.TRAIN.MAX_EPOCH
self.scheduler = get_cosine_schedule_with_warmup(
self.optimizer, num_warmup_steps=cfg.warmup_steps, num_training_steps=total_step)
if cfg.fp16:
self.scaler = torch.cuda.amp.GradScaler()
#self.model, self.optimizer = amp.initialize(self.model, self.optimizer, opt_level=cfg.fp16_opt_level)
if cfg.n_gpus > 1:
self.model = nn.parallel.DistributedDataParallel(self.model,
device_ids=[gpu], output_device=gpu, find_unused_parameters=True)
self.lr = cfg.TRAIN.SOLVER.LR
self.fp16 = cfg.fp16
self.fp16_opt_level = cfg.fp16_opt_level
if cfg.USE_EMA:
self.ema_param_dict = {
name: p for name, p in self.model.named_parameters()
if p.requires_grad}
self.ema = ops.ExponentialMovingAverage(
self.ema_param_dict, decay=cfg.EMA_DECAY_RATE)
self.using_ema_params = False
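    # EMA bookkeeping: when cfg.USE_EMA is set, eval() temporarily swaps the live
    # parameters for their exponential-moving-average copies (set_params_from_ema)
    # and train() restores the originals (set_params_from_original); state_dict()
    # and load_state_dict() always switch to training mode first, so the saved
    # weights are the non-EMA ones with the EMA state stored alongside them.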
def train(self, training=True):
self.model.train(training)
if training:
self.set_params_from_original()
else:
self.set_params_from_ema()
def eval(self):
self.train(False)
def state_dict(self):
# Generate state dict in training mode
current_mode = self.model.training
self.train(True)
assert (not cfg.USE_EMA) or (not self.using_ema_params)
return {
'model': self.model.state_dict(),
'optimizer': self.optimizer.state_dict(),
'ema': self.ema.state_dict() if cfg.USE_EMA else None
}
# restore original mode
self.train(current_mode)
def load_state_dict(self, state_dict):
# Load parameters in training mode
current_mode = self.model.training
self.train(True)
assert (not cfg.USE_EMA) or (not self.using_ema_params)
new_state_dict = OrderedDict()
for k, v in state_dict['model'].items():
name = k[7: ]
new_state_dict[name] = v
self.model.load_state_dict(new_state_dict)
if 'optimizer' in state_dict:
self.optimizer.load_state_dict(state_dict['optimizer'])
else:
print('Optimizer does not exist in checkpoint! '
'Loaded only model parameters.')
if cfg.USE_EMA:
if 'ema' in state_dict and state_dict['ema'] is not None:
self.ema.load_state_dict(state_dict['ema'])
else:
print('cfg.USE_EMA is True, but EMA does not exist in '
'checkpoint! Using model params to initialize EMA.')
self.ema.load_state_dict(
{k: p.data for k, p in self.ema_param_dict.items()})
# restore original mode
self.train(current_mode)
def set_params_from_ema(self):
if (not cfg.USE_EMA) or self.using_ema_params:
return
self.original_state_dict = deepcopy(self.model.state_dict())
self.ema.set_params_from_ema(self.ema_param_dict)
self.using_ema_params = True
def set_params_from_original(self):
if (not cfg.USE_EMA) or (not self.using_ema_params):
return
self.model.load_state_dict(self.original_state_dict)
self.using_ema_params = False
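    # run_batch: one forward/backward step. With cfg.fp16, autocast wraps the
    # forward pass, the GradScaler scales the loss before backward, gradients are
    # unscaled before clipping, and scaler.step()/scaler.update() replace the plain
    # optimizer.step(); the LR scheduler and (optionally) the EMA copy are advanced
    # after every training step, while evaluation runs under torch.no_grad().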
def run_batch(self, batch, train, lr=None):
assert train == self.model.training
assert (not train) or (lr is not None), 'lr must be set for training'
if train:
if lr != self.lr:
for param_group in self.optimizer.param_groups:
param_group['lr'] = lr
self.lr = lr
self.optimizer.zero_grad()
if cfg.fp16:
with autocast():
batch_res = self.model.forward(batch)
else:
batch_res = self.model.forward(batch)
loss = batch_res['loss']
if self.fp16:
self.scaler.scale(loss).backward()
# with amp.scale_loss(loss, self.optimizer) as scaled_loss:
# scaled_loss.backward()
else:
loss.backward()
if cfg.TRAIN.CLIP_GRADIENTS:
if self.fp16:
self.scaler.unscale_(self.optimizer)
nn.utils.clip_grad_norm_(
self.model.parameters(), cfg.TRAIN.GRAD_MAX_NORM)
#torch.nn.utils.clip_grad_norm_(amp.master_params(self.optimizer), cfg.TRAIN.GRAD_MAX_NORM)
else:
nn.utils.clip_grad_norm_(
self.model.parameters(), cfg.TRAIN.GRAD_MAX_NORM)
if cfg.fp16:
self.scaler.step(self.optimizer)
self.scaler.update()
else:
self.optimizer.step()
self.scheduler.step()
batch_res['lr'] = self.scheduler.get_last_lr()[0]
if cfg.USE_EMA:
self.ema.step(self.ema_param_dict)
else:
with torch.no_grad():
batch_res = self.model.forward(batch)
return batch_res
| StarcoderdataPython |
1646543 | from os.path import join, exists, isdir, abspath
from packaging import version
import torch
ROOT_DIR = abspath('.')
DATA_DIR = join(ROOT_DIR, 'dataset/')
MODEL_DIR = join(ROOT_DIR, 'models/')
RESULT_DIR = join(ROOT_DIR, 'result/')
MID_PRODUCT = join(ROOT_DIR, 'mid_product/')
for d in [DATA_DIR, MODEL_DIR, RESULT_DIR, MID_PRODUCT]:
assert exists(d) and isdir(d), d+' does not exist. Please run "mkdir '+d+'" or download the dataset.'
###### for preprocessing only ######
GDRIVE_DIR = '???'
KUNSHAN_1_RAW = join(GDRIVE_DIR, 'Kunshan1')
PARIS_1_RAW = join(GDRIVE_DIR, 'Paris-Le Bourget')
SHENNONGJIA_1_RAW = join(GDRIVE_DIR, 'Shennongjia')
SUZHOU_1_RAW = join(GDRIVE_DIR, 'Suzhoudata1')
SUZHOU_2_RAW = join(GDRIVE_DIR, 'Suzhoudata2')
SUZHOU_3_RAW = join(GDRIVE_DIR, 'Suzhoudata3')
SUZHOU_4_RAW = join(GDRIVE_DIR, 'Suzhoudata4')
SWISS_1_RAW = join(GDRIVE_DIR, 'Swissdata1-Merlischachen')
SWISS_2_RAW = join(GDRIVE_DIR, 'Swissdata2-Renens')
SWISS_3_RAW = join(GDRIVE_DIR, 'Swissdata3-Lausanne')
WEIHAI_1_RAW = join(GDRIVE_DIR, 'Weihai')
WUXI_1_RAW = join(GDRIVE_DIR, 'Wuxi')
ENGLAND_RAW_ROOT = join(DATA_DIR, 'England_raw')
ENGLAND_BIRMINGHAM_RAW = join(ENGLAND_RAW_ROOT, 'Birmingham')
ENGLAND_COVENTRY_RAW = join(ENGLAND_RAW_ROOT, 'Coventry')
ENGLAND_LIVERPOOL_RAW = join(ENGLAND_RAW_ROOT, 'Liverpool')
ENGLAND_PEAK_RAW = join(ENGLAND_RAW_ROOT, 'PEAK')
###### end #####
ENGLAND_DATA = join(DATA_DIR, 'England')
ENGLAND_960x720 = join(DATA_DIR, 'England_960x720')
SWISS_DATA = join(DATA_DIR, 'swiss_data/')
SWISS_1280x720 = join(DATA_DIR, 'swiss_1280x720/')
SWISS_960x720 = join(DATA_DIR, 'swiss_960x720/')
SUZHOU_DATA = join(DATA_DIR, 'suzhou_data/')
SUZHOU_1280x720 = join(DATA_DIR, 'suzhou_1280x720/')
SUZHOU_960x720 = join(DATA_DIR, 'suzhou_960x720/')
FULL_DATA = join(DATA_DIR, 'full_data/')
FULL_RESIZED = join(DATA_DIR, 'full_resized/')
MIX_960x720 = join(DATA_DIR, 'scaled_down_dataset/')
FULL_960x720 = join(DATA_DIR, 'full_960x720/')
FULL_AUG_960x720 = join(DATA_DIR, 'full_aug_960x720/')
ERROR_TOLERANCE = join(DATA_DIR, 'error_tolerance/satellitemap')
FULL_RESIZED_FEATURE = join(MID_PRODUCT, 'features_full_res34_eval.h5')
FULL_RESIZED_FEATURE_NOEVAL = join(MID_PRODUCT, 'features_full_res34_noeval.h5')
FULL_960x720_FEATURE_RES34 = join(MID_PRODUCT, 'features_full_960x720_res34.h5')
FULL_960x720_FEATURE_RES50 = join(MID_PRODUCT, 'features_full_960x720_res50.h5')
_NewResNet = False # Please change this line according to your wish
SQUEEZE_960x720_SHAPE = (44, 59)
if version.parse(torch.__version__) >= version.parse('1.0') and _NewResNet:
RESNET_POOLING = ['adaptive', 'fixed'][0]
RES34_960x720_SHAPE = (512,)
RES34_1280x720_SHAPE = (512,)
RES50_960x720_SHAPE = (2048,)
else:
RESNET_POOLING = ['adaptive', 'fixed'][1]
RES34_960x720_SHAPE = (512, 17, 24)
RES34_1280x720_SHAPE = (512, 17, 34)
RES50_960x720_SHAPE = (2048, 17, 24)
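# Illustrative helper (not part of the original config; the name is an assumption):
# pick the cached feature file and expected tensor shape for a given backbone.
# def feature_spec(backbone='res34'):
#     if backbone == 'res34':
#         return FULL_960x720_FEATURE_RES34, RES34_960x720_SHAPE
#     return FULL_960x720_FEATURE_RES50, RES50_960x720_SHAPE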
| StarcoderdataPython |
1722591 | <filename>reachyAudio/reachyAudio.py
"""The reachyAudio module defines the ReachyAudio class."""
import time
from math import cos, sin, radians
from .reachyAudioPlayerRecorder import ReachyAudioPlayerRecorder
from .reachyAudioTextToSpeech import ReachyAudioTextToSpeech
from .reachyAudioMicArrayFeatures import ReachyAudioMicArrayFeatures
from .reachyAudioAnswering import ReachyAudioAnswering
from .reachyAudioSpeechRecognition import ReachyAudioSpeechRecognition
from .reachyAudioSpeechRecognition import speechRecognitionCallback
class ReachyAudio(ReachyAudioPlayerRecorder,
ReachyAudioTextToSpeech,
ReachyAudioSpeechRecognition,
ReachyAudioMicArrayFeatures,
ReachyAudioAnswering):
"""ReachyAudio class.
This class regroups all the features related to audio and natural
language processing.
"""
def __init__(self):
"""Call the constructor of each submodule."""
ReachyAudioPlayerRecorder.__init__(self)
ReachyAudioTextToSpeech.__init__(self)
ReachyAudioSpeechRecognition.__init__(self)
ReachyAudioMicArrayFeatures.__init__(self)
ReachyAudioAnswering.__init__(self)
def __del__(self):
"""Delete the text to speech engine."""
self.engine.stop()
def conversation(self, reachyObject, alteredVoice=False):
"""Allow Reachy to converse with people.
:param reachyObject: Instance of the Reachy class.
:param alteredVoice: If we want Reachy's voice to sound more
robot-like.
"""
# Store the detected angle, since the recognition thread
# takes more time to detect the end of a voice sample than the
# recording thread
stored_angle = -1
# Use the LEDs to make the conversation more interactive
self.pixel_ring.set_brightness(0x12)
self.pixel_ring.set_color_palette(self.COLORS['ORANGE'],
self.COLORS['YELLOW'])
self.pixel_ring.speak()
# Initialize the recognition thread so that we can do both recognition
# and orientation detection
stop_listening = self.recognizer.listen_in_background(
self.microphone,
speechRecognitionCallback)
print("Listening...")
while True:
try:
# Try to detect if someone spoke
said = self.getDetectedSentence()
angle = self.getDetectedAngle()
if angle != -1:
stored_angle = angle
if said != "":
# Reachy heard and recognized a sentence, so he will now
# answer it. We mark the robot as speaking so that we don't
# try to recognize what he is about to say. We also change
# the LED color to show the interlocutor that Reachy is now
# in the answering state
self.setRobotSpeaking()
self.setRobotSpeakingMic()
self.pixel_ring.set_color_palette(self.COLORS['MAGENTA'],
self.COLORS['CYAN'])
print("Reachy heared a voice at ", stored_angle,
"degrees.")
print("Reachy thinks you said: ", said)
# Move the head toward the interlocutor
theta = radians(stored_angle)
reachyObject.head.compliant = False
reachyObject.head.look_at(2, cos(theta), sin(theta)-0.3,
duration=2, wait=True)
# Answer to the interlocutor
tag, answer = self.answer(said)
self.speak(answer, alteredVoice=alteredVoice)
time.sleep(2)
# End of the conversation depending on the sentence intent
if tag == "goodbye":
break
# Reachy stopped speaking, so we reactivate the recording
# thread and the recognition thread. We also change the
# LED color to show the interlocutor that Reachy is now
# in the listening state
self.clearDetectedSentence()
self.clearDetectedAngle()
self.clearRobotSpeakingMic()
self.clearRobotSpeaking()
self.pixel_ring.set_color_palette(self.COLORS['ORANGE'],
self.COLORS['YELLOW'])
print("Listening...")
time.sleep(0.1)
# End of the conversation if keyboard interrupt
except KeyboardInterrupt:
break
# End of the conversation, we stop the recognition thread
stop_listening(wait_for_stop=True)
self.clearDetectedSentence()
self.clearDetectedAngle()
self.clearRobotSpeakingMic()
self.clearRobotSpeaking()
self.pixel_ring.set_color_palette(self.COLORS['ORANGE'],
self.COLORS['YELLOW'])
print("End of conversation !")
| StarcoderdataPython |
3302836 | """Playwell RPA Base
"""
| StarcoderdataPython |
142456 | <reponame>Piero-Palevsky-OH/aldjemy
class Wrapper:
"Wrapper to disable commit in sqla"
def __init__(self, obj):
self.obj = obj
def __getattr__(self, attr):
if attr in ["commit", "rollback"]:
return lambda *args, **kwargs: None
obj = getattr(self.obj, attr)
if attr not in ["cursor", "execute"]:
return obj
if attr == "cursor":
return type(self)(obj)
return self.wrapper(obj)
def wrapper(self, obj):
"Implement if you need to make your customized wrapper"
return obj
def __call__(self, *args, **kwargs):
self.obj = self.obj(*args, **kwargs)
return self
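# Usage sketch (illustrative; `raw_connection` stands for any DB-API connection object):
#     safe = Wrapper(raw_connection)
#     safe.commit()        # silently ignored, so SQLAlchemy cannot commit through it
#     cur = safe.cursor()  # returns another Wrapper, this time around the cursor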
| StarcoderdataPython |
173574 |
from sqlalchemy import Column, String, ForeignKey
from fr.tagc.rainet.core.util.sql.Base import Base
from fr.tagc.rainet.core.util.sql.SQLManager import SQLManager
from fr.tagc.rainet.core.util.exception.NotRequiredInstantiationException import NotRequiredInstantiationException
from fr.tagc.rainet.core.util.exception.RainetException import RainetException
from fr.tagc.rainet.core.data.ProteinCrossReference import ProteinCrossReference
from fr.tagc.rainet.core.data.Protein import Protein
from fr.tagc.rainet.core.data.SynonymGeneSymbol import SynonymGeneSymbol
# #
# This class describes an annotation of a protein on Bioplex clusters
#
class ProteinBioplexAnnotation( Base ):
__tablename__ = 'ProteinBioplexAnnotation'
# The Bioplex cluster annotation
bioplex_cluster_id = Column( String, ForeignKey( 'BioplexCluster.bioplexID', onupdate="CASCADE", ondelete="CASCADE"), primary_key=True)
# The annotated protein
protein_id = Column( String, ForeignKey( 'Protein.uniprotAC', onupdate="CASCADE", ondelete="CASCADE"), primary_key=True)
#
# The constructor of the class
#
# @param bioplex_cluster_id : string - The ID of the Bioplex cluster associated to the protein
def __init__(self, bioplex_cluster_id, protein_id):
sql_session = SQLManager.get_instance().get_session()
#=======================================================================
# Search for the Bioplex cluster corresponding to the given Bioplex cluster ID
#=======================================================================
# -- make the query
from fr.tagc.rainet.core.data.BioplexCluster import BioplexCluster
clusters_list = sql_session.query( BioplexCluster).filter( BioplexCluster.bioplexID == bioplex_cluster_id).all()
# --Check if a single cross reference is found. If not, raise an issue
cluster_id = None
if clusters_list != None and len( clusters_list) > 0:
if len( clusters_list) == 1:
cluster_id = clusters_list[0]
else:
raise RainetException( "ProteinBioplexAnnotation.init : Abnormal number of Bioplex clusters found for cluster_id = " + cluster_id + " : " + str( len( clusters_list)))
else:
raise NotRequiredInstantiationException( "ProteinBioplexAnnotation.init : No Bioplex cluster found for cluster id = " + cluster_id)
if cluster_id == None:
raise RainetException( "ProteinBioplexAnnotation.init : returned cross reference is None for " + cluster_id)
#=======================================================================
# Search the protein object. Then build the association between the Protein and the Bioplex cluster
#=======================================================================
# -- make the query
protein_list = sql_session.query( Protein).filter( Protein.uniprotAC == protein_id).all()
# --Check if a single Protein is found. If not, raise an issue
protein = None
if protein_list != None and len( protein_list) > 0:
if len( protein_list) == 1:
protein = protein_list[0]
else:
raise RainetException( "ProteinBioplexAnnotation.init : Abnormal number of Protein found for = " +protein_id + " : " + str( len( protein_list)))
else:
raise NotRequiredInstantiationException( "ProteinBioplexAnnotation.init : No Protein found for uniprotAC = " + protein_id)
# -- Check if the Protein found is not None
if protein == None:
raise RainetException( "ProteinBioplexAnnotation.init : returned Protein is None for UniprotAC" + protein_id)
# -- Build the relation between the Bioplex cluster and the Protein
cluster_id.add_annotated_protein( protein)
sql_session.add( cluster_id)
sql_session.add( protein)
# Raise the Exception to indicate the instance must not be inserted since it is automatically created
raise NotRequiredInstantiationException( "ProteinBioplexAnnotation.init : ProteinBioplexAnnotation objects do not have to be inserted by __init__ since they are created by BioplexCluster to Protein association table.")
##
# Add the object to SQLAlchemy session if it is linked to a protein
def add_to_session(self):
sql_session = SQLManager.get_instance().get_session()
sql_session.add( self)
| StarcoderdataPython |
10996 | import re
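# Note: `widget_inputs` and `grade_result` are assumed to be injected by the hosting
# grading environment; this script only reads the former and fills in the latter.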
textinput = widget_inputs["text1"]
comments = []
def commentizer(new):
if new not in comments:
comments.append(new)
is_correct = False
result = re.match(".*window.*", textinput, flags=re.IGNORECASE)
if result:
is_correct = True
commentizer("You're right, but there's a little more to it than that. Make sure you watch the solution video.")
result = re.match(".*global.*", textinput, flags=re.IGNORECASE)
if result:
is_correct = True
commentizer("Right! It's the global object.")
result = re.match(".*promise.*", textinput, flags=re.IGNORECASE)
if result:
is_correct = False
commentizer("It's not the Promise. Take another look!")
if not is_correct and len(comments) == 0:
commentizer("Not quite. Just log `this` somewhere in the Promise to see what happens.")
grade_result["comment"] = "\n\n".join(comments)
grade_result["correct"] = is_correct | StarcoderdataPython |
154899 | from django import forms
from .models import Member, get_config
from .util import validate_country
class MemberForm(forms.ModelForm):
class Meta:
model = Member
fields = ('fullname', 'country', 'listed')
def clean_country(self):
if self.instance.country_exception:
# No country checking for this member
return self.cleaned_data['country']
validate_country(get_config().country_validator, self.cleaned_data['country'])
return self.cleaned_data['country']
class ProxyVoterForm(forms.Form):
name = forms.CharField(min_length=5, max_length=100, help_text="Name of proxy voter. Leave empty to cancel proxy voting.", required=False)
| StarcoderdataPython |
1632216 | #!/usr/bin/python
#coding = utf-8
import QuantLib as ql
import pandas as pd
def getDigitsFromStr(string:str,withDigitPoint:bool = False):
"""
Extract numbers from a string. Return a list whose elements are strings that represent numbers.
Parameters
----------
string : str
The string that may contain numbers.
withDigitPoint : bool
Use True if the numbers are floats. Use False if all numbers are ints.
Returns
-------
strList : list
A list whose elements are string that can be converted into numbers.
"""
import re
if withDigitPoint:
strList = re.findall(r"\d+\.?\d*",string)
else:
strList = re.findall(r"\d+\d*",string)
return strList
def getNumbersFromStr(string:str,withDigitPoint:bool = False):
"""
Extract numbers from a string. Return a list whose elements are numbers.
Parameters
----------
string : str
The string that may contain numbers.
withDigitPoint : bool
Use True if the numbers are floats. Use False if all numbers are ints.
Returns
-------
strList : list
A list whose elements are numbers.
"""
strList = getDigitsFromStr(string,withDigitPoint)
result = [float(i) if withDigitPoint else int(i) for i in strList]
return result
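# Quick illustrative checks (assumed behaviour):
# getDigitsFromStr("T2108.CFE") -> ['2108']
# getDigitsFromStr("1.5kg and 2kg", withDigitPoint=True) -> ['1.5', '2']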
def getNthWeekday(n:int,weekdayCode:int,TimeStamp:pd.Timestamp):
"""
This function will return the n-th weekday of the month, which contains the given day.
Parameters
----------
n : int
The n-th weekday you want to get
weekdayCode : int
The weekday code, use int from 1 to 7
TimeStamp : pd.Timestamp
The standing point, calculation will be done based on this day.
Returns
-------
result : pd.Timestamp
"""
import pandas as pd
firstDayAtThatMonth = pd.Timestamp(TimeStamp.year,TimeStamp.month,1)
weekdayCodeBenchMark = firstDayAtThatMonth.dayofweek + 1
if n == 0:
print("Error: N can't be 0!\n")
exit(-1)
elif n<0:
n = n+1
else:
pass
if weekdayCode>=weekdayCodeBenchMark:
days = (n-1)*7 + weekdayCode-weekdayCodeBenchMark
else:
days = n * 7 + weekdayCode - weekdayCodeBenchMark
resultDay = firstDayAtThatMonth + pd.Timedelta(days=days)
return resultDay
def convertFutureCodeToExpirationDate(futureCodeString:str):
"""
Convert future code into expiration date. This function only applies in China.
In China, future code is like T2108.CFE, this function will convert it into expiration
date, which is 2021.08.21 in this case, the third Saturday in August. (Due to the rules
of exchange.)
"""
import pandas as pd
simplifiedStr = getDigitsFromStr(futureCodeString)[0]
tmpExpirationDay = pd.Timestamp(int('20'+simplifiedStr[:2]),int(simplifiedStr[2:]),12)
return getNthWeekday(3,6,tmpExpirationDay)
def getFutureTypeByExpirationDate(baseDateTimestamp:pd.Timestamp,expirationDateTimestamp:pd.Timestamp,futureIndexType:str='Stock_Index'):
"""
This function will classify the future code into contract type, such as 'Expired','Current','Month',
'Season','Half Year', etc, given the date where you stand and the expiration date of future.
This function only applies in China.
"""
baseMonthNum = (expirationDateTimestamp.year - baseDateTimestamp.year)*12 + (expirationDateTimestamp.month - baseDateTimestamp.month)
if futureIndexType=='Stock_Index':
n = 3
weekday = 6
if baseDateTimestamp >= getNthWeekday(n,weekday,baseDateTimestamp):
modifiedBaseMonthNum = baseMonthNum - 1
else:
modifiedBaseMonthNum = baseMonthNum
if modifiedBaseMonthNum < 0:
return 'Expired'
elif modifiedBaseMonthNum <1:
return 'Current'
elif modifiedBaseMonthNum <2:
return 'Month'
elif modifiedBaseMonthNum <3:
return 'Season'
elif modifiedBaseMonthNum <6:
return 'Half Year'
elif modifiedBaseMonthNum <12:
return 'Year'
elif modifiedBaseMonthNum <36:
return 'Three Year'
elif modifiedBaseMonthNum <60:
return 'Five Year'
elif modifiedBaseMonthNum <120:
return 'Ten Year'
else:
return ''
elif futureIndexType == 'Bond_Index':
n = 2
weekday = 6
if baseDateTimestamp >= getNthWeekday(n,weekday,baseDateTimestamp):
modifiedBaseMonthNum = baseMonthNum - 1
else:
modifiedBaseMonthNum = baseMonthNum
if modifiedBaseMonthNum < 0:
return 'Expired'
elif modifiedBaseMonthNum <3:
return 'Three Month'
elif modifiedBaseMonthNum <6:
return 'Six Month'
elif modifiedBaseMonthNum <9:
return 'Nine Month'
else:
return ''
else:
pass
def convertNumberOfDaysToChineseStr(numberOfDays:int):
"""
This function will convert number of days into Chinese string, such as
'一个月','三个月',etc.
"""
try:
if round(numberOfDays/30)==1:
return '一个月'
elif round(numberOfDays/30)<=3:
return '三个月'
elif round(numberOfDays/30)<=7:
return '六个月'
elif round(numberOfDays/30)<=10:
return '九个月'
elif round(numberOfDays/30)<=13:
return '一年'
elif round(numberOfDays/365)<=2:
return '两年'
elif round(numberOfDays/365)<=3:
return '三年'
elif round(numberOfDays/365)<=4:
return '四年'
elif round(numberOfDays/365)<=5:
return '五年'
elif round(numberOfDays/365)<=6:
return '六年'
elif round(numberOfDays/365)<=7:
return '七年'
elif round(numberOfDays/365)<=8:
return '八年'
elif round(numberOfDays/365)<=9:
return '九年'
elif round(numberOfDays/365)<=10:
return '十年'
elif round(numberOfDays/365)<=16:
return '十五年'
elif round(numberOfDays/365)<=21:
return '二十年'
else:
return str(numberOfDays)
except Exception as e:
print(e)
return numberOfDays
def changeSeriesIndexTypeFromStrToTimestamp(pdSeries:pd.Series):
"""
This function will change the data type of pd.Series.index into pd.Timestamp
"""
import pandas as pd
pdSeries.index = [pd.Timestamp(i) for i in pdSeries.index]
return pdSeries
def generateBusinessDateList(startDateString:str,endDateString:str,freq:str='D'):
"""
This function will generate a list of business day.
Parameters
----------
startDateString : str
The range start.
endDateString : str
The range end
freq : str
The frequency used to generate date list.
Returns
-------
list
"""
import pandas as pd
startDate = pd.Timestamp(startDateString).strftime("%Y-%m-%d")
endDate = pd.Timestamp(endDateString).strftime("%Y-%m-%d")
return [i for i in pd.date_range(startDate,endDate,freq=freq).to_list() if i.dayofweek<5]
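# Example sketch (assumed): generateBusinessDateList('2021-01-01', '2021-01-07')
# returns the five weekdays in that range (Fri 01, then Mon 04 .. Thu 07) as Timestamps.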
def generateTradingDateList(startDateString:str,endDateString:str,freq:str='D',qlExchangeObject = ql.China.SSE):
"""
This function will return a list of trading day, using the trading calendar of China exchange.
"""
import pandas as pd
startDate = pd.Timestamp(startDateString).strftime("%Y-%m-%d")
endDate = pd.Timestamp(endDateString).strftime("%Y-%m-%d")
calendar = ql.China(qlExchangeObject)
dateList = [i for i in pd.date_range(startDate,endDate,freq=freq).to_list()]
dateList = [i for i in dateList if calendar.isBusinessDay(ql.Date(i.strftime("%Y-%m-%d"), '%Y-%m-%d'))]
return dateList
def isTradingDate(dateString:str, qlExchangeObject = ql.China.SSE):
"""
This function will return a bool value, telling whether the given date is a trading day,
using China exchange trading calendar.
"""
import pandas as pd
date = pd.Timestamp(dateString)
calendar = ql.China(qlExchangeObject)
targetDate = ql.Date(date.strftime("%Y-%m-%d"), '%Y-%m-%d')
return calendar.isBusinessDay(targetDate)
def generateNextNWeekday(startDateString:str,n:int):
"""
This function will return a list, whose elements are the next n weekdays.
"""
import pandas as pd
startDate = pd.Timestamp(startDateString)
startDate_FormedStr = startDate.strftime("%Y-%m-%d")
endDate_FormedStr = (startDate + pd.Timedelta(days=int(n/5*7)+10)).strftime("%Y-%m-%d")
return [i for i in pd.date_range(startDate_FormedStr,endDate_FormedStr,freq='D').to_list() if i.dayofweek<5][:n]
def changeSecurityListToStr(securityList:list):
"""
This function will convert a list of string into a single string.
"""
securityListFormed = [i+',' for i in securityList]
return "".join(securityListFormed).strip(',')
def getStringSimilarity(string1:str,string2:str):
"""
This function will return a similarity of two strings.
"""
import difflib
return difflib.SequenceMatcher(None,string1,string2).quick_ratio()
def getMostSimilarStringFromList(string:str,stringList:list):
"""
This function will return the most similar string of a given string, after specifying
the list where you choose string from.
"""
similarRatioList = [getStringSimilarity(string,i) for i in stringList]
return stringList[similarRatioList.index(max(similarRatioList))]
def getLastTradingDate(presentTradingDateString:str):
"""
This function will return the last trading date given standing point,
using China exchange trading calendar.
"""
import pandas as pd
import QuantLib as ql
calendar = ql.China(ql.China.SSE)
presentTradingDate = pd.Timestamp(presentTradingDateString)
standingPoint = ql.Date(presentTradingDate.strftime("%Y-%m-%d"),'%Y-%m-%d') + ql.Period(-1,ql.Days)
while not calendar.isBusinessDay(standingPoint):
standingPoint = standingPoint + ql.Period(-1,ql.Days)
return pd.Timestamp(standingPoint.year(),standingPoint.month(),standingPoint.dayOfMonth())
def getNTradingDaysBeforeTradingDate(days:int,presentTradingDateString:str):
"""
This function will return the N-th trading day before standing point.
"""
import pandas as pd
i = days
while i!=0:
presentTradingDateString = getLastTradingDate(presentTradingDateString).strftime("%Y-%m-%d")
i = i - 1
return pd.Timestamp(presentTradingDateString)
def convertTimeToString(time,formatString = "%Y-%m-%d"):
"""
This function will convert pd.Timestamp into string.
"""
try:
return time.strftime(formatString)
except Exception as e:
return ''
| StarcoderdataPython |
151915 | import cv2
import numpy as np
from imutils.video import FileVideoStream
import imutils
import time
vs = FileVideoStream('messi.webm').start()
while vs.more():
frame=vs.read()
if frame is None:
continue
output=frame.copy()
gray=cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
gray=cv2.medianBlur(gray,5)
gray=cv2.adaptiveThreshold(gray,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY,3,5)
kernel=np.ones((3,3),np.uint8)
gray=cv2.erode(gray,kernel,iterations=1)
gray=cv2.dilate(gray,kernel,iterations=1)
circles=cv2.HoughCircles(gray,cv2.HOUGH_GRADIENT,1,260,param1=30,param2=65,minRadius=0)
radii=[]
if circles is None:
continue
circles=np.uint16(np.around(circles))
# HoughCircles returns an array of shape (1, N, 3); loop over the N detected circles
for i in range(circles.shape[1]):
radii.append(circles[0][i][2])
R=max(radii)
X=None
Y=None
for i in range(circles.shape[1]):
if circles[0][i][2]==R:
X=circles[0][i][0]
Y=circles[0][i][1]
break
cv2.circle(output,(X,Y),R,(0,255,0),4)
cv2.imshow('result',output)
cv2.waitKey(1)
cv2.destroyAllWindows()
vs.stop() | StarcoderdataPython |
4836274 | ctx.tex_radius = ctx.gen_texture.radius.cell()
ctx.tex_radius.set(32)
ctx.tex_filename = ctx.gen_texture.filename.cell()
ctx.tex_filename.set("")
ctx.gen_texture.as_float.cell().set(True)
ctx.gen_texture.output.connect(ctx.texture)
| StarcoderdataPython |
1779942 | from threading import Thread
import pat
import patl
import logging
import argparse
import time
from patutils import SEPARATOR, BOX_LENGTH, BASE, USE_SEPARATOR
""" A chat protocol based loosely off IRC
The whole thing works with ASCII256 text
"""
standard_ascii = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19" \
"\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[" \
"\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\x7f"
extended_ascii = "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø×ƒáíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈıÍÎÏ┘┌█▄¦Ì▀Óß" \
"ÔÒõÕµþÞÚÛÙýݯ´≡±‗¾¶§÷¸°¨·¹³²■ "
alphabet = standard_ascii + extended_ascii # python doesn't natively support 'ascii256'
FINISHED = "\x00"
CANCEL = "cancel"
NAME = "name"
TEXT = "text"
# DEBUG = False
logger = logging.getLogger("Debug Window")
parser = argparse.ArgumentParser(description="Sined Messages: Send messages over sound")
parser.add_argument("--debug", dest="debug", action="store_true", default=False)
args = parser.parse_args()
if args.debug: # If debugging
logger.setLevel(logging.DEBUG) # Set log level to debug
else: # If not
logger.setLevel(logging.INFO) # Set it to info
class ChatClient:
playing = False # Whether the client is playing
override = None # Any overrides
separated = False # Whether the messages are separated (helps reduce loss of 'packets')
started_count = False #These are used to determine how quickly data is being transmitted
ended_count = False
start_time = None
count_time = 0
end_time = None
speed = None
last = -1
def handle_name(self, raw_args): # Handles the name command
old_name = self.other_name # The old name
self.other_name = raw_args # Sets the new name
self.action(f"{old_name} has changed their name to {self.other_name}")
def handle_text(self, raw_args): # Handles the text
self.message(self.other_name, raw_args) # Sends message
commands = {
TEXT: handle_text, # Handles text command
NAME: handle_name # Handles NAME command
}
requests = {
NAME: lambda self, _: self.play_string(f"{NAME}:{self.name}{FINISHED}"), # Responds to NAME request
TEXT: lambda self, _: self.play_string(f"{TEXT}:{FINISHED}") # Responds to request for text
}
buffer = []
pre_buff = []
def __init__(self, name="Client 0", other_name="other", buffer=()):
self.handle_thread = None
self.name = name
self.other_name = other_name
self.buffer = list(buffer)
def start(self):
patl.start_listener(self.chat_listener) # Starts the chat listener
def chat_listener(self, value):
if self.playing: # If playing
return # Do nothing
if self.override: # If there's an override
return self.override(value) # Use the override
if USE_SEPARATOR: # If using a separator
if value == SEPARATOR:
self.separated = True # Now Separated
return
if not self.separated and (self.last == value or self.last == -1): # If not separated
return
logger.debug(f"received {value}")
self.pre_buff.append(value) # Adds to first buffer
self.separated = False
logger.debug(f"{self.buffer} & {self.pre_buff}")
if len(self.pre_buff) == BOX_LENGTH: # If the first buffer reaches the target size
decoded = alphabet[base_decode(self.pre_buff, BASE)] # Decodes the characther
if not self.started_count: # Works out the speed
print("start")
self.start_time = time.time()
self.started_count = True
self.count_time += 1
if self.started_count and self.count_time == 10:
self.end_time = time.time()
self.speed = self.end_time - self.start_time
self.started_count = False
print(f"Speed per 10 Bytes: {self.speed} --> {int(self.speed)}")
self.pre_buff = []
if decoded == FINISHED: # If finished
self.start_handle_thread() # Handle received text
else: # If not finished
self.buffer.append(decoded) # Add that to the buffer
def handle(self):
buffer = ''.join(self.buffer) # Joins the characters
self.buffer = []
has_command = False # If it has a command
has_request = False # If it has a request
command_name = ""
position = 0
while not (has_command or has_request):
try:
char = buffer[position] # Get the character
except IndexError:
continue
if char == ":": # If a ':'
has_command = True # It's a command
continue
if char == "?": # If a '?'
has_request = True # It's a request
continue
command_name += char # Add the character to the command name
position += 1
raw_args = ''.join(buffer[position + 1:])
logger.debug(f"COMMAND: {command_name} (`{raw_args}`)")
if has_command: # If has received a command
self.commands[command_name](self, raw_args) # Deal with the command
if has_request: # If has received a request
self.requests[command_name](self, raw_args) # Deal with the request
self.clean_handle_thread()
def play_value(self, value): # Sends the value using the utils
logger.debug(f"sending {value}")
self.playing = True
if USE_SEPARATOR:
pat.play_value(SEPARATOR)
pat.play_value(value)
self.playing = False
def play_buffer(self, buffer): # sends a buffer
[self.play_value(value) for value in buffer]
def play_char(self, char): # Sends a character
[self.play_value(value) for value in base_encode(char, BASE)]
def play_string(self, string): # Sends a string
[self.play_char(char) for char in string]
def request(self, value: str, raw_args=""): # Sends a request
self.play_string(value + "?:" + raw_args + FINISHED)
def command(self, value: str, raw_args=""): # Sends a command
self.play_string(value + ":" + raw_args + FINISHED)
def start_handle_thread(self): # Starts the handle thread
thread = Thread(target=self.handle)
thread.start()
self.handle_thread = thread
def clean_handle_thread(self): # Cleans the handle thread
self.handle_thread = None
def action(self, action_message): # When receiving an action
print("*" + action_message) # TODO
def message(self, sender, message): # When receiving a message
print(f"{sender}: {message}")
def base_encode(number, base): # Encodes to send
if type(number) == str:
assert len(number) == 1
number = alphabet.index(number)
d = []
while number != 0:
d.append(number % base)
# number -= (number // high) * high
number //= base
while len(d) != BOX_LENGTH:
d.append(0)
return d[::-1]
def base_4_encode(number): # Encodes to be sent (I think this is unused)
if type(number) == str:
assert len(number) == 1
number = alphabet.index(number)
d = []
high = number - number % 4
while number != 0:
d.append(number % 4)
# number -= (number // high) * high
number //= 4
while len(d) != BOX_LENGTH:
d.append(0)
return d[::-1]
def base_decode(buffer, base): # Decodes the received buffer
buffer = buffer[::-1]
total = 0
for place in range(len(buffer)):
# print(f"{buffer[place]}@{place}~{4**place} --> {buffer[place] * 4**place}") # DEBUG
total += buffer[place] * base ** place
return total
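# Round-trip sketch (assumed): a character maps to BOX_LENGTH base-BASE digits and back.
# digits = base_encode('A', BASE)  # list of BOX_LENGTH ints, most significant digit first
# assert alphabet[base_decode(digits, BASE)] == 'A'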
def base_4_decode(buffer): # Decodes into base4 (Think this is now unused)
buffer = buffer[::-1]
total = 0
for place in range(len(buffer)):
# print(f"{buffer[place]}@{place}~{4**place} --> {buffer[place] * 4**place}") # DEBUG
total += buffer[place] * 4 ** place
return total
| StarcoderdataPython |
1797706 | <gh_stars>0
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import sklearn.linear_model
# Load the data
oecd_bli = pd.read_csv("BLI_20042020185733310.csv", thousands=',')
gdp_per_capita = pd.read_csv("WEO_Data.csv", thousands=',', delimiter='\t',
encoding='latin1', na_values="n/a")
# Prepare the data
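# NOTE: prepare_country_stats is assumed to be defined or imported elsewhere; it is
# expected to merge the two CSVs into one DataFrame with "GDP per capita" and
# "Life satisfaction" columns, as used below.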
country_stats = prepare_country_stats(oecd_bli, gdp_per_capita)
X = np.c_[country_stats["GDP per capita"]]
y = np.c_[country_stats["Life satisfaction"]]
# Visualize the data
country_stats.plot(kind='scatter', x="GDP per capita", y='Life satisfaction')
plt.show()
# Select a linear model
lin_reg_model = sklearn.linear_model.LinearRegression()
# Train the model
lin_reg_model.fit(X, y)
# Make a prediction for Cyprus
X_new = [[22587]] # Cyprus' GDP per capita
print(lin_reg_model.predict(X_new)) # outputs [[ 5.96242338]]
| StarcoderdataPython |
3369822 | <reponame>viraajpunia/Mini-Amazon<gh_stars>0
from flask import current_app as app
class Product:
def __init__(self, id, category, name, descrip, img_link, price, available):
self.id = id
self.category = category
self.name = name
self.descrip = descrip
self.img_link = img_link
self.price = price
self.available = available
@staticmethod
def get(id):
rows = app.db.execute('''
SELECT product_id, category, name, descrip, img_link, price, available
FROM Products
WHERE product_id = :id
''',
id=id)
return Product(*(rows[0])) if rows is not None else None
@staticmethod
def get_all(available=True):
rows = app.db.execute('''
SELECT product_id, category, name, descrip, img_link, price, available
FROM Products
''',
available=available)
return [Product(*row) for row in rows]
@staticmethod
def get_all_sorted(available=True):
rows = app.db.execute('''
SELECT product_id, category, name, descrip, img_link, price, available
FROM Products
ORDER BY price DESC
''',
available=available)
return [Product(*row) for row in rows]
@staticmethod
def get_exact_item_name(name):
rows = app.db.execute('''
SELECT product_id, category, name, descrip, img_link, price, available
FROM Products
WHERE name = :name
''',
name=name)
return [Product(*row).name for row in rows]
@staticmethod
def get_exact_item_id(name):
rows = app.db.execute('''
SELECT product_id, category, name, descrip, img_link, price, available
FROM Products
WHERE name = :name
''',
name=name)
return [Product(*row).id for row in rows]
@staticmethod
def get_item(name):
rows = app.db.execute('''
SELECT product_id, category, name, descrip, img_link, price, available
FROM Products
WHERE name LIKE '%' || :name || '%' OR descrip LIKE '%' || :name || '%'
''',
name=name)
return [Product(*row) for row in rows]
@staticmethod
def get_item_sorted(name):
rows = app.db.execute('''
SELECT product_id, category, name, descrip, img_link, price, available
FROM Products
WHERE name LIKE '%' || :name || '%' OR descrip LIKE '%' || :name || '%'
ORDER BY price DESC
''',
name=name)
return [Product(*row) for row in rows]
@staticmethod
def get_category(category):
rows = app.db.execute('''
SELECT product_id, category, name, descrip, img_link, price, available
FROM Products
WHERE category = :category
''',
category=category)
return [Product(*row) for row in rows]
@staticmethod
def get_category_sorted(category):
rows = app.db.execute('''
SELECT product_id, category, name, descrip, img_link, price, available
FROM Products
WHERE category = :category
ORDER BY price DESC
''',
category=category)
return [Product(*row) for row in rows]
@staticmethod
def get_item_in_category(name, category):
rows = app.db.execute('''
SELECT product_id, category, name, descrip, img_link, price, available
FROM Products
WHERE category = :category
AND (name LIKE '%' || :name || '%' OR descrip LIKE '%' || :name || '%')
''',
name=name, category=category)
return [Product(*row) for row in rows]
@staticmethod
def get_item_in_category_sorted(name, category):
rows = app.db.execute('''
SELECT product_id, category, name, descrip, img_link, price, available
FROM Products
WHERE category = :category
AND (name LIKE '%' || :name || '%' OR descrip LIKE '%' || :name || '%')
ORDER BY price DESC
''',
name=name, category=category)
return [Product(*row) for row in rows]
@staticmethod
def add_item(product_id, name, category, descrip, img_link, price, available = True):
rows = app.db.execute('''
INSERT INTO Products
VALUES(:product_id, :category, :name, :descrip, :img_link, :price, :available)
returning *
''',
product_id=product_id,
name=name,
category=category,
descrip=descrip,
img_link=img_link,
price=price,
available=available)
return None
@staticmethod
def edit_item(name, category, descrip, img_link, price):
rows = app.db.execute('''
UPDATE Products
SET category = :category, descrip = :descrip, img_link = :img_link, price = :price
WHERE name = :name
returning *
''',
name=name,
category=category,
descrip=descrip,
img_link=img_link,
price=price)
return None
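# Usage sketch inside a Flask view (route and template names are illustrative only):
# @app.route('/search')
# def search():
#     items = Product.get_item_sorted(request.args.get('q', ''))
#     return render_template('search.html', products=items)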
| StarcoderdataPython |
3246660 | <filename>Python/questions/BinarySearchTreeIterator/binary-search-tree-iterator-solution-1.py<gh_stars>0
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class BSTIterator:
def __init__(self, root: TreeNode):
self.nums = []
self.index = -1
self.travesalInOrder(root)
def travesalInOrder(self, node: TreeNode) -> None:
if not node:
return
self.travesalInOrder(node.left)
self.nums.append(node.val)
self.travesalInOrder(node.right)
def next(self) -> int:
"""
@return the next smallest number
"""
self.index += 1
return self.nums[self.index]
def hasNext(self) -> bool:
"""
@return whether we have a next smallest number
"""
return self.index + 1 < len(self.nums)
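# Note: the constructor materializes the whole in-order traversal up front, so next()
# and hasNext() are O(1) per call at the cost of O(n) memory.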
if __name__ == "__main__":
root = TreeNode(7)
root.left = TreeNode(3)
root.right = TreeNode(15)
root.right.left = TreeNode(9)
root.right.right = TreeNode(20)
iterator = BSTIterator(root)
print(iterator.next()); # return 3
print(iterator.next()); # return 7
print(iterator.hasNext()); # return true
print(iterator.next()); # return 9
print(iterator.hasNext()); # return true
print(iterator.next()); # return 15
print(iterator.hasNext()); # return true
print(iterator.next()); # return 20
print(iterator.hasNext()); # return false | StarcoderdataPython |
1682784 | <reponame>WillGreen98/University-INTPROG-Python<gh_stars>1-10
import random


class Coin(object):
def __init__(self, value):
self.value = value
def flip(self):
self.flip_value = random.randint(0, 1)
if self.flip_value == 0:
print("Heads")
else:
print("Tails")
# pence = Coin("50p")
# pence.flip()
| StarcoderdataPython |
3266614 | <filename>psrasvr.py
import atddm
import numpy as np
import pandas as pd
from sklearn.svm import SVR
from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error
from sklearn.model_selection import cross_validate, TimeSeriesSplit
from constants import CODES, TZONES, INTERVAL
nairp = len(CODES)
CODES.sort()
# sort airport ICAO codes alphabetically
FEATURES = ['national', 'continental', 'intercontinental', 'time_cos',
'time_sin', 'weekday_cos', 'weekday_sin']
# features create by load
def to_seconds(x):
return x.hour*3600 + x.minute*60 + x.second
def load(**kwargs):
"""
Load data and compute some features
**kwargs can be used to pass arguments to atddm.load
"""
dd = atddm.load(**kwargs)
for df in dd.values():
df['national'] = (df['START'].apply(lambda x: x[:2]) ==
df['END'].apply(lambda x: x[:2])).astype(int)
df['continental'] = df['START'].apply(lambda x: x[0]).isin(['E', 'L'])\
* abs(1-df['national'])
df['intercontinental'] = 1 - df['national'] - df['continental']
onedayinsecs = 24 * 3600
for df in dd.values():
# df['day_part'] = df['M1_FL240'].dt.time.apply(categorize_time)
m1_time = to_seconds(df['M1_FL240'].dt)
df['time_cos'] = np.cos(m1_time*2*np.pi/onedayinsecs)
df['time_sin'] = np.sin(m1_time*2*np.pi/onedayinsecs)
weekday = df['M1_FL240'].dt.weekday
df['weekday_cos'] = np.cos(weekday*2*np.pi/7)
df['weekday_sin'] = np.sin(weekday*2*np.pi/7)
df['week'] = df['M1_FL240'].dt.week
df['dayno'] = df['M1_FL240'].dt.dayofyear
df['delay_sec'] = df['delay']/pd.Timedelta(1, unit='s')
return dd
def predict_demand(regr, X, y):
"""
Predict tM3 according to the model t^M3 = tM1 + delta
and aggregate the predicted tM3 to obtain the predicted demand
regr is a regression model that can predict delays delta = tM3 - tM1
X is a matrix of features with X[:, 0] being tM1
y is a vector with tM3
"""
params = regr.get_params()
interval = params['interval']
tz = params['tz']
tm3 = pd.to_datetime(y)
# transform y in data format
y_true = atddm.binarrivals(tm3, interval=interval, tz=tz).fillna(0)
# bin observed tM3 to obtain observed demand
y_pred = atddm.binarrivals(regr.predict(X), interval=interval,
tz=tz).fillna(0)
# bin predicted tM3 to obtain predicted demand
combined_indx = y_true.index.union(y_pred.index)
y_true = y_true.reindex(index=combined_indx).fillna(0)
y_pred = y_pred.reindex(index=combined_indx).fillna(0)
# reindex predicted demand as observed one to avoid length mismatch
return y_true, y_pred
def demand_r2_score(regr, X, y, sample_weight=None):
"""
Return r2 score of the predicted demand
X is a matrix of features with X[:, 0] being the M1 time
y is the observed M3 time
"""
y_true, y_pred = predict_demand(regr, X, y)
return r2_score(y_true, y_pred, sample_weight)
def demand_mse_score(regr, X, y, sample_weight=None):
"""
Return mean squared error between observed and predicted demand
X is a matrix of features with X[:, 0] being the M1 time
y is the observed M3 time
"""
y_true, y_pred = predict_demand(regr, X, y)
return mean_squared_error(y_true, y_pred, sample_weight)
def demand_mae_score(regr, X, y, sample_weight=None):
"""
Return mean absolute error between observed and predicted demand
X is a matrix of features with X[:, 0] being the M1 time
y is the observed M3 time
"""
y_true, y_pred = predict_demand(regr, X, y)
return mean_absolute_error(y_true, y_pred, sample_weight)
SCORING = dict(zip(['r2', 'mse', 'mae'],
[demand_r2_score, demand_mse_score, demand_mae_score]))
class psraSVR(SVR):
"""
Custom support vector machine to predict delays between arrivals according
to regulated (M1) and current (M3) flight plans
It exposes customized fit and predict methods to deal with time data
"""
def __init__(self, kernel='rbf', degree=3, gamma='auto', coef0=0.0,
tol=0.001, C=1.0, epsilon=0.1, shrinking=True, cache_size=200,
verbose=False, max_iter=-1, interval=INTERVAL, tz='UTC'):
"""
Constructor of the class
interval is the binning interval
tz is the time zone of the arrival airport for which the demand is to
be predicted
"""
super().__init__(kernel=kernel, degree=degree, gamma=gamma, coef0=coef0, tol=tol, C=C, epsilon=epsilon, shrinking=shrinking, cache_size=cache_size, verbose=verbose, max_iter=max_iter)
self.interval = interval
self.tz = tz
def fit(self, X, y=None, sample_weight=None):
"""
fit model
X is an array of features with the understanding that the first
column X[:, 0] is the M1 arrival time
y is the M3 arrival time time
"""
# transform y and X[:,0] via pd.to_datetime
tm3 = pd.to_datetime(y)
tm1 = pd.to_datetime(X[:, 0])
target = np.array((tm3 - tm1)/pd.Timedelta(1, unit='s'))
features = X[:, 1:]
return super().fit(features, target, sample_weight)
def predict(self, X):
"""
return predicted M3 time according to formula M1 + predicted_delay
X is an array of features with the understanding that the first
column X[:, 0] is the M1 arrival time
y is the M3 arrival time time
"""
# transform X[:,0] via pd.to_datetime then transform the prediction in
# TimeDelta before adding it
tm1 = pd.to_datetime(X[:, 0])
features = X[:, 1:]
y_pred = super().predict(features)
tm3_hat = tm1 + pd.to_timedelta(y_pred, unit='s')
return tm3_hat
def test():
print('Cross-validation test routine')
dd = load()
code = 'LIRF'
features = ['national', 'continental', 'intercontinental', 'time_cos',
'time_sin', 'weekday_cos', 'weekday_sin']
df = dd[code].sort_values(by='M1_FL240')
y = np.array(df['M3_FL240'])
cols = ['M1_FL240'] + features
X = df.as_matrix(columns=cols)
psvr = psraSVR(interval=INTERVAL, tz=TZONES[code])
tss = TimeSeriesSplit(n_splits=3)
scores = cross_validate(psvr, X, y, scoring=SCORING, cv=tss, n_jobs=3,
return_train_score=False)
print('Results of cross-validation')
print(scores)
if __name__ == '__main__':
test()
| StarcoderdataPython |
68338 | """
error models for pybugsnag
"""
class PyBugsnagException(Exception):
"""base pybugsnag exception class"""
def __init__(self, *args, **kwargs):
extra = ""
if args:
extra = '\n| extra info: "{extra}"'.format(extra=args[0])
print(
"[{exception}]: {doc}{extra}".format(
exception=self.__class__.__name__, doc=self.__doc__, extra=extra
)
)
Exception.__init__(self, *args, **kwargs)
class RateLimited(PyBugsnagException):
"""request received a 429 - you are currently rate limited"""
| StarcoderdataPython |
54225 | <gh_stars>0
"""
The is the abstract defender base class.
A defender can see all values at the beginning of
each simulation round and can try to detect an attack.
It is also possible to first let the defender learn
before running an attack.
"""
from abc import ABC, abstractmethod
__author__ = '<NAME>'
__email__ = '<EMAIL>'
__date__ = '2020/03/18'
class AttackDetected(Exception):
""" This exception should be raised if the defender detects an attack. """
def __init__(self, message):
self.message = message
class Defender(ABC):
# Number of rounds to learn before starting to defend.
learningRounds = 0
# Ignore the first few detections.
ignore = 0
def __init__(self, config=None):
""" Init defender with given config """
try:
self.learningRounds = int(config['roundsToLearn'])
self.ignore = int(config['ignore'])
except:
pass
def attackDetected(self, msg="Attack detected"):
if self.ignore > 0:
self.ignore -= 1
else:
raise AttackDetected(msg)
def learn(self, processValues):
""" This method should implement a learning algorithm. """
pass
# TODO: make this abstract.
def detect(self, processValues):
"""
This method should implement the detection algorithm.
TODO: maybe it makes sense to implement more detection hooks,
which the attacker calls after running the control loops or
even after each control loop.
"""
pass
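# Illustrative concrete defender (a sketch, not part of the original module): flag any
# process value outside the min/max band observed while learning.
#
# class RangeDefender(Defender):
#     def learn(self, processValues):
#         vals = list(processValues)
#         self.low = min(vals + [getattr(self, 'low', vals[0])])
#         self.high = max(vals + [getattr(self, 'high', vals[0])])
#     def detect(self, processValues):
#         if any(v < self.low or v > self.high for v in processValues):
#             self.attackDetected("value outside learned range")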
| StarcoderdataPython |
77832 | """Philips Hue Sync Box integration."""
import ipaddress
import homeassistant.helpers.config_validation as cv
import voluptuous as vol
from .const import *
from . import services
DEFAULT_NAME = 'Hue Sync'
def coerce_ip(value):
"""Validate that provided value is a valid IP address."""
if not value:
raise vol.Invalid("Must define an IP address")
try:
ipaddress.IPv4Network(value)
except ValueError:
raise vol.Invalid("Not a valid IP address")
return value
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({
vol.Required(CONF_IP_ADDRESS): vol.All(cv.string, coerce_ip),
vol.Required(CONF_ACCESS_TOKEN): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
})
async def async_setup(hass, config):
hass.data[HUE_SYNC_DOMAIN] = {}
return True
| StarcoderdataPython |
3369342 | <reponame>tuvshinot/algorithm-sorting-DS
from math import floor
def decimal_to_binary(num):
acc = []
remainder = 0;
binary = ''
while num > 0:
remainder = num % 2
acc.append(1) if remainder == 1 else acc.append(0)
num = floor(num / 2)
acc.reverse()
binary = ''.join(str(el) for el in acc)
return binary
print(decimal_to_binary(1995))
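# Sanity check (assumed): 1995 in binary is '11111001011', which is what the call above prints.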
| StarcoderdataPython |
178724 | import tensorflow as tf
from keras.models import Sequential,load_model,model_from_json
from keras.layers import Dense, Dropout,Activation,MaxPooling2D,Conv2D,Flatten
from keras.applications.imagenet_utils import preprocess_input, decode_predictions
from keras.preprocessing.image import load_img
from keras.preprocessing import image
import numpy as np
import h5py
import os
import sys
import json
from sklearn.preprocessing import StandardScaler
from predictor import sc
# Flask utils
from flask import Flask, redirect, url_for, request, render_template
from werkzeug.utils import secure_filename
# Define a flask app
app = Flask(__name__)
with open('customer_churn_prediction_model.json','r') as f:
model = model_from_json(f.read())
# Load your trained model
model.load_weights('customer_churn_prediction_model.h5')
print('Model loaded. Check http://127.0.0.1:5000/')
@app.route('/', methods=['GET'])
def index():
# Main page
return render_template('prediction.html')
@app.route('/', methods=['POST'])
def upload():
if request.method == 'POST':
# Get the values from the form
credit_score = request.form['cr_score']
age = request.form['age']
tenure = request.form['tenure']
balance = request.form.get('balance')
number_of_products = request.form.get('no_of_products')
estimated_salary = request.form['salary']
country = request.form['country']
gender = request.form['gender']
has_credit_card = request.form['cr_card']
is_active_member = request.form['active_member']
print([credit_score,age,tenure,balance,number_of_products,estimated_salary,country,gender,has_credit_card,is_active_member])
# Process input
if country=="France":
countries= [0,0]
elif country=="Germany":
countries = [1,0]
else:
countries = [0,1]
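# `countries` is a two-element indicator pair; by assumption it follows the usual churn
# dataset dummy encoding [Germany, Spain], with France as the dropped baseline category.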
# Make Prediction
prediction = model.predict(sc.transform(np.array([[countries[0],countries[1],credit_score,gender,age,tenure,balance,number_of_products,has_credit_card,is_active_member,estimated_salary]])))
# Process your result for human
if prediction > 0.5:
result = "The customer will leave the bank"
else:
result = "The customer won't leave the bank"
return result
return None
if __name__ == '__main__':
app.debug = True
app.run(host='0.0.0.0', port=5000)
| StarcoderdataPython |
3271427 | <reponame>cyrusradfar/vaex
def connect(url, **kwargs):
"""Connect to remote server (default is tornado)"""
# dispatch to vaex.server.tornado package
from .tornado_client import connect
return connect(url, **kwargs)
| StarcoderdataPython |
1673770 | <reponame>tsaycal/romodel
from .knapsack import Knapsack
from .portfolio import Portfolio
from .pooling import Pooling
| StarcoderdataPython |
1639965 | <filename>logic/flatpak.py
from helper import run_cmd
class Flatpak:
@staticmethod
def do(cmd):
"""run an unimplemented command"""
run_cmd(f"flatpak {cmd}")
@staticmethod
def update():
Flatpak.do("update")
@staticmethod
def remote_add(name, url, *args):
cmd = f"remote-add {name} {url}"
for arg in args:
cmd += " " + arg
Flatpak.do(cmd)
@staticmethod
def install(package):
Flatpak.do(f"install -y {package}")
| StarcoderdataPython |
3297053 | <reponame>okfde/ckankrzn
../v19_01_error/plugin.py | StarcoderdataPython |
3200332 | <filename>prc/app_post2.py
'''
Created on 18.04.2018
@author: trevaz
--------------------------------------------------------------------------------
app: post-process
--------------------------------------------------------------------------------
'''
################################################################################
# IMPORT
################################################################################
import os, sys
import fctlib
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.animation as animation
################################################################################
# CONSTANTS
################################################################################
################################################################################
# MAIN FONCTION
################################################################################
def post(PATH, case_name):
'''
DEF: post-processing for wireles.
INPUT: - case_name
OUTPUT: - ()
'''
case_path = fctlib.get_case_path(PATH, case_name)
############################################################################
# INIT
out_path = os.path.join(case_path, 'output')
fctlib.test_and_mkdir(out_path)
src_out_path = os.path.join(PATH['job'], case_name, 'src', 'output')
src_inp_path = os.path.join(PATH['job'], case_name, 'src', 'input')
############################################################################
# CONFIG
print('extract config...')
config = fctlib.get_config(case_path)
############################################################################
# COMPUTE
print('compute results...')
space = get_space(config)
time = get_time(config)
if config['log_flag'] > 0:
log = get_log(src_out_path, config)
if config['ta_flag'] > 0:
result_3d = get_result_3d(src_inp_path, src_out_path, config)
result_pr = get_result_pr(result_3d, config)
if config['ts_flag'] > 0:
result_4d = get_result_4d(src_out_path, config)
############################################################################
# PLOT
print('plot results...')
if config['log_flag'] > 0:
plot_log(time, log, config, out_path)
if config['ta_flag'] > 0:
########################################################################
# pr global
########################################################################
plot_pr_uvw(space, result_pr, config, out_path)
plot_pr_log(space, result_pr, config, out_path)
plot_pr_phi(space, result_pr, config, out_path)
plot_pr_st(space, result_pr, config, out_path)
########################################################################
# pr local
########################################################################
plot_pr(space['z_c'][:config['nz']//2], result_3d['u_avg_c'][config['nx']//2-1+6 ,config['ny']//2-1,:config['nz']//2], 'z_u_avg6', out_path)
plot_pr(space['z_c'][:config['nz']//2], result_3d['u_avg_c'][config['nx']//2-1+12,config['ny']//2-1,:config['nz']//2], 'z_u_avg12', out_path)
plot_pr(space['z_c'][:config['nz']//2], result_3d['u_avg_c'][config['nx']//2-1+18,config['ny']//2-1,:config['nz']//2], 'z_u_avg18', out_path)
plot_pr(space['z_c'][:config['nz']//2], result_3d['u_avg_c'][config['nx']//2-1+24,config['ny']//2-1,:config['nz']//2], 'z_u_avg24', out_path)
########################################################################
# avg
########################################################################
turb_i1 = 32
turb_i2 = 96
plot_sl(space['x'], space['y'], result_3d['u_avg_c'][:,:,5], 'x', 'y', 'u_avg5', 2, out_path)
plot_sl(space['x'], space['y'], result_3d['u_avg_c'][:,:,8], 'x', 'y', 'u_avg8', 2, out_path)
plot_sl(space['x'], space['y'], result_3d['u_avg_c'][:,:,9], 'x', 'y', 'u_avg9', 2, out_path)
plot_sl(space['x'], space['y'], result_3d['u_avg_c'][:,:,10], 'x', 'y', 'u_avg10', 2, out_path)
plot_sl(space['x'], space['z_n'], result_3d['u_avg_c'][:,config['ny']//2-1,:], 'x', 'z', 'u_avg', 1, out_path)
plot_sl(space['y'], space['z_n'], result_3d['u_avg_c'][turb_i1-1,:,:], 'y', 'z', 'u_avg1', 1, out_path)
plot_sl(space['y'], space['z_n'], result_3d['u_avg_c'][turb_i2-1,:,:], 'y', 'z', 'u_avg2', 1, out_path)
plot_sl(space['x'], space['y'], result_3d['v_avg_c'][:,:,5], 'x', 'y', 'v_avg5', 2, out_path)
plot_sl(space['x'], space['y'], result_3d['v_avg_c'][:,:,8], 'x', 'y', 'v_avg8', 2, out_path)
plot_sl(space['x'], space['y'], result_3d['v_avg_c'][:,:,9], 'x', 'y', 'v_avg9', 2, out_path)
plot_sl(space['x'], space['y'], result_3d['v_avg_c'][:,:,10], 'x', 'y', 'v_avg10', 2, out_path)
plot_sl(space['x'], space['z_n'], result_3d['v_avg_c'][:,config['ny']//2-1,:], 'x', 'z', 'v_avg', 1, out_path)
plot_sl(space['y'], space['z_n'], result_3d['v_avg_c'][turb_i1-1,:,:], 'y', 'z', 'v_avg1', 1, out_path)
plot_sl(space['y'], space['z_n'], result_3d['v_avg_c'][turb_i2-1,:,:], 'y', 'z', 'v_avg2', 1, out_path)
plot_sl(space['x'], space['y'], result_3d['w_avg_c'][:,:,5], 'x', 'y', 'w_avg5', 2, out_path)
plot_sl(space['x'], space['y'], result_3d['w_avg_c'][:,:,8], 'x', 'y', 'w_avg8', 2, out_path)
plot_sl(space['x'], space['y'], result_3d['w_avg_c'][:,:,9], 'x', 'y', 'w_avg9', 2, out_path)
plot_sl(space['x'], space['y'], result_3d['w_avg_c'][:,:,10], 'x', 'y', 'w_avg10', 2, out_path)
plot_sl(space['x'], space['z_n'], result_3d['w_avg_c'][:,config['ny']//2-1,:], 'x', 'z', 'w_avg', 1, out_path)
plot_sl(space['y'], space['z_n'], result_3d['w_avg_c'][turb_i1-1,:,:], 'y', 'z', 'w_avg1', 1, out_path)
plot_sl(space['y'], space['z_n'], result_3d['w_avg_c'][turb_i2-1,:,:], 'y', 'z', 'w_avg2', 1, out_path)
########################################################################
# std
########################################################################
plot_sl(space['x'], space['y'], result_3d['u_std_c'][:,:,5], 'x', 'y', 'u_std5', 2, out_path)
plot_sl(space['x'], space['y'], result_3d['u_std_c'][:,:,8], 'x', 'y', 'u_std8', 2, out_path)
plot_sl(space['x'], space['y'], result_3d['u_std_c'][:,:,9], 'x', 'y', 'u_std9', 2, out_path)
plot_sl(space['x'], space['y'], result_3d['u_std_c'][:,:,10], 'x', 'y', 'u_std10', 2, out_path)
plot_sl(space['x'], space['z_n'], result_3d['u_std_c'][:,config['ny']//2-1,:], 'x', 'z', 'u_std', 1, out_path)
plot_sl(space['y'], space['z_n'], result_3d['u_std_c'][turb_i1-1,:,:], 'y', 'z', 'u_std1', 1, out_path)
plot_sl(space['y'], space['z_n'], result_3d['u_std_c'][turb_i2-1,:,:], 'y', 'z', 'u_std2', 1, out_path)
plot_sl(space['x_'], space['y_'], result_3d['u_inst_c'][:,:,5], 'x', 'y', 'u_inst5', 1, out_path)
plot_sl(space['x_'], space['y_'], result_3d['u_inst_c'][:,:,8], 'x', 'y', 'u_inst8', 1, out_path)
plot_sl(space['x_'], space['y_'], result_3d['u_inst_c'][:,:,9], 'x', 'y', 'u_inst9', 1, out_path)
plot_sl(space['x_'], space['y_'], result_3d['u_inst_c'][:,:,10], 'x', 'y', 'u_inst10', 1, out_path)
plot_sl(space['x_'], space['z_n'], result_3d['u_inst_c'][:,config['ny']//2-1,:], 'x', 'z', 'u_inst', 1, out_path)
plot_sl(space['y_'], space['z_n'], result_3d['u_inst_c'][turb_i1-1,:,:], 'y', 'z', 'u_inst1', 1, out_path)
plot_sl(space['y_'], space['z_n'], result_3d['u_inst_c'][turb_i2-1,:,:], 'y', 'z', 'u_inst2', 1, out_path)
# if config['ts_flag'] > 0:
# plot_sl_anim(space['x_'], space['z_n'], result_4d['u_inst_c'][:,:,config['ny']//2,:], 'x', 'z', 'u_inst', 1, out_path)
# plot_sl_anim(space['x_'], space['y_'], result_4d['u_inst_c'][:,:,:,9], 'x', 'y', 'u_inst', 1, out_path)
# # plot_sl_anim(space['x'], space['z_n'], result_4d['u_inst_n'][:,:,config['ny']//2,:], 'x', 'z', 'u_inst_', 2, out_path)
# # plot_sl_anim(space['x'], space['y'], result_4d['u_inst_n'][:,:,:,config['nz']//2], 'x', 'y', 'u_inst_', 2, out_path)
#################################################################################
# PROCESS FUNCTIONS
#################################################################################
def get_space(config):
space = {}
space['x'] = np.arange(0, config['lx'], config['dx'])
space['y'] = np.arange(0, config['ly'], config['dy'])
space['z_n'] = np.arange(0, config['lz']+config['dz'], config['dz'])
space['z_c'] = node2center_1d(space['z_n'])
space['x_'] = np.arange(0, config['lx']+config['dx'], config['dx']) - 0.5*config['dx']
space['y_'] = np.arange(0, config['ly']+config['dy'], config['dy']) - 0.5*config['dy']
return space
def get_time(config):
time = {}
time['t'] = config['dtr']*np.arange(0,config['nsteps'])
time['t_ta'] = config['p_count']*config['dtr']*np.arange(config['ta_tstart'],config['ta_tstart']+config['ta_ns']+1)
time['t_ts'] = config['c_count']*config['dtr']*np.arange(config['ts_tstart'],config['ts_tstart']+config['ts_ns']+1)
return time
def get_log(src_out_path, config):
log = {}
log['ustar'] = fctlib.load_1d('log_ustar', config['nsteps'], src_out_path)
log['umax'] = fctlib.load_1d('log_umax', config['nsteps'], src_out_path)
return log
def get_turb(src_out_path, config):
turb = {}
turb['thrust'] = fctlib.load_1d('turb_thrust', config['nsteps'], src_out_path)
turb['power'] = fctlib.load_1d('turb_power', config['nsteps'], src_out_path)
return turb
def get_result_3d(src_inp_path, src_out_path, config):
result_3d = {}
# avg
ta_u = fctlib.load_4d('ta_u', config['ta_ns'], config['nx'], config['ny'], config['nz'], src_out_path)
ta_v = fctlib.load_4d('ta_v', config['ta_ns'], config['nx'], config['ny'], config['nz'], src_out_path)
ta_w = fctlib.load_4d('ta_w', config['ta_ns'], config['nx'], config['ny'], config['nz'], src_out_path)
ta_uu = fctlib.load_4d('ta_u2', config['ta_ns'], config['nx'], config['ny'], config['nz'], src_out_path)
ta_vv = fctlib.load_4d('ta_v2', config['ta_ns'], config['nx'], config['ny'], config['nz'], src_out_path)
ta_ww = fctlib.load_4d('ta_w2', config['ta_ns'], config['nx'], config['ny'], config['nz'], src_out_path)
ta_uw = fctlib.load_4d('ta_uw', config['ta_ns'], config['nx'], config['ny'], config['nz'], src_out_path)
ta_txz = fctlib.load_4d('ta_txz', config['ta_ns'], config['nx'], config['ny'], config['nz'], src_out_path)
ta_dudz = fctlib.load_4d('ta_dudz', config['ta_ns'], config['nx'], config['ny'], config['nz'], src_out_path)
result_3d['u_avg_c'] = ta_u[-1,:,:,:-1]
result_3d['u_avg_n'] = center2node_3d(ta_u[-1,:,:,:])
result_3d['v_avg_c'] = ta_v[-1,:,:,:-1]
result_3d['v_avg_n'] = center2node_3d(ta_v[-1,:,:,:])
result_3d['w_avg_c'] = node2center_3d(ta_w[-1,:,:,:])
result_3d['w_avg_n'] = ta_w[-1,:,:,:]
result_3d['u_std_c'] = np.sqrt(ta_uu[-1,:,:,:-1]-ta_u[-1,:,:,:-1]*ta_u[-1,:,:,:-1])
result_3d['u_std_n'] = center2node_3d(np.sqrt(ta_uu[-1,:,:,:]-ta_u[-1,:,:,:]*ta_u[-1,:,:,:]))
result_3d['v_std_c'] = np.sqrt(ta_vv[-1,:,:,:-1]-ta_v[-1,:,:,:-1]*ta_v[-1,:,:,:-1])
result_3d['v_std_n'] = center2node_3d(np.sqrt(ta_vv[-1,:,:,:]-ta_v[-1,:,:,:]*ta_v[-1,:,:,:]))
result_3d['w_std_c'] = node2center_3d(np.sqrt(ta_ww[-1,:,:,:]-ta_w[-1,:,:,:]*ta_w[-1,:,:,:]))
result_3d['w_std_n'] = np.sqrt(ta_ww[-1,:,:,:]-ta_w[-1,:,:,:]*ta_w[-1,:,:,:])
result_3d['uw_cov_c'] = node2center_3d(ta_uw[-1,:,:,:]-result_3d['u_avg_n']*result_3d['w_avg_n'])
result_3d['uw_cov_n'] = ta_uw[-1,:,:,:]-result_3d['u_avg_n']*result_3d['w_avg_n']
result_3d['txz_avg_c'] = node2center_3d(ta_txz[-1,:,:,:])
result_3d['txz_avg_n'] = ta_txz[-1,:,:,:]
result_3d['dudz_avg_c'] = node2center_3d(ta_dudz[-1,:,:,:])
result_3d['dudz_avg_n'] = ta_dudz[-1,:,:,:]
# INST
u = fctlib.load_3d('u', config['nx'], config['ny'], config['nz'], src_inp_path)
v = fctlib.load_3d('v', config['nx'], config['ny'], config['nz'], src_inp_path)
w = fctlib.load_3d('w', config['nx'], config['ny'], config['nz'], src_inp_path)
result_3d['u_inst_c'] = u[:,:,:-1]
result_3d['u_inst_n'] = center2node_3d(u)
result_3d['v_inst_c'] = v[:,:,:-1]
result_3d['v_inst_n'] = center2node_3d(v)
result_3d['w_inst_c'] = node2center_3d(w)
result_3d['w_inst_n'] = w
return result_3d
def get_result_4d(src_out_path, config):
result_4d= {}
result_4d['u_inst_c'] = fctlib.load_4d('ts_u', config['ts_ns'], config['nx'], config['ny'], config['nz'], src_out_path)[:,:,:,:-1]
result_4d['v_inst_c'] = fctlib.load_4d('ts_v', config['ts_ns'], config['nx'], config['ny'], config['nz'], src_out_path)[:,:,:,:-1]
result_4d['w_inst_c'] = node2center_4d(fctlib.load_4d('ts_w', config['ts_ns'], config['nx'], config['ny'], config['nz'], src_out_path))
return result_4d
def get_result_pr(result_3d, config):
result_pr = {}
for key in ('u_avg_c', 'v_avg_c', 'w_avg_n', 'u_std_c', 'v_std_c','w_std_n', 'uw_cov_n', 'txz_avg_n','dudz_avg_n'):
result_pr[key] = np.mean(result_3d[key], axis=(0,1))
for key in ('u_inst_c', 'v_inst_c', 'w_inst_n'):
result_pr[key] = result_3d[key][config['nx']//2, config['ny']//2,:]
return result_pr
def node2center_4d(var_n):
var_c = 0.5*(var_n[:,:,:,:-1]+var_n[:,:,:,1:])
return var_c
def node2center_3d(var_n):
var_c = 0.5*(var_n[:,:,:-1]+var_n[:,:,1:])
return var_c
def node2center_1d(var_n):
var_c = 0.5*(var_n[:-1]+var_n[1:])
return var_c
def center2node_4d(var_c):
var_n = np.copy(var_c)
var_n[:,:,:,0] = 0.0
var_n[:,:,:,1:] = 0.5*(var_c[:,:,:,:-1]+var_c[:,:,:,1:])
return var_n
def center2node_3d(var_c):
var_n = np.copy(var_c)
var_n[:,:,0] = 0.0
var_n[:,:,1:] = 0.5*(var_c[:,:,:-1]+var_c[:,:,1:])
return var_n
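# Hedged illustration (not called by the pipeline): the node2center/center2node
# helpers above implement staggered-grid averaging along the last (vertical)
# dimension -- centers are midpoints between nodes, and the first node level is
# zeroed when interpolating centers back to nodes.
def _example_staggered_grid():
    z_n = np.arange(0.0, 5.0)      # node levels: [0, 1, 2, 3, 4]
    z_c = node2center_1d(z_n)      # center levels: [0.5, 1.5, 2.5, 3.5]
    return z_n, z_c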
# def fctlib.load_sp(var_name, Nx, Nz, src_out_path):
# var = np.zeros((Nx//2+1)*Nz)
# bin_file = open(os.path.join(src_out_path, 'spectr' + var_name + '.bin'))
# sp = np.fromfile(bin_file, dtype=np.float64, count=(Nx//2+1)*Nz);
# bin_file.close()
# sp = var.reshape((Nx//2+1, Nz), order='F')
# return sp
# def fctlib.load_2d_data(var_name, Nf, Nx, src_out_path):
# bin_path = os.path.join(src_out_path, 'bat' + var_name + '.bin')
# bin_file = open(bin_path)
# var = np.zeros((Nf, Nx))
# for m in range(0, Nf):
# print(m)
# tmp = np.fromfile(bin_file, dtype=np.int32, count=1)
# print(tmp)
# var[m, :] = np.fromfile(bin_file, dtype=np.float64, count=Nx)
# print(var)
# tmp = np.fromfile(bin_file, dtype=np.int32, count=1)
# print(tmp)
# bin_file.close()
# var = var.reshape((Nf, Nx), order='F')
# return var
####################
# PLOT GENERAL
####################
def plot_option():
'''
'''
plt.rc('font', family='serif')
plt.rc('xtick', labelsize=11)
plt.rc('ytick', labelsize=11)
plt.rc('axes', labelsize=14)
plt.rc('legend', fontsize=10)
plt.rc('lines', linewidth=1.5)
def plot_pr(z, var, z_name, var_name, out_path):
'''
'''
plt.figure()
plt.plot(var, z, '-ko',label=var_name)
plt.xlabel(var_name, fontsize=14)
plt.ylabel(z_name, fontsize=14)
plt.savefig(os.path.join(out_path, 'pr_' + z_name + '_' + var_name + '.png'), bbox_inches='tight')
plt.close()
def plot_sl(x, y, var, x_name, y_name, var_name, plot_flag, out_path):
plt.figure()
if plot_flag == 1:
plt.pcolormesh(x, y, var.T, cmap='jet', vmin=-0.001, vmax=0.001)
elif plot_flag == 2:
plt.contourf(x, y, var.T, 100, cmap='jet')
plt.xlabel(x_name)
plt.ylabel(y_name)
    plt.gca().set_aspect('equal')
plt.colorbar(orientation = 'horizontal', label= var_name, aspect=30)
plt.savefig(os.path.join(out_path, x_name + y_name + '_'+ var_name + '.png'), bbox_inches='tight')
plt.close()
def plot_sl_anim(x, y, var, x_name, y_name, var_name, plot_flag, out_path):
fig = plt.figure()
ims = []
for i in range (np.size(var, axis=0)):
if i == 0:
if plot_flag == 1:
# im =plt.pcolormesh(x, y, var[i,:,:].T, cmap='jet')
im =plt.pcolormesh(x, y, var[i,:,:].T, cmap='jet', vmin=-0.1, vmax=0.1)
elif plot_flag == 2:
im =plt.contourf(x, y, var[i,:,:].T, 100, cmap='jet')
            plt.gca().set_aspect('equal')
plt.xlabel(x_name)
plt.ylabel(y_name)
plt.colorbar(orientation = 'horizontal', label= var_name, aspect=30)
else:
if plot_flag == 1:
# im =plt.pcolormesh(x, y, var[i,:,:].T, cmap='jet')
im =plt.pcolormesh(x, y, var[i,:,:].T, cmap='jet', vmin=-0.1, vmax=0.1)
elif plot_flag == 2:
im =plt.contourf(x, y, var[i,:,:].T, 100, cmap='jet')
ims.append([im])
ani = animation.ArtistAnimation(fig, ims, blit=True, repeat_delay=100)
ani.save(os.path.join(out_path, x_name + y_name + '_'+ var_name + '.gif'), fps=50)
plt.close()
def plot_log(time, log, config, out_path):
'''
'''
plt.figure()
plt.plot(time['t'], log['ustar'], '-k')
plt.plot(time['t'], config['u_fric']*np.ones(time['t'].shape), '--r')
plt.xlabel('t [s]', fontsize=14)
    plt.ylabel(r'$u_{*} [m/s]$', fontsize=14)
plt.savefig(os.path.join(out_path, 'log_ustar.png'), bbox_inches='tight')
plt.close()
plt.figure()
plt.plot(time['t'], log['umax'], '-k')
plt.xlabel('t [s]', fontsize=14)
plt.ylabel(r'$u_{max} [m/s]$', fontsize=14)
plt.savefig(os.path.join(out_path, 'log_umax.png'), bbox_inches='tight')
plt.close()
def plot_turb(time, turb, config, out_path):
'''
'''
plt.figure()
plt.plot(time['t'], turb['thrust'], '-k')
plt.xlabel('t [s]', fontsize=14)
plt.ylabel(r'$thrust [N]$', fontsize=14)
plt.savefig(os.path.join(out_path, 'turb_thrust.png'), bbox_inches='tight')
plt.close()
plt.figure()
plt.plot(time['t'], turb['power'], '-k')
plt.xlabel('t [s]', fontsize=14)
plt.ylabel(r'$power [W]$', fontsize=14)
plt.savefig(os.path.join(out_path, 'turb_power.png'), bbox_inches='tight')
plt.close()
####################
# PLOT ABL
####################
def plot_pr_uvw(space, result_pr, config, out_path):
'''
'''
plt.figure(figsize=(3*3,2*4))
plt.clf()
plot_option()
plt.subplots_adjust(hspace=0.2)
ax = plt.subplot(231)
plt.plot(result_pr['u_avg_c']/config['u_fric'], space['z_c']/config['l_z'], 'k')
plt.xlabel(r'$\bar{u}/u_*$')
plt.ylabel(r'$z/H$')
ax.xaxis.set_major_locator(plt.MaxNLocator(3))
ax.yaxis.set_major_locator(plt.MaxNLocator(4))
plt.grid(b=True, which='both')
ax = plt.subplot(232)
plt.plot(result_pr['v_avg_c']/config['u_fric'], space['z_c']/config['l_z'], 'g')
plt.xlabel(r'$\bar{v}/u_*$')
plt.setp(ax.get_yticklabels(), visible=False)
ax.xaxis.set_major_locator(plt.MaxNLocator(3))
ax.yaxis.set_major_locator(plt.MaxNLocator(4))
plt.grid(b=True, which='both')
ax = plt.subplot(233)
plt.plot(result_pr['w_avg_n']/config['u_fric'], space['z_n']/config['l_z'], 'b')
plt.xlabel(r'$\bar{w}/u_*$')
plt.setp(ax.get_yticklabels(), visible=False)
ax.xaxis.set_major_locator(plt.MaxNLocator(3))
ax.yaxis.set_major_locator(plt.MaxNLocator(4))
plt.grid(b=True, which='both')
ax = plt.subplot(234)
plt.plot(result_pr['u_std_c']/config['u_fric']**2, space['z_c']/config['l_z'], 'k')
plt.xlabel(r'$\sigma^2_{u}/u^2_*$')
plt.ylabel(r'$z/H$')
ax.xaxis.set_major_locator(plt.MaxNLocator(3))
ax.yaxis.set_major_locator(plt.MaxNLocator(4))
plt.grid(b=True, which='both')
ax = plt.subplot(235)
plt.plot(result_pr['v_std_c']/config['u_fric']**2, space['z_c']/config['l_z'], 'g')
plt.xlabel(r'$\sigma^2_{v}/u^2_*$')
plt.setp(ax.get_yticklabels(), visible=False)
ax.xaxis.set_major_locator(plt.MaxNLocator(3))
ax.yaxis.set_major_locator(plt.MaxNLocator(4))
plt.grid(b=True, which='both')
ax = plt.subplot(236)
plt.plot(result_pr['w_std_n']/config['u_fric']**2, space['z_n']/config['l_z'], 'b')
plt.xlabel(r'$\sigma^2_{w}/u^2_*$')
plt.setp(ax.get_yticklabels(), visible=False)
ax.xaxis.set_major_locator(plt.MaxNLocator(3))
ax.yaxis.set_major_locator(plt.MaxNLocator(4))
plt.grid(b=True, which='both')
plt.subplots_adjust(wspace=0.4)
plt.savefig(os.path.join(out_path, 'pr_uvw.png'), bbox_inches='tight')
plt.close()
def plot_pr_log(space, result_pr, config, out_path):
plt.figure()
plt.semilogy(result_pr['u_avg_c']/config['u_fric'], space['z_c']/config['l_z'], '-ko', fillstyle='none')
plt.semilogy(1/0.4*np.log(space['z_c']/config['zo']), space['z_c']/config['l_z'], '--r')
# plt.xlim([11, 25])
# plt.ylim([1e-2, 1e0])
plt.xlabel(r'$\bar{u}/u_*$', fontsize=14)
plt.ylabel(r'$z/H$', fontsize=14)
plt.savefig(os.path.join(out_path, 'pr_log.png'), bbox_inches='tight')
plt.close()
# def plot_pr_log(space, result_pr, config, out_path):
# plt.figure()
# plt.semilogx(space['z_c']/config['l_z'], result_pr['u_avg_c']/config['u_fric'], '-ko', fillstyle='none')
# plt.semilogx(space['z_c']/config['l_z'], 1/0.4*np.log(space['z_c']/config['zo']), '--r')
# plt.ylabel(r'$\bar{u}/u_*$', fontsize=14)
# plt.xlabel(r'$z/H$', fontsize=14)
# plt.savefig(os.path.join(out_path, 'pr_log.png'), bbox_inches='tight')
# plt.close()
def plot_pr_phi(space, result_pr, config, out_path):
plt.figure()
plt.plot(result_pr['dudz_avg_n'][1:]*(space['z_n'][1:]/config['z_i'])*0.4/config['u_fric'], space['z_n'][1:]/config['l_z'], '-ko', fillstyle='none')
plt.xlim([0.0, 2.0])
plt.ylim([0.0, 0.6])
plt.xlabel(r'$\phi$', fontsize=14)
plt.ylabel(r'$z/H$', fontsize=14)
plt.savefig(os.path.join(out_path, 'pr_phi.png'), bbox_inches='tight')
plt.close()
def plot_pr_st(space, result_pr, config, out_path):
plt.figure()
plt.plot(result_pr['uw_cov_n']/config['u_fric']**2, space['z_n']/config['l_z'], '-go',label='res', fillstyle='none')
plt.plot(result_pr['txz_avg_n']/config['u_fric']**2, space['z_n']/config['l_z'], '-bo',label='sgs', fillstyle='none')
plt.plot((result_pr['uw_cov_n'] + result_pr['txz_avg_n'])/config['u_fric']**2, space['z_n']/config['l_z'], '-ko',label='tot', fillstyle='none')
plt.xlabel(r'$Norm. stress$', fontsize=14)
plt.ylabel(r'$z/H$', fontsize=14)
plt.legend()
plt.savefig(os.path.join(out_path, 'pr_st.png'), bbox_inches='tight')
plt.close()
####################
# SAVE
####################
def save_pr(z, var, pr_name, out_path):
R_header = '# ' + pr_name
    R_array = np.vstack((z, var)).T
    np.savetxt(os.path.join(out_path, 'pr_' + pr_name + '.txt'), R_array, delimiter=' ', fmt='%.4f', header=R_header)
def save_pr_uvw(z, u_avg, v_avg, w_avg, u_std, v_std, w_std, out_path):
'''
'''
R_header = '# z, u_avg, v_avg, w_avg, u_std, v_std, w_std'
    R_array = np.vstack((z, u_avg, v_avg, w_avg, u_std, v_std, w_std)).T
    np.savetxt(os.path.join(out_path, 'pr_uvw.txt'), R_array, delimiter=' ', fmt='%.4f', header=R_header)
# def save_pr_phi(z, u_avg, v_avg, w_avg, u_std, v_std, w_std, out_path):
# '''
# '''
# R_header = '# z, u_avg, v_avg, w_avg, u_std, v_std, w_std'
# R_arrray = np.vstack((z, u_avg, v_avg, w_avg, u_std, v_std, w_std)).T
# np.savetxt(os.path.join(out_path, 'pr_uvw.txt'), R_arrray, delimiter=' ', fmt='%.4f', header=R_header)
#
# def save_pr_st(z, u_avg, v_avg, w_avg, u_std, v_std, w_std, out_path):
# '''
# '''
# R_header = '# z, u_avg, v_avg, w_avg, u_std, v_std, w_std'
# R_arrray = np.vstack((z, u_avg, v_avg, w_avg, u_std, v_std, w_std)).T
# np.savetxt(os.path.join(out_path, 'pr_uvw.txt'), R_arrray, delimiter=' ', fmt='%.4f', header=R_header)
import discord
import json
from config import config
bot = config.bot()
def get_role(role, ctx):
return discord.utils.get(ctx.guild.roles, name=role)
def roles():
with open("Moderating/Perms/roles.json") as file:
return json.load(file)
async def is_muted(user):
return get_role(role="Muted", ctx=user) in user.roles
def list_roles(user):
roles_str = ""
for role in user.roles:
if not role == get_role(role="@everyone", ctx=user):
roles_str = roles_str + f"{role}, "
return roles_str[0:len(roles_str) - 2]
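# Hedged usage sketch (hypothetical command, not registered by this module): the
# helpers above are intended to be used from a discord.py command context, e.g.
#
#     @bot.command(name="roles")
#     async def roles_command(ctx, member: discord.Member):
#         if await is_muted(member):
#             await ctx.send("That member is muted.")
#         await ctx.send(list_roles(member) or "No roles assigned.")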
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\clubs\club_commands.py
# Compiled at: 2019-11-07 00:04:00
# Size of source mod 2**32: 26533 bytes
import functools, operator
from protocolbuffers import GameplaySaveData_pb2, Clubs_pb2
from clubs.club_enums import ClubGatheringStartSource, ClubHangoutSetting
from clubs.club_sim_picker_dialog import ClubSimPickerRow
from clubs.club_tuning import ClubTunables
from distributor.ops import AskAboutClubsDialog
from distributor.system import Distributor
from google.protobuf import text_format
from server_commands.argument_helpers import TunableInstanceParam, RequiredTargetParam, OptionalTargetParam, get_optional_target
from sims4.commands import CommandType
from tag import Tag
from world.region import get_region_instance_from_zone_id
import build_buy, services, sims4
def _get_club_service(_connection):
club_service = services.get_club_service()
if club_service is None:
sims4.commands.output('Club Service not loaded.', _connection)
return club_service
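# Hedged note (illustrative skeleton, not an actual command in this module): the
# console commands below share a common pattern -- resolve the club service, emit a
# "Failed" automation status and bail out when it is unavailable, then look up the
# club or sim arguments before acting:
#
#     club_service = _get_club_service(_connection)
#     if club_service is None:
#         sims4.commands.automation_output('SomeCommand; Status:Failed', _connection)
#         return
#     club = club_service.get_club_by_id(club_id)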
@sims4.commands.Command('clubs.create_club_from_seed', command_type=(sims4.commands.CommandType.Automation))
def create_club_from_seed(club_seed: TunableInstanceParam(sims4.resources.Types.CLUB_SEED), _connection=None):
club = None
if club_seed is not None:
club = club_seed.create_club()
    if club is not None:
sims4.commands.automation_output('ClubCreate; Status:Success, Id:{}'.format(club.club_id), _connection)
else:
sims4.commands.automation_output('ClubCreate; Status:Failed', _connection)
@sims4.commands.Command('clubs.add_sim_to_club')
def add_sim_to_club(sim: RequiredTargetParam, club_name, _connection=None):
target_sim_info = sim.get_target(manager=(services.sim_info_manager()))
if target_sim_info is None:
sims4.commands.output('Not a valid SimID.', _connection)
return
club_service = _get_club_service(_connection)
if club_service is None:
return
club_name_lc = club_name.lower()
for club in club_service.clubs:
if club_name_lc in str(club).lower():
club.add_member(target_sim_info)
return
sims4.commands.output('No existing club with a name including the string {}'.format(club_name_lc), _connection)
@sims4.commands.Command('clubs.add_sim_to_club_by_id', command_type=(sims4.commands.CommandType.Live))
def add_sim_to_club_by_id(sim: RequiredTargetParam, club_id: int, _connection=None):
target_sim_info = sim.get_target(manager=(services.sim_info_manager()))
if target_sim_info is None:
sims4.commands.output('Not a valid SimID.', _connection)
sims4.commands.automation_output('ClubAddSim; Status:Failed', _connection)
return
club_service = _get_club_service(_connection)
if club_service is None:
sims4.commands.automation_output('ClubAddSim; Status:Failed', _connection)
return
club = club_service.get_club_by_id(club_id)
if club is not None:
club.add_member(target_sim_info)
sims4.commands.automation_output('ClubAddSim; Status:Success', _connection)
return
sims4.commands.output('No existing club with id {}'.format(club_id), _connection)
sims4.commands.automation_output('ClubAddSim; Status:Failed', _connection)
@sims4.commands.Command('clubs.set_leader_by_id', command_type=(sims4.commands.CommandType.Live))
def set_leader_by_id(sim: RequiredTargetParam, club_id: int, _connection=None):
target_sim_info = sim.get_target(manager=(services.sim_info_manager()))
if target_sim_info is None:
sims4.commands.output('Not a valid SimID.', _connection)
sims4.commands.automation_output('ClubSetLeader; Status:Failed', _connection)
return
club_service = _get_club_service(_connection)
if club_service is None:
sims4.commands.automation_output('ClubSetLeader; Status:Failed', _connection)
return
club = club_service.get_club_by_id(club_id)
if club is not None:
club.reassign_leader(target_sim_info)
sims4.commands.automation_output('ClubSetLeader; Status:Success', _connection)
return
sims4.commands.output('No existing club with id {}'.format(club_id), _connection)
sims4.commands.automation_output('ClubSetLeader; Status:Failed', _connection)
@sims4.commands.Command('clubs.remove_sim_from_club')
def remove_sim_from_club(sim: RequiredTargetParam, club_name, _connection=None):
target_sim_info = sim.get_target(manager=(services.sim_info_manager()))
if target_sim_info is None:
sims4.commands.output('Not a valid SimID.', _connection)
return
club_service = _get_club_service(_connection)
if club_service is None:
return
club_name_lc = club_name.lower()
for club in club_service.clubs:
if club_name_lc in str(club).lower():
club.remove_member(target_sim_info)
return
sims4.commands.output('No existing club with a name including the string {}'.format(club_name_lc), _connection)
@sims4.commands.Command('clubs.remove_sim_from_club_by_id', command_type=(sims4.commands.CommandType.Live))
def remove_sim_from_club_by_id(sim: RequiredTargetParam, club_id: int, _connection=None):
target_sim_info = sim.get_target(manager=(services.sim_info_manager()))
if target_sim_info is None:
sims4.commands.output('Not a valid SimID.', _connection)
sims4.commands.automation_output('ClubRemoveSim; Status:Failed', _connection)
return
club_service = _get_club_service(_connection)
if club_service is None:
sims4.commands.automation_output('ClubRemoveSim; Status:Failed', _connection)
return
club = club_service.get_club_by_id(club_id)
if club is not None:
club.remove_member(target_sim_info)
sims4.commands.automation_output('ClubRemoveSim; Status:Success', _connection)
return
sims4.commands.output('No existing club with id {}'.format(club_id), _connection)
sims4.commands.automation_output('ClubRemoveSim; Status:Failed', _connection)
@sims4.commands.Command('clubs.start_gathering_by_club_id', command_type=(sims4.commands.CommandType.Live))
def start_gathering_by_club_id(club_id: int, _connection=None):
club_service = _get_club_service(_connection)
if club_service is None:
sims4.commands.automation_output('ClubGatheringStart; Status:Failed', _connection)
return
club = club_service.get_club_by_id(club_id)
if club is None:
sims4.commands.output('No Club exists with this ID.', _connection)
sims4.commands.automation_output('ClubGatheringStart; Status:Failed', _connection)
return
persistence_service = services.get_persistence_service()
venue_manager = services.get_instance_manager(sims4.resources.Types.VENUE)
current_zone_id = services.current_zone_id()
def _start_gathering(zone_id=None):
start_gathering = functools.partial((club_service.start_gathering), club, invited_sims=(services.active_sim_info(),))
if zone_id is None:
current_venue_tuning = venue_manager.get(build_buy.get_current_venue(current_zone_id))
if current_venue_tuning.is_residential or current_venue_tuning.is_university_housing:
                if not club.is_zone_valid_for_gathering(current_zone_id):
                    club.show_club_notification(services.active_sim_info(), ClubTunables.CLUB_GATHERING_START_RESIDENTIAL_INVALID_DIALOG)
                return
else:
if not club.is_zone_valid_for_gathering(current_zone_id):
club.show_club_notification(services.active_sim_info(), ClubTunables.CLUB_GATHERING_START_INVALID_DIALOG)
return
start_gathering()
else:
start_gathering(zone_id=zone_id)
zone_id = club.get_hangout_zone_id(prefer_current=True)
if zone_id:
current_region = services.current_region()
hangout_region = get_region_instance_from_zone_id(zone_id)
if not current_region.is_region_compatible(hangout_region):
zone_id = 0
if zone_id:
if zone_id == current_zone_id or persistence_service.is_save_locked():
_start_gathering()
else:
def on_response(dialog):
if dialog.closed:
return
elif dialog.accepted:
_start_gathering(zone_id=zone_id)
else:
_start_gathering()
if club.hangout_setting == ClubHangoutSetting.HANGOUT_VENUE:
venue_name = club.hangout_venue.display_name
else:
if club.hangout_setting == ClubHangoutSetting.HANGOUT_LOT:
zone_data = persistence_service.get_zone_proto_buff(zone_id)
venue_name = zone_data.name if zone_data is not None else ''
club.show_club_notification((services.active_sim_info()), (ClubTunables.CLUB_GATHERING_START_SELECT_LOCATION_DIALOG), additional_tokens=(
venue_name,),
on_response=on_response)
sims4.commands.automation_output('ClubGatheringStart; Status:Success', _connection)
return True
@sims4.commands.Command('clubs.join_gathering_by_club_id', command_type=(sims4.commands.CommandType.Live))
def join_gathering_by_club_id(club_id: int, sim_id: OptionalTargetParam=None, _connection=None):
club_service = _get_club_service(_connection)
if club_service is None:
return False
club = club_service.get_club_by_id(club_id)
if club is None:
return False
sim = get_optional_target(sim_id, _connection)
if sim is None:
return False
club_gathering = club_service.clubs_to_gatherings_map.get(club)
if club_gathering is None:
return False
current_gathering = club_service.sims_to_gatherings_map.get(sim)
if current_gathering is not None:
if current_gathering.associated_club is not club:
current_gathering.remove_sim_from_situation(sim)
club_gathering.invite_sim_to_job(sim, job=(club_gathering.default_job()))
return True
@sims4.commands.Command('clubs.end_gathering_by_club_id', command_type=(sims4.commands.CommandType.Live))
def end_gathering_by_club_id(club_id: int, _connection=None):
club_service = _get_club_service(_connection)
if club_service is None:
sims4.commands.automation_output('ClubGatheringEnd; Status:Failed', _connection)
return
club = club_service.get_club_by_id(club_id)
if club is None:
sims4.commands.output('No Club exists with this ID.', _connection)
sims4.commands.automation_output('ClubGatheringEnd; Status:Failed', _connection)
return
gathering = club_service.clubs_to_gatherings_map.get(club)
if gathering is None:
sims4.commands.output('No Gathering exists for a Club with this ID.', _connection)
sims4.commands.automation_output('ClubGatheringEnd; Status:Failed', _connection)
return
gathering._self_destruct()
sims4.commands.automation_output('ClubGatheringEnd; Status:Success', _connection)
@sims4.commands.Command('clubs.request_invite', command_type=(sims4.commands.CommandType.Live))
def request_club_invite(club_id: int, _connection=None):
sim_info = services.active_sim_info()
if sim_info is None:
return
club_service = services.get_club_service()
if club_service is None:
return
club = club_service.get_club_by_id(club_id)
if club is None:
return
if any((club_member.is_selectable for club_member in club.members)):
club.show_club_notification(sim_info, ClubTunables.CLUB_GATHERING_DIALOG_REQUEST_INVITE_ACTIVE_SIM)
else:
if club in club_service.clubs_to_gatherings_map:
club.show_club_notification(sim_info, ClubTunables.CLUB_GATHERING_DIALOG_REQUEST_INVITE_CURRENT_LOT)
else:
club_hangout_zone_id = club.get_hangout_zone_id()
if club.hangout_setting == ClubHangoutSetting.HANGOUT_LOT:
current_region = services.current_region()
hangout_region = get_region_instance_from_zone_id(club_hangout_zone_id)
if not current_region.is_region_compatible(hangout_region):
club.show_club_notification(sim_info, (ClubTunables.CLUB_GATHERING_DIALOG_REQUEST_INVITE_UNAVAILABLE), target_sim_id=(club.leader.sim_id))
return
else:
if not club_hangout_zone_id:
if services.active_lot_id() == services.active_household_lot_id():
def on_response(dialog):
if dialog.accepted:
club_service.start_gathering(club, host_sim_id=(sim_info.sim_id), invited_sims=(sim_info,), ignore_zone_validity=True)
club.show_club_notification(sim_info, (ClubTunables.CLUB_GATHERING_DIALOG_REQUEST_INVITE_NO_LOT), target_sim_id=(club.leader.sim_id),
on_response=on_response)
else:
club.show_club_notification(sim_info, (ClubTunables.CLUB_GATHERING_DIALOG_REQUEST_INVITE_NO_LOT_NOT_HOME), target_sim_id=(club.leader.sim_id))
return
club.show_club_gathering_dialog(sim_info, flavor_text=(ClubTunables.CLUB_GATHERING_DIALOG_TEXT_REQUEST_INVITE), start_source=(ClubGatheringStartSource.APPLY_FOR_INVITE))
@sims4.commands.Command('clubs.refresh_safe_seed_data_for_club')
def refresh_safe_seed_data_for_club(club_id: int, _connection=None):
club_service = _get_club_service(_connection)
if club_service is None:
return
club = club_service.get_club_by_id(club_id)
if club is None:
sims4.commands.output('No Club exists with this ID.', _connection)
return
if club.club_seed is None:
sims4.commands.output('Club has no associated ClubSeed.', _connection)
return
club_service.refresh_safe_seed_data_for_club(club)
sims4.commands.output('Club successfully refreshed.', _connection)
@sims4.commands.Command('clubs.request_club_building_info', command_type=(CommandType.Live))
def request_club_building_info(_connection=None):
club_service = _get_club_service(_connection)
if club_service is None:
return
club_service.send_club_building_info()
@sims4.commands.Command('clubs.validate_sims_against_criteria', command_type=(CommandType.Live))
def validate_sims_against_criteria(criteria_data: str, *sim_ids: int, _connection=None):
club_service = _get_club_service(_connection)
if club_service is None:
return
proto = Clubs_pb2.ClubBuildingInfo()
text_format.Merge(criteria_data, proto)
club_service.send_club_criteria_validation(sim_ids, proto)
@sims4.commands.Command('clubs.show_add_member_picker', command_type=(CommandType.Live))
def show_add_club_member_picker(criteria_data: str, max_selectable: int=8, *excluded_sim_ids: int, _connection=None):
club_service = _get_club_service(_connection)
if club_service is None:
return False
criteria_msg = Clubs_pb2.ClubBuildingInfo()
text_format.Merge(criteria_data, criteria_msg)
criterias = [club_service._load_specific_criteria(data) for data in criteria_msg.criterias]
active_sim_info = services.active_sim_info()
sim_filter_service = services.sim_filter_service()
dialog = ClubTunables.CLUB_ADD_MEMBER_PICKER_DIALOG((services.active_sim_info()), club_building_info=criteria_msg, max_selectable=max_selectable)
def get_sim_filter_gsi_name():
return 'Club Command: Add Club Member'
valid_sim_infos = []
for sim_info in services.sim_info_manager().get_all():
if sim_info.sim_id in excluded_sim_ids:
continue
else:
if sim_info.is_baby:
continue
if sim_info.is_ghost and not sim_info.is_selectable:
continue
if not club_service.can_sim_info_join_more_clubs(sim_info):
continue
if not all((criteria.test_sim_info(sim_info) for criteria in criterias)):
continue
results = sim_filter_service.submit_filter((ClubTunables.CLUB_ADD_MEMBER_FILTER), callback=None,
requesting_sim_info=active_sim_info,
sim_constraints=(
sim_info.sim_id,),
allow_yielding=False,
gsi_source_fn=get_sim_filter_gsi_name)
if results:
valid_sim_infos.append((sim_info, results[0].score))
for sim_info, _ in sorted(valid_sim_infos, key=(operator.itemgetter(1)), reverse=True)[:ClubTunables.CLUB_ADD_MEMBER_CAP]:
dialog_row = ClubSimPickerRow(sim_info.sim_id)
dialog.add_row(dialog_row)
dialog.show_dialog(additional_tokens=(max_selectable,))
return True
@sims4.commands.Command('clubs.validate_sim_against_clubs', command_type=(CommandType.Live))
def validate_sim_against_clubs(sim_id: int, *club_ids: int, _connection=None):
club_service = _get_club_service(_connection)
if club_service is None:
return
club_service.send_club_validation(sim_id, club_ids)
@sims4.commands.Command('clubs.create_club', command_type=(CommandType.Live))
def create_club(club_data: str, _connection=None):
club_service = _get_club_service(_connection)
if club_service is None:
sims4.commands.automation_output('ClubCreate; Status:Failed', _connection)
return
proto = GameplaySaveData_pb2.Club()
text_format.Merge(club_data, proto)
club = club_service.create_club_from_new_data(proto)
sims4.commands.automation_output('ClubCreate; Status:Success, Id:{}'.format(club.club_id), _connection)
@sims4.commands.Command('clubs.update_club', command_type=(CommandType.Live))
def update_club(club_data: str, _connection=None):
club_service = _get_club_service(_connection)
if club_service is None:
return
proto = GameplaySaveData_pb2.Club()
text_format.Merge(club_data, proto)
club_service.update_club_from_data(proto)
@sims4.commands.Command('clubs.remove_club_by_id', command_type=(CommandType.Live))
def remove_club_by_id(club_id: int, _connection=None):
club_service = _get_club_service(_connection)
if club_service is None:
sims4.commands.automation_output('ClubDestroy; Status:Failed', _connection)
return
club = club_service.get_club_by_id(club_id)
if club is None:
sims4.commands.output('No Club exists with this ID.', _connection)
sims4.commands.automation_output('ClubDestroy; Status:Failed', _connection)
return
club_service.remove_club(club)
sims4.commands.automation_output('ClubDestroy; Status:Success', _connection)
@sims4.commands.Command('clubs.set_club_outfit_style', command_type=(CommandType.Live))
def set_club_outfit_style(club_id: int, style_tag: Tag, _connection=None):
club = get_club_from_service_by_id(club_id, _connection)
if club is None:
return False
club.set_associated_style(style_tag)
sims4.commands.output('The {} group now has an associated style of {}'.format(club, style_tag), _connection)
@sims4.commands.Command('clubs.set_club_outfit_color', command_type=(CommandType.Live))
def set_club_outfit_color(club_id: int, color_tag: Tag, _connection=None):
club = get_club_from_service_by_id(club_id, _connection)
if club is None:
return False
club.set_associated_color(color_tag)
sims4.commands.output('The {} group now has an associated color of {}'.format(club, color_tag), _connection)
def get_club_from_service_by_id(club_id, _connection):
club_service = services.get_club_service()
if club_service is None:
sims4.commands.output('A Pack with Clubs/Groups is not installed.', _connection)
return
club = club_service.get_club_by_id(club_id)
if club is None:
sims4.commands.output('Club not found with id {}. Please Specify an existing club id.'.format(club_id), _connection)
return
return club
@sims4.commands.Command('clubs.show_ask_about_clubs_dialog_for_sim', command_type=(CommandType.Live))
def show_ask_about_clubs_dialog_for_sim(sim: RequiredTargetParam, _connection):
club_service = _get_club_service(_connection)
if club_service is None:
return
else:
target_sim_info = sim.get_target(manager=(services.sim_info_manager()))
if target_sim_info is None:
sims4.commands.output('Not a valid SimID.', _connection)
return
participant_clubs = club_service.get_clubs_for_sim_info(target_sim_info)
        if not participant_clubs:
            return
op = AskAboutClubsDialog((target_sim_info.id), club_ids=[club.id for club in participant_clubs])
Distributor.instance().add_op_with_no_owner(op)
@sims4.commands.Command('clubs.set_outfit_setting', command_type=(CommandType.Live))
def set_outfit_setting(club_id: int, setting: int, _connection):
club = get_club_from_service_by_id(club_id, _connection)
if club is None:
return False
club.set_outfit_setting(setting)
@sims4.commands.Command('qa.clubs.get_members', command_type=(CommandType.Automation))
def qa_get_members(club_id: int, _connection):
club = get_club_from_service_by_id(club_id, _connection)
if club is None:
sims4.commands.automation_output('ClubMembers; Status:Failed', _connection)
return False
sims4.commands.automation_output('ClubMembers; Status:Begin', _connection)
members = club.members
for member in members:
sims4.commands.automation_output('ClubMembers; Status:Data, SimId:{}'.format(member.sim_id), _connection)
sims4.commands.automation_output('ClubMembers; Status:End', _connection)
@sims4.commands.Command('qa.clubs.get_leader', command_type=(CommandType.Automation))
def qa_get_leader(club_id: int, _connection):
club = get_club_from_service_by_id(club_id, _connection)
if club is None:
sims4.commands.automation_output('ClubLeader; Status:Failed', _connection)
return False
leader = club.leader
if leader is None:
sims4.commands.automation_output('ClubLeader; Status:Failed', _connection)
return False
sims4.commands.automation_output('ClubLeader; Status:Success, SimId:{}'.format(leader.sim_id), _connection)
@sims4.commands.Command('qa.clubs.create_club', command_type=(CommandType.Automation))
def qa_create_club(club_name: str, sim_id: int, _connection=None):
club_data = '\n name: "{0}"\n description: "{0}"\n leader: {1}\n members: {1}\n '.format(club_name, sim_id)
    create_club(club_data, _connection)
#!/usr/bin/env python3
# Copyright 2022 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import time
for x in range(30):
print("Hi! %s" % x)
time.sleep(1)
import collections.abc
from crims2s.training.util import find_checkpoint_file
import torch
import torch.nn as nn
import numpy as np
from typing import Mapping, TypeVar, Callable, Iterable, Hashable
from ...util import ECMWF_FORECASTS
class PytorchMultiplexer(nn.Module):
"""Model multiplexer that only works on pytorch tensor inputs."""
def __init__(self, models):
super().__init__()
self.models = nn.ModuleDict(models)
def forward(self, key, *args):
if isinstance(key, str):
model = self.models[key]
return model(*args)
if isinstance(key, collections.abc.Iterable):
model_outputs = []
for i, k in enumerate(key):
unbatched_args = [a[i] for a in args]
model = self.models[k]
model_output = model(*unbatched_args)
model_outputs.append(model_output)
return torch.stack(model_outputs, dim=0)
else:
model = self.models[key]
return model(*args)
class PytorchRolllingWindowMultiplexer(nn.Module):
"""Model multiplexer that, for a given key, runs a rolling window of models.
The models_of_key callable gives the list of models that apply to a given key.
    For instance, to evaluate key 3 with a window spanning two keys on each side
    (window size 5), models_of_key(3) could return [1, 2, 3, 4, 5]."""
def __init__(
self,
models_of_key: Callable[[str], Iterable[str]],
models: Mapping[str, nn.Module],
):
super().__init__()
self.models_of_key = models_of_key
self.models = nn.ModuleDict(models)
def forward(self, key, *args):
if isinstance(key, str):
return self._compute_one_example(key, *args)
elif isinstance(key, collections.abc.Iterable):
outputs = []
for i, k in enumerate(key):
unbatched_args = [a[[i]] for a in args]
outputs.append(self._compute_one_example(k, *unbatched_args))
return torch.cat(outputs, dim=0)
else:
raise RuntimeError("Unregognized key type.")
def _compute_one_example(self, key: str, *args):
model_keys = self.models_of_key(key)
models = [self.models[k] for k in model_keys]
outputs = torch.stack([m(*args) for m in models], dim=0)
return outputs.mean(dim=0)
class MonthlyMultiplexer(PytorchMultiplexer):
def __init__(self, cls, *args, **kwargs):
monthly_models = {f"{month:02}": cls(*args, **kwargs) for month in range(1, 13)}
super().__init__(monthly_models)
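# Hedged usage sketch (illustrative shapes only): a MonthlyMultiplexer keeps one
# sub-module per month key "01".."12" and dispatches one example at a time on a
# per-example key before restacking along the batch dimension.
def _example_monthly_multiplexer():
    mux = MonthlyMultiplexer(nn.Linear, 8, 3)   # twelve Linear(8, 3) sub-modules
    x = torch.randn(4, 8)
    keys = ["01", "07", "12", "03"]             # one month key per example
    return mux(keys, x)                         # shape (4, 3)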
class WeeklyMultiplexer(PytorchMultiplexer):
def __init__(self, cls, *args, **kwargs):
monthdays = [f"{m:02}{d:02}" for m, d in ECMWF_FORECASTS]
weekly_models = {monthday: cls(*args, **kwargs) for monthday in monthdays}
super().__init__(weekly_models)
class WeeklyRollingWindowMultiplexer(PytorchRolllingWindowMultiplexer):
def __init__(self, window_size, cls, *args, **kwargs):
self.window_size = window_size
self.monthdays = [f"{m:02}{d:02}" for m, d in ECMWF_FORECASTS]
weekly_models = {monthday: cls(*args, **kwargs) for monthday in self.monthdays}
super().__init__(self.models_of_key, weekly_models)
def models_of_key(self, key):
left_lookup = self.window_size // 2
right_lookup = self.window_size // 2 + 1
padded_monthdays = [
*self.monthdays[-left_lookup:],
*self.monthdays,
*self.monthdays[:right_lookup],
]
i = self.monthdays.index(key)
model_keys = padded_monthdays[i : i + self.window_size]
return model_keys
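# Hedged illustration of the circular lookup above: with window_size=3 over keys
# ['a', 'b', 'c', 'd'], the padded list is ['d', 'a', 'b', 'c', 'd', 'a', 'b'], so
# the window centred on 'a' is ['d', 'a', 'b'] and the window centred on 'd' wraps
# around to ['c', 'd', 'a'].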
def compute_edges_cdf_from_distribution(distribution, edges, regularization=0.0):
edges_nan_mask = edges.isnan()
edges[edges_nan_mask] = 0.0
cdf = distribution.cdf(edges + regularization)
edges[edges_nan_mask] = np.nan
cdf[edges_nan_mask] = np.nan
return cdf
def edges_cdf_to_terciles(edges_cdf):
if len(edges_cdf.shape) == 5:
stack_dim = 1
else:
stack_dim = 0
return torch.stack(
[edges_cdf[0], edges_cdf[1] - edges_cdf[0], 1.0 - edges_cdf[1],], dim=stack_dim
)
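# Hedged illustration (not used in training): CDF values evaluated at the two
# tercile edges map directly to the three category probabilities.
def _example_terciles_from_cdf():
    edges_cdf = torch.tensor([[0.3], [0.6]])    # shape (2 edges, ...)
    return edges_cdf_to_terciles(edges_cdf)     # tensor([[0.3], [0.3], [0.4]])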
class DistributionToTerciles(nn.Module):
def __init__(self, regularization=0.0):
super().__init__()
self.regularization = regularization
def forward(self, distribution, edges):
edges_cdf = compute_edges_cdf_from_distribution(
distribution, edges, self.regularization
)
return edges_cdf_to_terciles(edges_cdf)
class DistributionModelAdapter(nn.Module):
"""Convert a model that outputs distributions into a model that outputs terciles."""
def __init__(self, model, tp_regularization=0.0):
super().__init__()
self.model = model
self.t2m_to_terciles = DistributionToTerciles()
self.tp_to_terciles = DistributionToTerciles(regularization=tp_regularization)
def forward(self, example):
t2m_dist, tp_dist = self.model(example)
edges_t2m = example["edges_t2m"]
edges_tp = example["edges_tp"]
if len(edges_t2m.shape) == 5:
"""There is a batch dim but we need the egdges dim on the first dim."""
edges_t2m = torch.transpose(edges_t2m, 0, 1)
edges_tp = torch.transpose(edges_tp, 0, 1)
t2m_terciles = self.t2m_to_terciles(t2m_dist, edges_t2m)
tp_terciles = self.tp_to_terciles(tp_dist, edges_tp)
return t2m_terciles, tp_terciles
class ModelWithCheckpoint(nn.Module):
def __init__(self, model: nn.Module, checkpoint_path, remove_prefix="model."):
super().__init__()
self.model = model
checkpoint_file = find_checkpoint_file(checkpoint_path)
state_dict = torch.load(checkpoint_file)["state_dict"]
state_dict = {k[len(remove_prefix) :]: v for k, v in state_dict.items()}
self.model.load_state_dict(state_dict)
def forward(self, *args, **kwargs):
return self.model.forward(*args, **kwargs)
import wx
import sys, platform, time, ast
from packages.rmnetwork import netutil
import packages.rmnetwork as network
from packages.rmnetwork.constants import *
from packages.lang.Localizer import *
from wx.lib.wordwrap import wordwrap
import wx.lib.scrolledpanel as scrolled
import wx.lib.masked as masked
if platform.system() == "Linux":
from wx.lib.pubsub import setupkwargs
from wx.lib.pubsub import pub as Publisher
else:
from wx.lib.pubsub import pub as Publisher
# mapping from combo choices to constant codes
CMD = ['play', 'stop', 'restart', 'play_number', 'reboot', 'update']
CMD_CODE = [PLAYER_START, PLAYER_STOP, PLAYER_RESTART, PLAYER_START_FILENUMBER, PLAYER_REBOOT, PLAYER_UPDATE]
TYPE = ['startup', 'new_player_found', 'per_sec', 'per_min', 'per_hour', 'spec_time']
TYPE_CODE = [ACTION_EVENT_STARTUP, ACTION_EVENT_NEW_PLAYER, PERIODIC_SEC, PERIODIC_MIN, PERIODIC_HOUR, ACTION_TYPE_SPECIFIC_TIME]
TYPE_ONETIME = [ACTION_EVENT_STARTUP, ACTION_EVENT_NEW_PLAYER, ACTION_TYPE_SPECIFIC_TIME]
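# Hedged note: the UI works with translated labels while the network layer expects
# the constant codes, so combo selections are translated positionally, e.g. a
# selection index i maps to CMD_CODE[i] / TYPE_CODE[i], and
# CMD_CODE[CMD.index('play')] corresponds to PLAYER_START.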
################################################################################
# FRAME FOR ACTION EDITING #####################################################
################################################################################
class ActionEditFrame(wx.Frame):
def __init__(self,parent,id,title,hosts,group):
wx.Frame.__init__(self,parent,id,title)
self.parent = parent
self.hosts = hosts
self.Bind(wx.EVT_CLOSE, self.Close)
self.group = group
self.currentAction = None
self.actionSaved = True
self.actions = []
self.masterHost = ""
if "actions" in group:
self.actions = group['actions']
for member in self.group['members']:
if member['master']:
self.masterHost = member['ip']
self.mainSizer = wx.BoxSizer(wx.VERTICAL)
self.__InitUI()
self.SetSizerAndFit(self.mainSizer)
self.Center()
self.__ValidateInput()
def __InitUI(self):
self.headSizer = wx.BoxSizer(wx.VERTICAL)
self.contentSizer = wx.BoxSizer(wx.VERTICAL)
self.actScroll = scrolled.ScrolledPanel(self, -1, (515,200))
self.actScroll.SetAutoLayout(1)
self.actScroll.SetupScrolling(scroll_x=True, scroll_y=True)
self.actScroll.SetMinSize((515,200))
self.contentSizer.SetMinSize((493,195))
self.actScroll.SetSizer(self.contentSizer)
self.SetupHeadSection()
self.LoadActionUI()
# close button
self.okBtn = wx.Button(self,-1,label="OK")
self.Bind(wx.EVT_BUTTON, self.Close, self.okBtn)
# divider line between sections
line = wx.StaticLine(self, -1, size = (515,2))
secLine = wx.StaticLine(self, -1, size = (515,2))
lineBottom = wx.StaticLine(self, -1, size = (515,2))
# add content sizers and section dividers to main sizer
self.mainSizer.Add(self.headSizer)
self.mainSizer.Add(line, flag=wx.TOP, border = 3)
self.mainSizer.Add(secLine, flag=wx.BOTTOM, border = 3)
self.mainSizer.Add(self.actScroll)
self.mainSizer.Add(lineBottom, flag=wx.TOP|wx.BOTTOM, border = 3)
self.mainSizer.Add(self.okBtn, flag = wx.ALIGN_CENTER_HORIZONTAL | wx.ALL, border = 10)
def SetupHeadSection(self):
# Sizer with combo boxes to define new action
comboSizer = wx.BoxSizer()
cmdChoices = []
for c in CMD:
cmdChoices.append(tr(c))
self.cmdCombo = wx.ComboBox(self, -1, choices=cmdChoices, size=(134,26))
self.cmdCombo.SetName('cmd')
typeChoices = []
for t in TYPE:
typeChoices.append(tr(t))
self.typeCombo = wx.ComboBox(self, -1, choices=typeChoices, size=(174,26))
self.typeCombo.SetName('type')
self.times = []
for i in range(256):
self.times.append(str(i))
self.timeCombo = wx.ComboBox(self, -1, choices=self.times, size=(77,26))
self.timeSpin = wx.SpinButton(self,-1,style=wx.SP_VERTICAL)
self.triggerTime = masked.TimeCtrl(self,-1,format='24HHMM')
self.triggerTime.BindSpinButton(self.timeSpin)
self.addBtn = wx.Button(self,-1,label="+", size=(25,25))
self.Bind(wx.EVT_COMBOBOX, self.ComboSelection, self.cmdCombo)
self.Bind(wx.EVT_COMBOBOX, self.ComboSelection, self.typeCombo)
self.Bind(wx.EVT_COMBOBOX, self.ComboSelection, self.timeCombo)
self.Bind(wx.EVT_BUTTON, self.AddAction, self.addBtn)
comboSizer.Add(self.cmdCombo, flag = wx.LEFT, border = 10)
comboSizer.Add(self.typeCombo)
comboSizer.Add(self.timeCombo)
comboSizer.Add(self.triggerTime)
comboSizer.Add(self.timeSpin,flag=wx.RIGHT,border=3)
comboSizer.Add(self.addBtn, flag = wx.RIGHT | wx.ALIGN_CENTER_VERTICAL, border = 10)
self.addBtn.Disable()
self.ResetCombos()
# Sizer with labels for the combo boxes
labelSizer = wx.BoxSizer()
cmdLabel = wx.StaticText(self,-1,label="Command:",size=(self.cmdCombo.GetSize()[0],17))
triggerLabel = wx.StaticText(self,-1,label="Trigger:",size=(self.typeCombo.GetSize()[0],17))
self.timeLabel = wx.StaticText(self,-1,label="Delay:",size=(self.timeCombo.GetSize()[0],17))
specTimeLabel = wx.StaticText(self,-1,label="Time:")
labelSizer.Add(cmdLabel, flag = wx.LEFT| wx.TOP, border = 6)
labelSizer.Add(triggerLabel, flag = wx.TOP, border = 6)
labelSizer.Add(self.timeLabel, flag = wx.TOP, border = 6)
labelSizer.Add(specTimeLabel, flag = wx.TOP, border = 6)
self.headSizer.Add(labelSizer)
self.headSizer.Add(comboSizer)
def ResetCombos(self):
self.cmdCombo.SetSelection(-1)
self.typeCombo.SetSelection(-1)
self.timeCombo.SetSelection(-1)
def LoadActionUI(self):
for action in self.actions:
self.__AddActionToUI(action)
def __AddActionToUI(self, action):
try:
actionDict = ast.literal_eval(action)
action = actionDict
except:
pass
actBox = wx.BoxSizer()
actBox.SetMinSize((488,10))
desc = self.__GetDescription(action)
descLabel = wx.StaticText(self.actScroll,-1,label=desc)
descLabel.SetMinSize((453,25))
delBtn = wx.Button(self.actScroll,-1,label="x",size=(25,25))
line = wx.StaticLine(self.actScroll, -1, size = (494,2))
self.Bind(wx.EVT_BUTTON, lambda event, action=action: self.DeleteAction(event,action), delBtn)
actBox.Add(descLabel, flag = wx.ALL, border = 3)
actBox.Add(delBtn, flag = wx.ALIGN_RIGHT | wx.ALL, border = 3)
self.contentSizer.Prepend(line, flag = wx.ALL, border = 3)
self.contentSizer.Prepend(actBox)
self.contentSizer.Layout()
self.actScroll.SetupScrolling(scroll_x=True, scroll_y=True)
def __GetDescription(self, action):
ind = CMD_CODE.index(action['command'])
desc = '"' + tr(CMD[ind])
if ind == 3:
desc += " " + str(action['file_number'])
desc += '"'
if action['type'] == ACTION_TYPE_PERIODIC:
desc += " " + tr("every") + " " + action['periodic_interval']
pType = action['periodic_type']
if pType == PERIODIC_SEC:
desc += " " + tr("sec")
elif pType == PERIODIC_MIN:
desc += " " + tr("min")
elif pType == PERIODIC_HOUR:
desc += " " + tr("hour")
if int(action['periodic_interval']) > 1:
desc += tr("desc_plural")
elif action['type'] == ACTION_TYPE_ONETIME:
if action['event'] == ACTION_TYPE_SPECIFIC_TIME:
hourStr = "%02d" % action['hour']
minStr = "%02d" % action['minute']
desc += " at " + hourStr + ":" + minStr
else:
desc += " " + tr("after")
desc += " " + action['delay'] + " " + tr("sec")
if int(action['delay']) > 1:
desc += tr("desc_plural")
desc += " " + tr("when")
ind = TYPE_CODE.index(action['event'])
event = tr(TYPE[ind])
desc += " " + event
return desc
def ComboSelection(self, event=None):
self.__ValidateInput()
def DeleteAction(self, event, action):
desc = self.__GetDescription(action)
dlg = wx.MessageDialog(self, tr("delete_action") % desc, tr("deleting"), style = wx.YES_NO)
if dlg.ShowModal() == wx.ID_YES:
self.currentAction = action
# send action to group master
Publisher.subscribe(self.CurrentActionDeleted, 'action_deleted')
Publisher.subscribe(self.UdpListenerStopped, 'listener_stop')
self.prgDlg = wx.ProgressDialog(tr("deleting"), tr("deleting_action") % desc, parent=self, style=wx.PD_AUTO_HIDE)
self.prgDlg.Pulse()
msgData = network.messages.getMessage(GROUP_CONFIG_ACTION_DELETE, ["-s", str(action)])
network.udpconnector.sendMessage(msgData, self.masterHost, 3)
def CurrentActionDeleted(self):
# if string and not dictionary --> convert
self.currentAction = self.__toDict(self.currentAction)
ind = self.__actionIndex(self.currentAction)
if not ind == -1:
del self.actions[ind]
self.currentAction = None
self.prgDlg.Update(100)
if platform.system() == "Windows":
self.prgDlg.Destroy()
self.ResetCombos()
self.contentSizer.Clear(True)
self.LoadActionUI()
def AddAction(self, event):
action = self.InputToAction()
self.currentAction = action
self.SendCurrentActionToMaster()
def InputToAction(self):
cmd = CMD_CODE[self.cmdCombo.GetSelection()]
type = TYPE_CODE[self.typeCombo.GetSelection()]
time = self.times[self.timeCombo.GetSelection()]
specTime = self.triggerTime.GetValue(as_wxDateTime=True)
hour = specTime.GetHour()
minute = specTime.GetMinute()
actType = ACTION_TYPE_PERIODIC
if type in TYPE_ONETIME:
actType = ACTION_TYPE_ONETIME
action = {}
action['type'] = actType
if actType == ACTION_TYPE_PERIODIC:
action['periodic_type'] = type
action['periodic_interval'] = time
else:
action['event'] = type
if type == ACTION_TYPE_SPECIFIC_TIME:
action['hour'] = hour
action['minute'] = minute
action['file_number'] = time
else:
action['delay'] = time
action['command'] = cmd
return action
def SendCurrentActionToMaster(self):
if not self.currentAction == None:
self.actionSaved = False
# send action to group master
Publisher.subscribe(self.CurrentActionSaved, 'action_saved')
Publisher.subscribe(self.UdpListenerStopped, 'listener_stop')
msgData = network.messages.getMessage(GROUP_CONFIG_ADD_ACTION, ["-s",str(self.currentAction)])
network.udpconnector.sendMessage(msgData, self.masterHost, 3)
def CurrentActionSaved(self):
# group master responded that it received the action update
self.actionSaved = True
self.ResetCombos()
self.actions.append(self.currentAction)
self.__AddActionToUI(self.currentAction)
self.currentAction = None
def UdpListenerStopped(self):
if not self.actionSaved:
self.SendCurrentActionToMaster()
def Close(self, event):
self.MakeModal(False)
self.Destroy()
event.Skip()
def __ValidateInput(self, event=None):
if self.cmdCombo.GetSelection() == 3:
# start specific file number --> use delay combo for file number, trigger has to be specific time
self.typeCombo.SetSelection(5)
self.typeCombo.Disable()
self.timeLabel.SetLabel("File:")
self.timeCombo.Enable()
self.triggerTime.Enable()
self.timeSpin.Enable()
else:
self.timeLabel.SetLabel("Delay:")
self.typeCombo.Enable()
if self.typeCombo.GetSelection() == 5:
self.triggerTime.Enable()
self.timeSpin.Enable()
self.timeCombo.Disable()
else:
self.triggerTime.Disable()
self.timeSpin.Disable()
self.timeCombo.Enable()
if not self.cmdCombo.GetSelection() == -1 and not self.typeCombo.GetSelection() == -1:
if (self.typeCombo.GetSelection() != 5 and not self.timeCombo.GetSelection() == -1) or self.typeCombo.GetSelection() == 5:
action = self.InputToAction()
action = self.__toDict(action)
if self.__actionIndex(action) == -1:
self.addBtn.Enable()
else:
self.addBtn.Disable()
else:
self.addBtn.Disable()
else:
self.addBtn.Disable()
def __actionIndex(self,action):
ind = -1
cnt = 0
# convert to dict in case it is still a string
action = self.__toDict(action)
action = self.__sortDict(action)
for a in self.actions:
a = self.__toDict(a)
a = self.__sortDict(a)
            if a == action:
ind = cnt
cnt += 1
return ind
def __toDict(self,data):
try:
dict = ast.literal_eval(data)
data = dict
except:
pass
return data
def __sortDict(self,dict):
sDict = {}
for key in sorted(dict):
sDict[key] = dict[key]
return sDict
def get_index(seq, attr, value):
return next(index for (index, d) in enumerate(seq) if d[attr] == value)
"""
The LcCatalog provides a semantic interface to a collection of (local and remote) read-only LcArchives, which provide
access to physical data.
It is made up of the following components:
* built on an LciaEngine
+ local, persistent storage of resources, indexes, cache data + etc
+ A resolver, which translates semantic references into resources. Input: semantic ref. output: CatalogInterface.
+ an interface generator, which creates archive accessors on demand based on resource information from the resolver
x An internal cache of entities retrieved, by full reference-- this has been cut
From the catalog_ref file, the catalog should meet the following spec:
Automatic - entity information
catalog.query(origin) - returns a query interface
catalog.lookup(origin, external_ref) - returns the origin of the lowest-priority resource resolving the ref
catalog.fetch(origin, external_ref) - return a reference to the object that can be queried + handled
LC Queries:
see lcatools.interfaces.*
"""
import os
import re
import hashlib
# from collections import defaultdict
from ..archives import InterfaceError
from ..lcia_engine import LciaDb
from antelope import CatalogRef, UnknownOrigin
from ..catalog_query import CatalogQuery, INTERFACE_TYPES, zap_inventory
from .lc_resolver import LcCatalogResolver
from ..lc_resource import LcResource
# from lcatools.flowdb.compartments import REFERENCE_INT # reference intermediate flows
class DuplicateEntries(Exception):
pass
class CatalogError(Exception):
pass
class StaticCatalog(object):
"""
Provides query-based access to LCI information. The static version is ideal for creating read-only web resources
from curated LcCatalogs. However, it must already exist. Only an LcCatalog (or subclasses) support de novo
instantiation.
A catalog is stored in the local file system and creates and stores resources relative to its root directory.
Subfolders (all accessors return absolute paths):
Public subfolders:
LcCatalog.resource_dir
LcCatalog.archive_dir
Public filenames:
LcCatalog.cache_file(src) returns a sha1 hash of the source filename in the [absolute] cache dir
LcCatalog.download_file(src) returns a sha1 hash of the source filename in the [absolute] download dir
Private folders + files:
LcCatalog._download_dir
LcCatalog._index_dir
LcCatalog._index_file(src) returns a sha1 hash of the source filename in the [absolute] index dir
LcCatalog._cache_dir
LcCatalog._entity_cache: local entities file in root
LcCatalog._reference_qtys: reference quantities file in root
LcCatalog._compartments: local compartments file (outmoded in Context Refactor)
"""
@property
def resource_dir(self):
return os.path.join(self._rootdir, 'resources')
@property
def _download_dir(self):
return os.path.join(self._rootdir, 'downloads')
@staticmethod
def _source_hash_file(source):
"""
Creates a stable filename from a source argument. The source is the key found in the _archive dict, and
corresponds to a single physical data source. The filename is a sha1 hex-digest, .json.gz
:param source:
:return:
"""
h = hashlib.sha1()
h.update(source.encode('utf-8'))
return h.hexdigest()
@property
def _index_dir(self):
return os.path.join(self._rootdir, 'index')
def _index_file(self, source):
return os.path.join(self._index_dir, self._source_hash_file(source) + '.json.gz')
@property
def _cache_dir(self):
return os.path.join(self._rootdir, 'cache')
def cache_file(self, source):
return os.path.join(self._cache_dir, self._source_hash_file(source) + '.json.gz')
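    # Hedged illustration: cache and index files are keyed by the sha1 of the source
    # path, e.g. cache_file('/data/ei/source.json') -> <root>/cache/<sha1 hex>.json.gz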
@property
def archive_dir(self):
return os.path.join(self._rootdir, 'archives')
'''
@property
def _entity_cache(self):
return os.path.join(self._rootdir, 'entity_cache.json')
'''
@property
def _reference_qtys(self):
return os.path.join(self._rootdir, 'reference-quantities.json')
'''
@property
def _compartments(self):
"""
Deprecated
:return:
"""
return os.path.join(self._rootdir, 'local-compartments.json')
'''
@property
def _contexts(self):
return os.path.join(self._rootdir, 'local-contexts.json')
@property
def _flowables(self):
return os.path.join(self._rootdir, 'local-flowables.json')
def _localize_source(self, source):
if source is None:
return None
if source.startswith(self._rootdir):
#return re.sub('^%s' % self._rootdir, '$CAT_ROOT', source)
# Should work on both mac and windows
return os.path.join('$CAT_ROOT', os.path.relpath(source, self._rootdir))
return source
def abs_path(self, rel_path):
if os.path.isabs(rel_path):
return rel_path
elif rel_path.startswith('$CAT_ROOT'):
#return re.sub('^\$CAT_ROOT', self.root, rel_path)
# Should work on both mac and windows
return os.path.abspath(os.path.join(self.root, os.path.relpath(rel_path, '$CAT_ROOT')))
return os.path.abspath(os.path.join(self.root, rel_path))
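    # Hedged illustration: with root '/home/user/cat', _localize_source turns
    # '/home/user/cat/index/abc.json.gz' into '$CAT_ROOT/index/abc.json.gz', and
    # abs_path reverses the substitution when resources are loaded back.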
@property
def root(self):
return self._rootdir
def __init__(self, rootdir, strict_clookup=True, **kwargs):
"""
Instantiates a catalog based on the resources provided in resource_dir
:param rootdir: directory storing LcResource files.
:param strict_clookup: [True] whether to enforce uniqueness on characterization factors (raise an error when a
non-matching duplicate characterization is encountered). If False, selection among conflicting factors is
not well defined and may be done interactively or unpredictably
:param kwargs: passed to Qdb
"""
self._rootdir = os.path.abspath(rootdir)
if not os.path.exists(self._rootdir):
raise FileNotFoundError(self._rootdir)
self._resolver = LcCatalogResolver(self.resource_dir)
"""
_archives := source -> archive
_names := ref:interface -> source
_nicknames := nickname -> source
"""
self._nicknames = dict() # keep a collection of shorthands for sources
self._queries = dict() # keep a collection of CatalogQuery instances for each origin
'''
LCIA:
'''
qdb = LciaDb.new(source=self._reference_qtys, contexts=self._contexts, flowables=self._flowables,
strict_clookup=strict_clookup, **kwargs)
self._qdb = qdb
res = LcResource.from_archive(qdb, interfaces=('index', 'quantity'), store=False)
self._resolver.add_resource(res, store=False)
'''
The thing that distinguishes a catalog from an archive is its centralized handling of quantities via the qdb
'''
@property
def qdb(self):
"""
Provides query access to the quantity database. Should be like cat.query('local.qdb'), except that
it provides a basic query- which is what internal quantities use themselves
:return:
"""
return self._qdb.query
@property
def lcia_engine(self):
return self._qdb.tm
def register_quantity_ref(self, q_ref):
print('registering %s' % q_ref.link)
self._qdb.add(q_ref)
@property
def sources(self):
for k in self._resolver.sources:
yield k
@property
def references(self):
for ref, ints in self._resolver.references:
yield ref
@property
def interfaces(self):
for ref, ints in self._resolver.references:
for i in ints:
yield ':'.join([ref, i])
def show_interfaces(self):
for ref, ints in sorted(self._resolver.references):
print('%s [%s]' % (ref, ', '.join(ints)))
'''
Nicknames
'''
@property
def names(self):
"""
List known references.
:return:
"""
for k, ifaces in self._resolver.references:
for iface in ifaces:
yield ':'.join([k, iface])
for k in self._nicknames.keys():
yield k
def add_nickname(self, source, nickname):
"""
quickly refer to a specific data source already present in the archive
:param source:
:param nickname:
:return:
"""
if self._resolver.known_source(source):
self._nicknames[nickname] = source
else:
raise KeyError('Source %s not found' % source)
def has_resource(self, res):
return self._resolver.has_resource(res)
'''
Retrieve resources
'''
def _find_single_source(self, origin, interface, source=None, strict=True):
r = self._resolver.get_resource(ref=origin, iface=interface, source=source, include_internal=False, strict=strict)
r.check(self)
return r.source
def get_resource(self, name, iface=None, source=None, strict=True):
"""
retrieve a resource by providing enough information to identify it uniquely. If strict is True (default),
then parameters are matched exactly and more than one match raises an exception. If strict is False, then
origins are matched approximately and the first (lowest-priority) match is returned.
:param name: nickname or origin
:param iface:
:param source:
:param strict:
:return:
"""
if name in self._nicknames:
return self._resolver.get_resource(source=self._nicknames[name], strict=strict)
iface = zap_inventory(iface, warn=True) # warn when requesting the wrong interface
return self._resolver.get_resource(ref=name, iface=iface, source=source, strict=strict)
def get_archive(self, ref, interface=None, strict=False):
interface = zap_inventory(interface, warn=True)
if interface in INTERFACE_TYPES:
rc = self.get_resource(ref, iface=interface, strict=strict)
else:
rc = self.get_resource(ref, strict=strict)
rc.check(self)
return rc.archive
'''
Main data accessor
'''
def _sorted_resources(self, origin, interfaces, strict):
for res in sorted(self._resolver.resolve(origin, interfaces, strict=strict),
key=lambda x: (not (x.is_loaded and x.static), x.priority, x.reference != origin)):
yield res
def gen_interfaces(self, origin, itype=None, strict=False):
"""
Generator of interfaces by spec
:param origin:
:param itype: single interface or iterable of interfaces
:param strict: passed to resolver
:return:
"""
# if itype == 'quantity':
# yield self._qdb.make_interface(itype)
for res in self._sorted_resources(origin, itype, strict):
res.check(self)
try:
yield res.make_interface(itype)
except InterfaceError:
continue
'''
# no need for this because qdb is (a) listed in the resolver and (b) upstream of everything
if 'quantity' in itype:
yield self._qdb # fallback to our own quantity db for Quantity Interface requests
'''
"""
public functions -- should these operate directly on a catalog ref instead? I think so but let's see about usage
"""
def query(self, origin, strict=False, refresh=False, **kwargs):
"""
Returns a query using the first interface to match the origin.
:param origin:
:param strict: [False] whether the resolver should match the origin exactly, as opposed to returning more highly
specified matches. e.g. with strict=False, a request for 'local.traci' could be satisfied by 'local.traci.2.1'
whereas if strict=True, only a resource matching 'local.traci' exactly will be returned
:param refresh: [False] by default, the catalog stores a CatalogQuery instance for every requested origin. With
refresh=True, any prior instance will be replaced with a fresh one.
:param kwargs:
:return:
"""
next(self._resolver.resolve(origin, strict=strict))
if refresh or (origin not in self._queries):
self._queries[origin] = CatalogQuery(origin, catalog=self, **kwargs)
return self._queries[origin]
def lookup(self, catalog_ref, keep_properties=False):
"""
Attempts to return a valid grounded reference matching the one supplied.
:param catalog_ref:
:param keep_properties: [False] if True, apply incoming ref's properties to grounded ref, probably with a
prefix or something.
:return:
"""
ref = self.query(catalog_ref.origin).get(catalog_ref.external_ref)
if keep_properties:
for k in catalog_ref.properties():
ref[k] = catalog_ref[k]
return ref
'''
def lookup(self, origin, external_ref=None):
"""
Attempts to secure an entity
:param origin:
:param external_ref:
:return: The origin of the lowest-priority resource to match the query
"""
if external_ref is None:
origin, external_ref = origin.split('/', maxsplit=1)
for i in self.gen_interfaces(origin):
if i.lookup(external_ref):
return i.origin
for i in self.gen_interfaces('.'.join(['foreground', origin])):
if i.lookup(external_ref):
return i.origin
raise EntityNotFound('%s/%s' % (origin, external_ref))
def fetch(self, origin, external_ref=None):
if external_ref is None:
origin, external_ref = origin.split('/', maxsplit=1)
org = self.lookup(origin, external_ref)
return self.query(org).get(external_ref)
'''
def catalog_ref(self, origin, external_ref, entity_type=None, **kwargs):
"""
TODO: make foreground-generated CatalogRefs lazy-loading. This mainly requires removing the expectation of a
locally-defined reference entity, and properly implementing and using a reference-retrieval process in the
basic interface.
:param origin:
:param external_ref:
:param entity_type:
:return:
"""
try:
q = self.query(origin)
ref = q.get(external_ref)
except UnknownOrigin:
ref = CatalogRef(origin, external_ref, entity_type=entity_type, **kwargs)
return ref
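# Minimal usage sketch (added for illustration). The class name 'LcCatalog' and the
# origin / reference strings below are assumptions inferred from the surrounding code,
# not part of this excerpt:
#   cat = LcCatalog('/path/to/catalog/root')
#   q = cat.query('local.traci')            # non-strict: 'local.traci.2.1' would also satisfy this
#   ref = cat.catalog_ref('local.traci', 'some_external_ref')
#   cat.show_interfaces()                   # print known origins and their interfaces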
| StarcoderdataPython |
26742 | <gh_stars>0
import unittest
#4. Define the test class, with unittest.TestCase as its parent class.
#   It inherits methods from unittest.TestCase such as setUp and tearDown, which can be overridden in the subclass.
#   It also inherits the various assertion methods of unittest.TestCase.
class Test(unittest.TestCase):
    #5. Define the setUp() method for initialization work before each test case runs.
    #   Note: every method in the class takes self as its first argument, and instance variables are written as "self.variable".
    def setUp(self):
        print("Starting........")
        self.number = 10
    #6. Define test cases as methods whose names start with "test_".
    #   Note: each method takes self as its argument.
    #   The assertion methods of unittest.TestCase can be used to judge the test results.
    #   Multiple test cases may be defined; this is the most important part.
    def test_case1(self):
        self.assertEqual(10,10)
    def test_case2(self):
        # self.number is the expected value, 20 is the actual value
        self.assertEqual(self.number,20,msg="your input is not 20")
    @unittest.skip('Temporarily skip the test for case 3')
    def test_case3(self):
        self.assertEqual(self.number,30,msg='Your input is not 30')
    #7. Define the tearDown() method for cleanup work after each test case runs.
    #   Note: the method takes self as its argument.
    def tearDown(self):
        print("Finished........")
#8. If this file is run directly (__name__ equals __main__), the statements below are executed;
#   this is commonly used to check that the test script runs correctly.
if __name__=='__main__':
    #8.1 One way to execute the test cases:
    #    unittest.main() searches this module for all test methods whose names start with "test" and runs them automatically.
    #    Execution order follows the names: test_case1 runs before test_case2.
unittest.main() | StarcoderdataPython |
62264 | import mysql.connector
conn = mysql.connector.connect(
host="192.168.99.102",
user="root",
passwd="<PASSWORD>",
database="user_db",
port="3308"
)
def find_all():
    query = "SELECT * FROM users"
    try:
        cursor = conn.cursor()
        cursor.execute(query)
        rows = cursor.fetchall()
        cursor.close()
        return rows
    except mysql.connector.Error as err:
        print("Something went wrong: {}".format(err))
def find_one_by_id(user_id):
    query = "SELECT * FROM users WHERE id = %s"
    try:
        cursor = conn.cursor()
        cursor.execute(query, (user_id,))
        row = cursor.fetchone()
        cursor.close()
        return row
    except mysql.connector.Error as err:
        print("Something went wrong: {}".format(err))
def find_one_by_name(name):
    query = "SELECT * FROM users WHERE name = %s"
    try:
        cursor = conn.cursor()
        cursor.execute(query, (name,))
        row = cursor.fetchone()
        cursor.close()
        return row
    except mysql.connector.Error as err:
        print("Something went wrong: {}".format(err))
users = find_all()
user_1 = find_one_by_id(1)
user_pete = find_one_by_name('pete')
"""
Something went wrong: 1146 (42S02): Table 'user_db.users' doesn't exist
Something went wrong: 1146 (42S02): Table 'user_db.users' doesn't exist
Something went wrong: 1146 (42S02): Table 'user_db.users' doesn't exist
"""
| StarcoderdataPython |
4836496 | import numpy as np
from UCTB.dataset import NodeTrafficLoader
from UCTB.model import DCRNN
from UCTB.evaluation import metric
class my_data_loader(NodeTrafficLoader):
def diffusion_matrix(self, filter_type='random_walk'):
def calculate_random_walk_matrix(adjacent_mx):
d = np.array(adjacent_mx.sum(1))
d_inv = np.power(d, -1).flatten()
d_inv[np.isinf(d_inv)] = 0.
d_mat_inv = np.diag(d_inv)
random_walk_mx = d_mat_inv.dot(adjacent_mx)
return random_walk_mx
assert len(self.AM) == 1
diffusion_matrix = []
if filter_type == "random_walk":
diffusion_matrix.append(calculate_random_walk_matrix(self.AM[0]).T)
elif filter_type == "dual_random_walk":
diffusion_matrix.append(calculate_random_walk_matrix(self.AM[0]).T)
diffusion_matrix.append(calculate_random_walk_matrix(self.AM[0].T).T)
return np.array(diffusion_matrix, dtype=np.float32)
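    # Added note: calculate_random_walk_matrix builds the row-normalized transition matrix
    # D^-1 * A of the adjacency A, and the .T hands its transpose to DCRNN. Toy check with
    # assumed values: A = np.array([[0., 1.], [1., 0.]]) gives D^-1 A == A, each row summing to 1.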
data_loader = my_data_loader(dataset='Bike', city='NYC', train_data_length='365',
closeness_len=6, period_len=7, trend_len=4, graph='Correlation', normalize=True)
diffusion_matrix = data_loader.diffusion_matrix()
batch_size = 64
DCRNN_Obj = DCRNN(num_nodes=data_loader.station_number,
num_diffusion_matrix=diffusion_matrix.shape[0],
num_rnn_units=64,
num_rnn_layers=1,
max_diffusion_step=2,
seq_len=data_loader.closeness_len + data_loader.period_len + data_loader.trend_len,
use_curriculum_learning=False,
input_dim=1,
output_dim=1,
cl_decay_steps=1000,
target_len=1,
lr=1e-4,
epsilon=1e-3,
optimizer_name='Adam',
code_version='DCRNN-QuickStart',
model_dir='model_dir',
gpu_device='0')
# Build tf-graph
DCRNN_Obj.build()
print('Number of trainable parameters', DCRNN_Obj.trainable_vars)
# Training
DCRNN_Obj.fit(inputs=np.concatenate((data_loader.train_trend.transpose([0, 2, 1, 3]),
data_loader.train_period.transpose([0, 2, 1, 3]),
data_loader.train_closeness.transpose([0, 2, 1, 3])), axis=1),
diffusion_matrix=diffusion_matrix,
target=data_loader.train_y.reshape([-1, 1, data_loader.station_number, 1]),
batch_size=batch_size,
sequence_length=data_loader.train_sequence_len)
# Predict
prediction = DCRNN_Obj.predict(inputs=np.concatenate((data_loader.test_trend.transpose([0, 2, 1, 3]),
data_loader.test_period.transpose([0, 2, 1, 3]),
data_loader.test_closeness.transpose([0, 2, 1, 3])), axis=1),
diffusion_matrix=diffusion_matrix,
target=data_loader.test_y.reshape([-1, 1, data_loader.station_number, 1]),
sequence_length=data_loader.test_sequence_len,
output_names=['prediction'])
# Evaluate
print('Test result', metric.rmse(prediction=data_loader.normalizer.min_max_denormal(prediction['prediction']),
target=data_loader.normalizer.min_max_denormal(data_loader.test_y.transpose([0, 2, 1])),
threshold=0)) | StarcoderdataPython |
3281385 | <filename>Lib/plat-irix6/cdplayer.py
# This file implements a class which forms an interface to the .cdplayerrc
# file that is maintained by SGI's cdplayer program.
#
# Usage is as follows:
#
# import readcd
# r = readcd.Readcd()
# c = Cdplayer(r.gettrackinfo())
#
# Now you can use c.artist, c.title and c.track[trackno] (where trackno
# starts at 1). When the CD is not recognized, all values will be the empty
# string.
# It is also possible to set the above mentioned variables to new values.
# You can then use c.write() to write out the changed values to the
# .cdplayerrc file.
cdplayerrc = '.cdplayerrc'
class Cdplayer:
def __init__(self, tracklist):
import string
self.artist = ''
self.title = ''
if type(tracklist) == type(''):
t = []
for i in range(2, len(tracklist), 4):
t.append((None, \
(int(tracklist[i:i+2]), \
int(tracklist[i+2:i+4]))))
tracklist = t
self.track = [None] + [''] * len(tracklist)
self.id = 'd' + string.zfill(len(tracklist), 2)
for track in tracklist:
start, length = track
self.id = self.id + string.zfill(length[0], 2) + \
string.zfill(length[1], 2)
try:
import posix
f = open(posix.environ['HOME'] + '/' + cdplayerrc, 'r')
except IOError:
return
import re
reg = re.compile(r'^([^:]*):\t(.*)')
s = self.id + '.'
l = len(s)
while 1:
line = f.readline()
if line == '':
break
if line[:l] == s:
line = line[l:]
match = reg.match(line)
if not match:
print 'syntax error in ~/' + cdplayerrc
continue
name, value = match.group(1, 2)
if name == 'title':
self.title = value
elif name == 'artist':
self.artist = value
elif name[:5] == 'track':
trackno = int(name[6:])
self.track[trackno] = value
f.close()
def write(self):
import posix
filename = posix.environ['HOME'] + '/' + cdplayerrc
try:
old = open(filename, 'r')
except IOError:
old = open('/dev/null', 'r')
new = open(filename + '.new', 'w')
s = self.id + '.'
l = len(s)
while 1:
line = old.readline()
if line == '':
break
if line[:l] != s:
new.write(line)
new.write(self.id + '.title:\t' + self.title + '\n')
new.write(self.id + '.artist:\t' + self.artist + '\n')
for i in range(1, len(self.track)):
            new.write('%s.track.%r:\t%s\n' % (self.id, i, self.track[i]))
old.close()
new.close()
posix.rename(filename + '.new', filename)
| StarcoderdataPython |
3334412 | from dateutil.parser import parse
def capitalizeinput(inputparameter):
'''
    This takes the raw input text, tokenizes the string,
    capitalizes every token, and returns the formatted,
    capitalized text.
    :param inputparameter: :type str
    :return: string (text with all tokens capitalized)
'''
formatted_output = inputparameter.title()
return formatted_output
def comparedates(datelist):
'''
    This function takes a list of date strings, parses and
    compares them, and returns the later date followed by
    the earlier date.
    :param datelist: :type list of str
    :return: dates ordered from later to earlier
'''
try:
date1 = parse(datelist[0][0])
date2 = parse(datelist[1][0])
if date2> date1:
return datelist[1][0],datelist[0][0]
else:
return datelist[0][0],datelist[1][0]
except:
return datelist[0][0],datelist[1][0]
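# Usage sketch (added for illustration; the nested one-element lists mirror how the function
# indexes datelist[0][0] and datelist[1][0], and the date strings are made up):
#   later, earlier = comparedates([["2020-01-01"], ["2021-06-15"]])
#   # -> ("2021-06-15", "2020-01-01")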
| StarcoderdataPython |
15391 | import math
import numpy as np
import torch
from torch import nn
from torch.backends import cudnn
from torch.utils.data import DataLoader
from tqdm import tqdm
from model import CharRNN
from data import TextDataset, TextConverter
class Trainer(object):
def __init__(self, args):
self.args = args
self.device = torch.device('cuda' if self.args.cuda else 'cpu')
self.convert = None
self.model = None
self.optimizer = None
self.criterion = self.get_loss
self.meter = AverageValueMeter()
self.train_loader = None
self.get_data()
self.get_model()
self.get_optimizer()
def get_data(self):
self.convert = TextConverter(self.args.txt, max_vocab=self.args.max_vocab)
dataset = TextDataset(self.args.txt, self.args.len, self.convert.text_to_arr)
self.train_loader = DataLoader(dataset, self.args.batch_size, shuffle=True, num_workers=self.args.num_workers)
def get_model(self):
self.model = CharRNN(self.convert.vocab_size, self.args.embed_dim, self.args.hidden_size, self.args.num_layers,
self.args.dropout, self.args.cuda).to(self.device)
if self.args.cuda:
cudnn.benchmark = True
def get_optimizer(self):
optimizer = torch.optim.Adam(self.model.parameters(), lr=self.args.lr)
self.optimizer = ScheduledOptim(optimizer)
@staticmethod
def get_loss(score, label):
return nn.CrossEntropyLoss()(score, label.view(-1))
def save_checkpoint(self, epoch):
if (epoch + 1) % self.args.save_interval == 0:
model_out_path = self.args.save_file + "epoch_{}_model.pth".format(epoch + 1)
torch.save(self.model, model_out_path)
print("Checkpoint saved to {}".format(model_out_path))
def save(self):
model_out_path = self.args.save_file + "final_model.pth"
torch.save(self.model, model_out_path)
print("Final model saved to {}".format(model_out_path))
@staticmethod
def pick_top_n(predictions, top_n=5):
top_predict_prob, top_predict_label = torch.topk(predictions, top_n, 1)
top_predict_prob /= torch.sum(top_predict_prob)
top_predict_prob = top_predict_prob.squeeze(0).cpu().numpy()
top_predict_label = top_predict_label.squeeze(0).cpu().numpy()
c = np.random.choice(top_predict_label, size=1, p=top_predict_prob)
return c
def train(self):
self.meter.reset()
self.model.train()
for x, y in tqdm(self.train_loader):
y = y.long()
x, y = x.to(self.device), y.to(self.device)
# Forward.
score, _ = self.model(x)
loss = self.criterion(score, y)
# Backward.
self.optimizer.zero_grad()
loss.backward()
# Clip gradient.
nn.utils.clip_grad_norm_(self.model.parameters(), 5)
self.optimizer.step()
self.meter.add(loss.item())
print('perplexity: {}'.format(np.exp(self.meter.value()[0])))
def test(self):
self.model.eval()
begin = np.array([i for i in self.args.begin])
begin = np.random.choice(begin, size=1)
text_len = self.args.predict_len
samples = [self.convert.word_to_int(c) for c in begin]
input_txt = torch.LongTensor(samples)[None]
input_txt = input_txt.to(self.device)
_, init_state = self.model(input_txt)
result = samples
model_input = input_txt[:, -1][:, None]
with torch.no_grad():
for i in range(text_len):
out, init_state = self.model(model_input, init_state)
prediction = self.pick_top_n(out.data)
model_input = torch.LongTensor(prediction)[None].to(self.device)
result.append(prediction[0])
print(self.convert.arr_to_text(result))
def predict(self):
self.model.eval()
samples = [self.convert.word_to_int(c) for c in self.args.begin]
input_txt = torch.LongTensor(samples)[None].to(self.device)
_, init_state = self.model(input_txt)
result = samples
model_input = input_txt[:, -1][:, None]
with torch.no_grad():
for i in range(self.args.predict_len):
out, init_state = self.model(model_input, init_state)
prediction = self.pick_top_n(out.data)
model_input = torch.LongTensor(prediction)[None].to(self.device)
result.append(prediction[0])
print(self.convert.arr_to_text(result))
def run(self):
for e in range(self.args.max_epoch):
print('===> EPOCH: {}/{}'.format(e + 1, self.args.max_epoch))
self.train()
self.test()
self.save_checkpoint(e)
self.save()
class AverageValueMeter(object):
"""
the meter tracker mainly focuses on mean and std
"""
def __init__(self):
super(AverageValueMeter, self).__init__()
self.n = None
self.sum = None
self.var = None
self.val = None
self.mean = None
self.std = None
self.reset()
def add(self, value, n=1):
self.val = value
self.sum += value
self.var += value * value
self.n += n
if self.n == 0:
self.mean, self.std = np.nan, np.nan
elif self.n == 1:
self.mean, self.std = self.sum, np.inf
else:
self.mean = self.sum / self.n
self.std = math.sqrt(
(self.var - self.n * self.mean * self.mean) / (self.n - 1.0))
def value(self):
return self.mean, self.std
def reset(self):
self.n = 0
self.sum = 0.0
self.var = 0.0
self.val = 0.0
self.mean = np.nan
self.std = np.nan
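# Brief usage sketch (added for illustration; the loss values are made up):
#   meter = AverageValueMeter()
#   for loss in (0.9, 0.7, 0.5):
#       meter.add(loss)
#   mean, std = meter.value()   # running mean and sample std of the added values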
class ScheduledOptim(object):
"""A wrapper class for learning rate scheduling
"""
def __init__(self, optimizer):
self.optimizer = optimizer
self.lr = self.optimizer.param_groups[0]['lr']
self.current_steps = 0
def step(self):
"Step by the inner optimizer"
self.current_steps += 1
self.optimizer.step()
def zero_grad(self):
"Zero out the gradients by the inner optimizer"
self.optimizer.zero_grad()
def lr_multi(self, multi):
for param_group in self.optimizer.param_groups:
param_group['lr'] *= multi
self.lr = self.optimizer.param_groups[0]['lr']
def set_learning_rate(self, lr):
self.lr = lr
for param_group in self.optimizer.param_groups:
param_group['lr'] = lr
@property
def learning_rate(self):
return self.lr | StarcoderdataPython |
1658791 | # Under MIT licence, see LICENCE.txt
from ai.STA.Tactic.Tactic import Tactic
from ai.STA.Action.MoveToPosition import MoveToPosition
from ai.STA.Action.Idle import Idle
from ai.STA.Tactic import tactic_constants
from ai.Util.ball_possession import hasBallFacingTarget
from ai.STA.Tactic.tactic_constants import Flags
from ai.Util.ball_possession import hasBall
from RULEngine.Util.Pose import Pose
from RULEngine.Util.geometry import get_angle
from RULEngine.Util.constant import PLAYER_PER_TEAM
__author__ = 'RoboCupULaval'
class ReceiveBall(Tactic):
    # TODO: Add a state that performs a translation to catch the ball if it is heading off to one side
    """
    methods:
        exec(self) : Executes an Action according to the current state
    attributes:
        game_state: The current state of the game.
        player_id : Identifier of the player to whom the tactic is assigned
        current_state : The current state of the tactic
        next_state : The next state of the tactic
        status_flag : The progress indicator of the tactic
"""
def __init__(self, game_state, player_id):
Tactic.__init__(self, game_state, player_id)
assert isinstance(player_id, int)
assert PLAYER_PER_TEAM >= player_id >= 0
self.current_state = self.rotate_towards_ball
self.next_state = self.rotate_towards_ball
self.player_id = player_id
def rotate_towards_ball(self):
if hasBall(self.game_state, self.player_id):
self.next_state = self.halt
self.status_flag = Flags.SUCCESS
return Idle(self.game_state, self.player_id)
else: # keep rotating
current_position = self.game_state.get_player_position()
ball_position = self.game_state.get_ball_position()
rotation_towards_ball = get_angle(current_position, ball_position)
pose_towards_ball = Pose(current_position, rotation_towards_ball)
move_to = MoveToPosition(self.game_state, self.player_id, pose_towards_ball)
self.next_state = self.rotate_towards_ball
self.status_flag = Flags.WIP
return move_to
| StarcoderdataPython |
31666 | <filename>references/encase/code/resNet_3.py<gh_stars>1-10
# -*- coding: utf-8 -*-
'''
split long seq into small sub_seq,
feed sub_seq to lstm
'''
from __future__ import division, print_function, absolute_import
import tflearn
import tflearn.data_utils as du
import numpy as np
import ReadData
import tensorflow as tf
from tensorflow.contrib import learn
from tensorflow.contrib.learn.python.learn.estimators import model_fn as model_fn_lib
from sklearn.model_selection import StratifiedKFold
import MyEval
import pickle
from tflearn.layers.recurrent import bidirectional_rnn, BasicLSTMCell
from tflearn.layers.core import dropout
tf.logging.set_verbosity(tf.logging.INFO)
def read_data():
with open('../../data1/expanded_three_part_window_3000_stride_500.pkl', 'rb') as fin:
train_data = pickle.load(fin)
train_label = pickle.load(fin)
val_data = pickle.load(fin)
val_label = pickle.load(fin)
test_data = pickle.load(fin)
test_label = pickle.load(fin)
return train_data, train_label, val_data, val_label, test_data, test_label
## TODO normalization
n_dim = 3000
n_split = 300
tf.reset_default_graph()
sess = tf.Session(config=tf.ConfigProto(log_device_placement=True))
X, Y, valX, valY, testX, testY = read_data()
X = X.reshape([-1, n_dim, 1])
testX = testX.reshape([-1, n_dim, 1])
### split
#X = X.reshape([-1, n_split, 1])
#testX = testX.reshape([-1, n_split, 1])
# Building Residual Network
net = tflearn.input_data(shape=[None, n_dim, 1])
print("input", net.get_shape())
############ reshape for sub_seq
net = tf.reshape(net, [-1, n_dim//n_split, n_split, 1])
print("reshaped input", net.get_shape())
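# Added note: with n_dim = 3000 and n_split = 300, this reshape splits each example into
# n_dim // n_split = 10 sub-sequences of length 300, i.e. shape (-1, 10, 300, 1).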
net = tflearn.layers.conv.conv_2d_cnnlstm(net, 64, 16, 2)
#net = tflearn.conv_1d(net, 64, 16, 2, regularizer='L2', weight_decay=0.0001)
print("cov1", net.get_shape())
net = tflearn.batch_normalization(net)
print("bn1", net.get_shape())
net = tflearn.activation(net, 'relu')
print("relu1", net.get_shape())
# Residual blocks
'''net = tflearn.residual_bottleneck(net, 2, 16, 64, downsample_strides = 2, downsample=True, is_first_block = True)
print("resn2", net.get_shape())
net = tflearn.residual_bottleneck(net, 2, 16, 128, downsample_strides = 2, downsample=True)
print("resn4", net.get_shape())
net = tflearn.residual_bottleneck(net, 2, 16, 256, downsample_strides = 2, downsample=True)
print("resn6", net.get_shape())
net = tflearn.residual_bottleneck(net, 2, 16, 512, downsample_strides = 2, downsample=True)
print("resn8", net.get_shape())
net = tflearn.residual_bottleneck(net, 2, 16, 1024, downsample_strides = 2, downsample=True)
print("resn10", net.get_shape())'''
net = tflearn.layers.conv.residual_bottleneck_cnnlstm(net, 2, 16, 64, downsample_strides = 2, downsample=True, is_first_block = True)
print("resn2", net.get_shape())
net = tflearn.layers.conv.residual_bottleneck_cnnlstm(net, 2, 16, 64, downsample_strides = 2, downsample=True)
print("resn4", net.get_shape())
net = tflearn.layers.conv.residual_bottleneck_cnnlstm(net, 2, 16, 128, downsample_strides = 2, downsample=True)
print("resn6", net.get_shape())
net = tflearn.layers.conv.residual_bottleneck_cnnlstm(net, 2, 16, 128, downsample_strides = 2, downsample=True)
print("resn8", net.get_shape())
net = tflearn.layers.conv.residual_bottleneck_cnnlstm(net, 2, 16, 256, downsample_strides = 2, downsample=True)
print("resn10", net.get_shape())
net = tflearn.layers.conv.residual_bottleneck_cnnlstm(net, 2, 16, 256, downsample_strides = 2, downsample=True)
print("resn12", net.get_shape())
'''net = tflearn.layers.conv.residual_bottleneck_cnnlstm(net, 2, 16, 512, downsample_strides = 2, downsample=True)
print("resn14", net.get_shape())
net = tflearn.layers.conv.residual_bottleneck_cnnlstm(net, 2, 16, 512, downsample_strides = 2, downsample=True)
print("resn16", net.get_shape())
net = tflearn.layers.conv.residual_bottleneck_cnnlstm(net, 2, 16, 1024, downsample_strides = 2, downsample=True)
print("resn18", net.get_shape())
net = tflearn.layers.conv.residual_bottleneck_cnnlstm(net, 2, 16, 1024, downsample_strides = 2, downsample=True)
print("resn20", net.get_shape())'''
net = tflearn.batch_normalization(net)
net = tflearn.activation(net, 'relu')
#net = tflearn.global_avg_pool(net)
# LSTM
print("before LSTM, before reshape", net.get_shape())
############ reshape for sub_seq
net = tf.reshape(net, [-1, n_dim//n_split, 256*3])
print("before LSTM", net.get_shape())
net = bidirectional_rnn(net, BasicLSTMCell(16), BasicLSTMCell(16))
print("after LSTM", net.get_shape())
#net = dropout(net, 0.5)
# Regression
#net = tflearn.fully_connected(net, 64, activation='sigmoid')
#net = tflearn.dropout(net, 0.5)
net = tflearn.fully_connected(net, 4, activation='softmax')
print("dense", net.get_shape())
net = tflearn.regression(net, optimizer='momentum',
loss='categorical_crossentropy'
, learning_rate=0.1)
# Training
model = tflearn.DNN(net, checkpoint_path='../../models2/resnet_16_lstm',
max_checkpoints=10, clip_gradients=0., tensorboard_verbose=0)
model.fit(X, Y, n_epoch=10, validation_set=(valX, valY),
show_metric=True, batch_size=200, run_id='resnet_3', snapshot_step=100,
snapshot_epoch=False)
#Predict
cur_testX = []
y_predicted=[]
for i in range(13638):
if (i % 300 == 0 or i/300 == 45) and i != 0:
tmp_testX = np.array(cur_testX, dtype=np.float32)
tmp_testX = tmp_testX.reshape([-1, n_dim, 1])
y_predicted.extend(model.predict(tmp_testX))
cur_testX = []
cur_testX.append(testX[i])
#y_predicted=[model.predict(testX[i].reshape([-1, n_dim, 1])) for i in list(range(13638))]
#Calculate F1Score
MyEval.F1Score3_num(y_predicted, testY[:len(y_predicted)])
## save model
model.save('../model/ttt.tfl')
| StarcoderdataPython |
155083 | <filename>widgets/comboBox.py<gh_stars>1-10
from .base import *
class ComboBoxTemplateWidget(TemplateWidget):
def __init__(self, **kwargs):
super(ComboBoxTemplateWidget, self).__init__(**kwargs)
layout = QVBoxLayout()
self.setLayout(layout)
layout.setContentsMargins(0, 0, 0, 0)
self.comboBox = QComboBox()
self.comboBox.setContextMenuPolicy(Qt.DefaultContextMenu)
self.comboBox.currentIndexChanged.connect(self.somethingChanged)
self.comboBox.contextMenuEvent = self.comboBoxContextMenuEvent
layout.addWidget(self.comboBox)
def comboBoxContextMenuEvent(self, event):
menu = QMenu(self)
appendAction = QAction("Append", self)
appendAction.triggered.connect(self.appendItem)
menu.addAction(appendAction)
removeAction = QAction("Remove", self)
removeAction.triggered.connect(self.removeItem)
menu.addAction(removeAction)
editAction = QAction("Edit", self)
editAction.triggered.connect(self.editItems)
menu.addAction(editAction)
menu.addSeparator()
clearAction = QAction("Clear", self)
clearAction.triggered.connect(self.clearItems)
menu.addAction(clearAction)
menu.popup(event.globalPos())
def editItems(self):
items = ";".join([unicode(self.comboBox.itemText(i)) for i in range(self.comboBox.count())])
newItems, ok = QInputDialog.getText(self, "Rig Builder", "Items separated with ';'", QLineEdit.Normal, items)
if ok and newItems:
self.comboBox.clear()
self.comboBox.addItems([x.strip() for x in newItems.split(";")])
self.somethingChanged.emit()
def clearItems(self):
ok = QMessageBox.question(self, "Rig Builder", "Really clear all items?", QMessageBox.Yes and QMessageBox.No, QMessageBox.Yes) == QMessageBox.Yes
if ok:
self.comboBox.clear()
self.somethingChanged.emit()
def appendItem(self):
name, ok = QInputDialog.getText(self, "Rig Builder", "Name", QLineEdit.Normal, "")
if ok and name:
self.comboBox.addItem(name)
self.somethingChanged.emit()
def removeItem(self):
self.comboBox.removeItem(self.comboBox.currentIndex())
self.somethingChanged.emit()
def getDefaultData(self):
return {"items": ["a", "b"], "current": "a", "default": "current"}
def getJsonData(self):
return {"items": [unicode(self.comboBox.itemText(i)) for i in range(self.comboBox.count())],
"current": unicode(self.comboBox.currentText()),
"default": "current"}
def setJsonData(self, value):
self.comboBox.clear()
self.comboBox.addItems(value["items"])
if value["current"] in value["items"]:
self.comboBox.setCurrentIndex(value["items"].index(value["current"]))
| StarcoderdataPython |
155705 | """Output utilities
===================
Tiny module to hide fluiddyn for users of fluidlab.
"""
from fluiddyn.output.figs import show
| StarcoderdataPython |
109061 | """
These are some scripts used in testing multiple particles.
"""
import time
import datetime
import numpy as np
from particle import Particle
#==============================================================================
def fill_particles(diameter, density, births, lifetime, initial_positions, u0):
# someParticles = [p, p, ..., p Ninlets times for birth1
# then p, p, ..., p Ninlets times for birth2
# ...
# repeated len(births) time]
someParticles = []
for b in births:
list_p = [Particle(diameter, density, b, lifetime, pos0, u0) \
for pos0 in initial_positions]
someParticles.extend(list_p)
return np.array(someParticles)
def prepare_initial_positions(x0, list_y):
return np.array([[x0, y] for y in list_y])
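# Usage sketch (added for illustration; diameter, density, births, lifetime and u0 are assumed
# placeholder values, and Particle itself comes from the local particle module):
#   positions = prepare_initial_positions(x0=0.0, list_y=[-1.0, 0.0, 1.0])
#   particles = fill_particles(1e-4, 1000.0, births=[0.0, 0.5], lifetime=2.0,
#                              initial_positions=positions, u0=[0.0, 0.0])
#   # -> len(particles) == len(births) * len(positions)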
def compute_particles(particles, flow, factor, printIt, too_far_stop):
t1 = time.time()
cpu_time = 0
count = 1
Npar = len(particles)
for p in particles:
print('\n---> Particle %d (remaining: %d)' % (count, Npar - count))
p.compute_trajectory(flow, factor, printIt, too_far_stop)
cpu_time = time.time() - t1
print('Accumulated CPU_TIME = %.2f seconds = %s (hh:mm:ss)' \
% (cpu_time, datetime.timedelta(seconds=cpu_time)))
count += 1
captured_ones = np.array([p for p in particles if p.captured])
return particles, captured_ones
def spread_particles(fill_args, flow, factor, printIt, too_far_stop):
# fill_args = diameter, density, births, lifetime, initial_positions, u0
particles = fill_particles(*fill_args)
return compute_particles(particles, flow, factor, printIt, too_far_stop) | StarcoderdataPython |
1655130 | from .keys import (BadSignatureError, BadPrefixError,
create_keypair, SigningKey, VerifyingKey,
remove_prefix, to_ascii, from_ascii)
(BadSignatureError, BadPrefixError,
create_keypair, SigningKey, VerifyingKey,
remove_prefix, to_ascii, from_ascii) # hush pyflakes
from ._version import get_versions
__version__ = str(get_versions()['version'])
del get_versions
| StarcoderdataPython |
3343554 | <reponame>sekikn/spark-executor-dict-plugin
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import unittest
from pyspark import SparkConf
from pyspark.sql import Row
from tests.requirements import have_pandas, have_pyarrow, \
pandas_requirement_message, pyarrow_requirement_message
from tests.testutils import ReusedSQLTestCase
@unittest.skipIf(
not have_pandas or not have_pyarrow,
pandas_requirement_message or pyarrow_requirement_message) # type: ignore
class RepairModelTests(ReusedSQLTestCase):
@classmethod
def conf(cls):
return SparkConf() \
.set("spark.master", "local[1]") \
.set("spark.driver.memory", "1g") \
.set("spark.jars", os.getenv("DICT_API_LIB")) \
.set("spark.plugins", "org.apache.spark.plugin.SparkExecutorDictPlugin") \
.set("spark.files", "{}/test.db".format(os.getenv("DICT_TESTDATA"))) \
.set("spark.executor.userClassPathFirst", "true")
@classmethod
def setUpClass(cls):
super(RepairModelTests, cls).setUpClass()
# Tunes # shuffle partitions
num_parallelism = cls.spark.sparkContext.defaultParallelism
cls.spark.sql(f"SET spark.sql.shuffle.partitions={num_parallelism}")
@classmethod
def tearDownClass(cls):
super(ReusedSQLTestCase, cls).tearDownClass()
def test_basics(self):
from pyspark.sql.functions import col, udf
@udf(returnType='string')
def _udf(x):
from client import DictClient
client = DictClient()
return str(client.lookup(x))
df = self.spark.range(4).selectExpr("CAST(id AS STRING) id")
df = df.select(_udf(col("id")).alias("value"))
self.assertEqual(df.orderBy("value").collect(), [
Row(value=""), Row(value="a"), Row(value="b"), Row(value="c")])
if __name__ == "__main__":
try:
import xmlrunner
testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
| StarcoderdataPython |
75836 | <reponame>kostrse/coworkingmap
import os
import subprocess
def _get_exec_extensions():
if os.name == "nt":
return os.getenv("PATHEXT").split(os.pathsep)
else:
return [""]
def _resolve_path(command):
extensions = _get_exec_extensions()
    def is_exec(filename):
        return os.path.isfile(filename) and os.access(filename, os.X_OK)
fpath, fname = os.path.split(command)
if fpath:
if is_exec(command):
return command
else:
resolve_dirs = os.getenv("PATH").split(os.pathsep)
for resolve_dir in resolve_dirs:
dir_and_command = os.path.join(resolve_dir, command)
for extension in extensions:
command_path = dir_and_command + extension
if is_exec(command_path):
return command_path
return None
def call(command, args):
command_path = _resolve_path(command)
if not command_path:
command_path = command
subprocess.check_call([command_path] + args)
def call_in_dir(working_dir, command, args):
original_dir = os.getcwd()
os.chdir(working_dir)
try:
call(command, args)
finally:
os.chdir(original_dir)
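# Usage sketch (added for illustration; the commands and directory are assumptions):
#   call("git", ["status"])               # resolves 'git' on PATH (honoring PATHEXT on Windows)
#   call_in_dir("/tmp", "ls", ["-la"])    # runs from /tmp, then restores the original cwd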
| StarcoderdataPython |
3213405 | <filename>62-reticulate/simple.py
import matplotlib.pyplot as plt
import numpy as np
x = np.array([1,2,3])
y = np.square(x)
plt.figure()
plt.plot(x,y)
plt.show()
| StarcoderdataPython |
4835916 | """
Consolidate Services
Description of all APIs # noqa: E501
The version of the OpenAPI document: version not set
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from argocd_python_client.api_client import ApiClient, Endpoint as _Endpoint
from argocd_python_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from argocd_python_client.model.gpgkey_gnu_pg_public_key_create_response import GpgkeyGnuPGPublicKeyCreateResponse
from argocd_python_client.model.runtime_error import RuntimeError
from argocd_python_client.model.v1alpha1_gnu_pg_public_key import V1alpha1GnuPGPublicKey
from argocd_python_client.model.v1alpha1_gnu_pg_public_key_list import V1alpha1GnuPGPublicKeyList
class GPGKeyServiceApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __g_pg_key_service_create(
self,
body,
**kwargs
):
"""Create one or more GPG public keys in the server's configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.g_pg_key_service_create(body, async_req=True)
>>> result = thread.get()
Args:
body (V1alpha1GnuPGPublicKey): Raw key data of the GPG key(s) to create
Keyword Args:
upsert (bool): Whether to upsert already existing public keys.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
GpgkeyGnuPGPublicKeyCreateResponse
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.g_pg_key_service_create = _Endpoint(
settings={
'response_type': (GpgkeyGnuPGPublicKeyCreateResponse,),
'auth': [],
'endpoint_path': '/api/v1/gpgkeys',
'operation_id': 'g_pg_key_service_create',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'body',
'upsert',
],
'required': [
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'body':
(V1alpha1GnuPGPublicKey,),
'upsert':
(bool,),
},
'attribute_map': {
'upsert': 'upsert',
},
'location_map': {
'body': 'body',
'upsert': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__g_pg_key_service_create
)
def __g_pg_key_service_delete(
self,
**kwargs
):
"""Delete specified GPG public key from the server's configuration # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.g_pg_key_service_delete(async_req=True)
>>> result = thread.get()
Keyword Args:
key_id (str): The GPG key ID to query for.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
bool, date, datetime, dict, float, int, list, str, none_type
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.g_pg_key_service_delete = _Endpoint(
settings={
'response_type': (bool, date, datetime, dict, float, int, list, str, none_type,),
'auth': [],
'endpoint_path': '/api/v1/gpgkeys',
'operation_id': 'g_pg_key_service_delete',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'key_id',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'key_id':
(str,),
},
'attribute_map': {
'key_id': 'keyID',
},
'location_map': {
'key_id': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__g_pg_key_service_delete
)
def __g_pg_key_service_get(
self,
key_id,
**kwargs
):
"""Get information about specified GPG public key from the server # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.g_pg_key_service_get(key_id, async_req=True)
>>> result = thread.get()
Args:
key_id (str): The GPG key ID to query for
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1GnuPGPublicKey
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['key_id'] = \
key_id
return self.call_with_http_info(**kwargs)
self.g_pg_key_service_get = _Endpoint(
settings={
'response_type': (V1alpha1GnuPGPublicKey,),
'auth': [],
'endpoint_path': '/api/v1/gpgkeys/{keyID}',
'operation_id': 'g_pg_key_service_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'key_id',
],
'required': [
'key_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'key_id':
(str,),
},
'attribute_map': {
'key_id': 'keyID',
},
'location_map': {
'key_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__g_pg_key_service_get
)
def __g_pg_key_service_list(
self,
**kwargs
):
"""List all available repository certificates # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.g_pg_key_service_list(async_req=True)
>>> result = thread.get()
Keyword Args:
key_id (str): The GPG key ID to query for.. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
V1alpha1GnuPGPublicKeyList
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
return self.call_with_http_info(**kwargs)
self.g_pg_key_service_list = _Endpoint(
settings={
'response_type': (V1alpha1GnuPGPublicKeyList,),
'auth': [],
'endpoint_path': '/api/v1/gpgkeys',
'operation_id': 'g_pg_key_service_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'key_id',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'key_id':
(str,),
},
'attribute_map': {
'key_id': 'keyID',
},
'location_map': {
'key_id': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__g_pg_key_service_list
)
| StarcoderdataPython |
3272478 | import requests
import json
import os
TOKEN = os.environ.get("STRAVA_BEARER")
try:
response_length = 100
data = []
page = 1
while response_length > 0:
r = requests.get(
"https://www.strava.com/api/v3/athlete/activities?per_page=30",
        headers={"Authorization": f"Bearer {TOKEN}"},
params={"per_page": 30, "page": page},
)
r.raise_for_status()
d = r.json()
response_length = len(d)
page += 1
data.extend(d)
finally:
print(f"Length: {len(data)} / {page}")
with open("output.json", "w") as fh:
json.dump(data, fh)
| StarcoderdataPython |
125550 | __all__ = [
'extern_progs',
'set_config'
]
from XICRA.config import *
| StarcoderdataPython |
3331686 | import numpy as np
def RotZ(heading):
psi = heading
R = np.array([[np.cos(heading), -np.sin(heading), 0],
[np.sin(heading), np.cos(heading), 0],
[0, 0, 1]])
return R
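# Sanity check (added for illustration): RotZ(0) is the 3x3 identity, and RotZ(np.pi / 2)
# maps the x-axis onto the y-axis.
#   assert np.allclose(RotZ(0.0), np.eye(3))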
R = RotZ(-1.889)
wayp = np.array([[6000],
[-300],
[-200]])
w = np.array([[6000],
[-5000],
[-200]])
wayp = np.hstack((wayp, w))
w = np.array([[-6000],
[-5000],
[-200]])
wayp = np.hstack((wayp,w))
w = np.array([[-6000],
[0],
[-200]])
wayp = np.hstack((wayp,w))
w = np.array([[50],
[0],
[-10]])
wayp = np.hstack((wayp, w))
print((R @ wayp)[:, 4])
| StarcoderdataPython |
1788960 | <gh_stars>1-10
#!/usr/bin/env python3
from datetime import datetime
import pytest
from astranslate import PBbool, PBdate, PBdict, PBlist, PBnumber, PBstring
def test_PBBool_ToApplescript_GivenBoolean_ReturnsBooleanString():
assert PBbool.to_applescript(True) == "True"
def test_PBBool_ToApplescript_GivenNumber_ReturnsNumberAsString():
assert PBbool.to_applescript(1) == "1"
def test_PBBool_ToPython_GivenTrue_ReturnsBoolean():
assert PBbool.to_python("true") is True
def test_PBBool_ToPython_GivenYes_ReturnsBoolean():
assert PBbool.to_python("yes") is True
def test_PBBool_ToPython_Given1_ReturnsBoolean():
assert PBbool.to_python("1") is True
def test_PBDate_ToPython_GivenExpectedDateString_ReturnsDateObject():
date_obj = PBdate.to_python("Thursday, January 1, 1970 12:00:00 AM")
assert isinstance(date_obj, datetime)
assert date_obj.year == 1970
assert date_obj.month == 1
assert date_obj.day == 1
assert date_obj.hour == 0
assert date_obj.minute == 0
assert date_obj.second == 0
def test_PBDate_ToApplescript_GivenDatetimeObject_ReturnsExpectedString():
date_obj = datetime(1970, 1, 1, 0, 0, 0)
date_str = PBdate.to_applescript(date_obj)
assert date_str == "Thursday, January 01, 1970 12:00:00 AM"
def test_PBDict_ToApplescript_GivenDictWithList_ReturnsExpectedString():
test_dict = {
"key1": "value1",
"key2": "value2",
"key3": ["list", "of", "values"],
}
dict_str = PBdict.to_applescript(test_dict)
assert dict_str == "<key1=value1><key2=value2><key3={list|of|values}>"
def test_PBDict_ToPython_GivenBasicDict_ReturnsExpectedDict():
dict_str = PBdict.to_python("<key1=value1><key2=value2>")
assert len(dict_str) == 2
assert list(dict_str) == ["key1", "key2"]
def test_PBList_ToApplescript_GivenBasicList_ReturnsExpectedString():
ls_str = PBlist.to_applescript(["a", "b", "c", "d", "e"])
assert ls_str == "{a|b|c|d|e}"
# @pytest.mark.xfail
def test_PBList_ToApplescript_GivenNestedList_ReturnsExpectedString():
ls_str = PBlist.to_applescript(["a", "b", "c", ["d", "e"]])
assert ls_str == "{a|b|c|{d|e}}"
def test_PBList_ToPython_GivenBasicList_ReturnsExpectedList():
ls_str = PBlist.to_python("{a|b|c|d|e}")
assert isinstance(ls_str, list)
assert ls_str == ["a", "b", "c", "d", "e"]
@pytest.mark.xfail
def test_PBList_ToPython_GivenNestedList_ReturnsExpectedList():
ls_str = PBlist.to_python("{a|b|c|{d|e}}")
assert ls_str == ["a", "b", "c", ["d", "e"]]
def test_PBNumber_ToApplescript_GivenInteger_ReturnsNumberString():
assert PBnumber.to_applescript(2) == "2"
def test_PBNumber_ToApplescript_GivenFloat_ReturnsNumberString():
assert PBnumber.to_applescript(3.9) == "3.9"
def test_PBNumber_ToPython_GivenIntString_ReturnsInt():
assert PBnumber.to_python("3") == 3
def test_PBNumber_ToPython_GivenFloatString_ReturnsFloat():
assert PBnumber.to_python("3.9") == 3.9
def test_PBString_ToApplescript_GivenString_ReturnsSameString():
assert PBstring.to_applescript("a string of text") == "a string of text"
def test_PBString_ToPython_GivenString_ReturnsSameString():
assert PBstring.to_python("a string of text") == "a string of text"
| StarcoderdataPython |
1695775 |
import pygame
import file_importer as fi
from math import pi
pygame.init()
size = (700, 700)
screen = pygame.display.set_mode(size)
pygame.display.set_caption('3D OBJ File Renderer')
clock, fps = pygame.time.Clock(), 30
screen_plane = (300, 300)
path = 'TestModels/robot.obj'
shape = fi.load_obj(path, screen_plane, size)
shape.flip()
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
exit()
key = pygame.key.get_pressed()
mouse_pos = pygame.mouse.get_pos()
mouse_pressed = pygame.mouse.get_pressed()
screen.fill(0)
shape.rotate()
shape.draw_face(screen)
pygame.display.update()
clock.tick(fps)
| StarcoderdataPython |
3304326 | <reponame>AgPipeline/AgPypeline
#!/usr/bin/env python3
"""
Purpose: Unit testing for entrypoint.py
Author : <NAME> <<EMAIL>>
Notes:
This file assumes it's in a subfolder off the main folder
"""
import argparse
import json
import os
import piexif
from agpypeline import configuration, environment
TEST_FILES = ['agpypeline/configuration.py', 'agpypeline/environment.py']
def test_exists():
"""Tests whether all necessary files are accessible"""
for file in TEST_FILES:
assert os.path.isfile(file)
def test_environment_exif_tags_to_timestamp():
"""Tests environment's exif_tags_to_timestamp function py processing image information in order
to extract the image's exif tags and then checking the function call on the exif tags against the
results from a file in the data directory"""
arr = []
environ = environment.__internal__()
assert environ.exif_tags_to_timestamp({}) is None
for image in os.listdir("images/jpg_images"):
if image.endswith(".JPG"):
tags_dict = piexif.load("images/jpg_images/" + image)
exif_tags = tags_dict["Exif"]
result = environ.exif_tags_to_timestamp(exif_tags)
arr.append(result)
with open("data/exif_tags_to_timestamp.txt", encoding='utf-8') as checkfile:
assert str(arr) == checkfile.read()
def test_environment_get_first_timestamp():
"""The image files included do have accessible timestamps until the update takes place, but this will
check the result when not having a timestamp included and when having a timestamp included"""
arr = []
environ = environment.__internal__()
no_timestamp_res = environ.get_first_timestamp("images/jpg_images/DJI_0340.JPG")
later_timestamp_res = environ.get_first_timestamp("images/jpg_images/DJI_0340.JPG", '2020-12-31')
earlier_timestamp_res = environ.get_first_timestamp("images/jpg_images/DJI_0340.JPG", '2000-12-31')
arr.append(no_timestamp_res)
arr.append(later_timestamp_res)
arr.append(earlier_timestamp_res)
with open("data/environment_get_first_timestamp.json", encoding='utf-8') as in_file:
checkfile = json.load(in_file)
assert no_timestamp_res == checkfile[0]
assert later_timestamp_res == checkfile[1]
assert earlier_timestamp_res == checkfile[2]
def test_environment_environment():
"""Tests initializing environment's Environment class by making sure it contains all necessary parameters"""
environ = environment.Environment(configuration.Configuration())
assert environ.sensor is None
assert environ.args is None
for entry in ['transformer_version', 'transformer_description', 'transformer_name', 'transformer_sensor',
'transformer_type', 'author_name', 'author_email', 'contributors', 'repository']:
assert hasattr(environ.configuration, entry)
if entry == 'contributors':
assert getattr(environ.configuration, entry) == []
else:
assert getattr(environ.configuration, entry) is None
def test_environment_generate_transformer_md():
"""Tests the call of generate_transformer_md on a default configuration"""
environ = environment.Environment(configuration.Configuration())
assert environ.generate_transformer_md() == {'version': None, 'name': None, 'author': None, 'description': None,
'repository': {'repUrl': None}}
def test_environment_add_parameters():
"""Tests the call of add_parameters with default parameters"""
parser = argparse.ArgumentParser()
environ = environment.Environment(configuration.Configuration())
environ.add_parameters(parser)
assert parser.epilog == "None version None author None None"
def test_environment_get_transformer_params():
"""Checks the call of get_transformer_parameters with default parameters against the output from a .json file
list_files is set to None because it is a function, and timestamp is set to none because it is the current
timestamp, which differs from second to second"""
with open("data/environment_get_transformer_params.json", encoding='utf-8') as in_file:
check_result = json.load(in_file)
environ = environment.Environment(configuration.Configuration())
namespace = argparse.Namespace()
namespace.file_list = []
namespace.working_space = []
result = environ.get_transformer_params(namespace, [])
result_dict = {'transformer_md': result['transformer_md'], 'full_md': result['full_md']}
check_md = dict(result['check_md']._asdict())
check_md['timestamp'] = ''
check_md['list_files'] = ''
result_dict['check_md'] = check_md
assert check_result == result_dict
| StarcoderdataPython |
3391359 | from flask import current_app, request
from flask_security.confirmable import requires_confirmation
from flask_security.forms import (
NextFormMixin,
get_form_field_label,
config_value,
)
from flask_security.utils import get_message, verify_and_update_password
from flask_wtf import FlaskForm
from werkzeug.local import LocalProxy
from wtforms import BooleanField, PasswordField, StringField, SubmitField
_security = LocalProxy(lambda: current_app.extensions["security"])
_datastore = LocalProxy(lambda: current_app.extensions["security"].datastore)
import logging
logger = logging.getLogger(__name__)
class LoginForm(FlaskForm, NextFormMixin):
"""Username login form"""
username = StringField(get_form_field_label("username"))
password = PasswordField(get_form_field_label("password"))
remember = BooleanField(get_form_field_label("remember_me"))
submit = SubmitField(get_form_field_label("login"))
def __init__(self, *args, **kwargs):
super(LoginForm, self).__init__(*args, **kwargs)
if not self.next.data:
self.next.data = request.args.get("next", "")
self.remember.default = config_value("DEFAULT_REMEMBER_ME")
def validate(self):
if not super(LoginForm, self).validate():
return False
if not self.username.data.strip():
self.username.errors.append("Username not provided")
return False
if not self.password.data.strip():
self.password.errors.append(
get_message("PASSWORD_NOT_PROVIDED")[0]
)
return False
username = self.username.data
self.user = _security.datastore.find_user(username=username)
if not self.user:
logger.warning(
"not found {} using username field, "
"now using fallback with email".format(username)
)
self.user = _security.datastore.find_user(email=username)
if self.user is None:
self.username.errors.append(get_message("USER_DOES_NOT_EXIST")[0])
return False
if not self.user.password:
self.password.errors.append(get_message("PASSWORD_NOT_SET")[0])
return False
if not verify_and_update_password(self.password.data, self.user):
self.password.errors.append(get_message("INVALID_PASSWORD")[0])
return False
if requires_confirmation(self.user):
self.username.errors.append(
get_message("CONFIRMATION_REQUIRED")[0]
)
return False
if not self.user.is_active:
self.username.errors.append(get_message("DISABLED_ACCOUNT")[0])
return False
return True
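# Usage sketch (illustrative, not part of the original module): a form like this
# is typically handed to Flask-Security when the extension is initialised, so it
# replaces the default email-based login form. `app` and `user_datastore` are
# assumed to be created elsewhere in the application.
#
#   from flask_security import Security
#   security = Security(app, user_datastore, login_form=LoginForm)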
| StarcoderdataPython |
3281978 | <reponame>etdv-thevoid/pokemon-rgb-enhanced
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
import sys
import os
import time
import datetime
from ctypes import c_int8
from copy import copy
import json
# New versions of json don't have read anymore.
if not hasattr(json, "read"):
json.read = json.loads
from .labels import (
get_label_from_line,
get_address_from_line_comment,
)
relative_jumps = [0x38, 0x30, 0x20, 0x28, 0x18, 0xc3, 0xda, 0xc2, 0x32]
relative_unconditional_jumps = [0xc3, 0x18]
call_commands = [0xdc, 0xd4, 0xc4, 0xcc, 0xcd]
end_08_scripts_with = [
0xe9, # jp hl
0xc9, # ret
] # possibly also:
# 0xc3, # jp
# 0xc18, # jr
# 0xda, 0xe9, 0xd2, 0xc2, 0xca, 0x38, 0x30, 0x20, 0x28, 0x18, 0xd8,
# 0xd0, 0xc0, 0xc8, 0xc9
spacing = "\t"
class RomStr(str):
"""
Simple wrapper to prevent a giant rom from being shown on screen.
"""
def __init__(self, *args, **kwargs):
if "labels" in kwargs.keys() and kwargs["labels"] == True:
self.load_labels()
str.__init__(self)
def __repr__(self):
"""
Simplifies this object so that the output doesn't overflow stdout.
"""
return "RomStr(too long)"
@classmethod
def load(cls, filename=None, crystal=True, red=False):
"""
Load a ROM into a RomStr.
"""
        if crystal and not red and not filename:
            file_handler = open("../baserom.gbc", "rb")
        elif red and not crystal and not filename:
            file_handler = open("../pokered-baserom.gbc", "rb")
        elif filename not in ["", None]:
            file_handler = open(filename, "rb")
else:
raise Exception("not sure which rom to load?")
bytes = file_handler.read()
file_handler.close()
return RomStr(bytes)
def load_labels(self, filename="labels.json"):
"""
Loads labels from labels.json.
(Or parses the source code file and
generates new labels.)
"""
filename = os.path.join(os.path.dirname(__file__), filename)
# blank out the hash
self.labels = {}
# check if the labels file exists
file_existence = os.path.exists(filename)
generate_labels = False
# determine if the labels file needs to be regenerated
if file_existence:
modified = os.path.getmtime(filename)
modified = datetime.datetime.fromtimestamp(modified)
current = datetime.datetime.fromtimestamp(time.time())
is_old = (current - modified) > datetime.timedelta(days=3)
if is_old:
generate_labels = True
else:
generate_labels = True
# scan the asm source code for labels
if generate_labels:
asm = open(os.path.join(os.path.dirname(__file__), "../main.asm"), "r").read().split("\n")
for line in asm:
label = get_label_from_line(line)
if label:
address = get_address_from_line_comment(line)
self.labels[address] = label
content = json.dumps(self.labels)
file_handler = open(filename, "w")
file_handler.write(content)
file_handler.close()
# load the labels from the file
self.labels = json.read(open(filename, "r").read())
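    # Note on the structure built above (illustrative): self.labels maps an
    # address parsed from the line comment to a label name, e.g. something like
    # {"0x150": "Start"} -- the actual entries depend on ../main.asm.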
def get_address_for(self, label):
"""
Return the address of a label.
This is slow and could be improved dramatically.
"""
label = str(label)
for address in self.labels.keys():
if self.labels[address] == label:
return address
return None
def length(self):
"""
len(self)
"""
return len(self)
def len(self):
"""
len(self)
"""
return self.length()
def interval(self, offset, length, strings=True, debug=True):
"""
Return hex values for the rom starting at offset until offset+length.
"""
returnable = []
for byte in self[offset:offset+length]:
if strings:
returnable.append(hex(ord(byte)))
else:
returnable.append(ord(byte))
return returnable
def until(self, offset, byte, strings=True, debug=False):
"""
Return hex values from rom starting at offset until the given byte.
"""
return self.interval(offset, self.find(chr(byte), offset) - offset, strings=strings)
def to_asm(self, address, end_address=None, size=None, max_size=0x4000, debug=None):
"""
Disassemble ASM at some address.
This will stop disassembling when either the end_address or size is
met. Also, there's a maximum size that will be parsed, so that large
patches of data aren't parsed as code.
"""
if type(address) in [str, unicode] and "0x" in address:
address = int(address, 16)
start_address = address
if start_address == None:
raise Exception("address must be given")
if debug == None:
if not hasattr(self, "debug"):
debug = False
else:
debug = self.debug
# this is probably a terrible idea.. why am i doing this?
if size != None and max_size < size:
raise Exception("max_size must be greater than or equal to size")
elif end_address != None and (end_address - start_address) > max_size:
raise Exception("end_address is out of bounds")
elif end_address != None and size != None:
if (end_address - start_address) >= size:
size = end_address - start_address
else:
end_address = start_address + size
elif end_address == None and size != None:
end_address = start_address + size
elif end_address != None and size == None:
size = end_address - start_address
raise NotImplementedError("DisAsm was removed and never worked; hook up another disassembler please.")
#return DisAsm(start_address=start_address, end_address=end_address, size=size, max_size=max_size, debug=debug, rom=self)
class AsmList(list):
"""
Simple wrapper to prevent all asm lines from being shown on screen.
"""
def length(self):
"""
len(self)
"""
return len(self)
def __repr__(self):
"""
Simplifies this object so that the output doesn't overflow stdout.
"""
return "AsmList(too long)"
if __name__ == "__main__":
    cryrom = RomStr(open("../pokecrystal.gbc", "rb").read())
asm = cryrom.to_asm(sys.argv[1])
print(asm)
| StarcoderdataPython |
1677494 | <gh_stars>0
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Bridge for using cclib data in PyQuante (http://pyquante.sourceforge.net)."""
from __future__ import print_function
import sys
try:
from PyQuante.Molecule import Molecule
except ImportError:
# Fail silently for now.
pass
def makepyquante(atomcoords, atomnos, charge=0, mult=1):
"""Create a PyQuante Molecule.
>>> import numpy
>>> from PyQuante.hartree_fock import hf
>>> atomnos = numpy.array([1,8,1],"i")
>>> a = numpy.array([[-1,1,0],[0,0,0],[1,1,0]],"f")
>>> pyqmol = makepyquante(a,atomnos)
>>> en,orbe,orbs = hf(pyqmol)
>>> print int(en * 10) / 10. # Should be around -73.8
-73.8
"""
return Molecule("notitle", list(zip(atomnos, atomcoords)), units="Angstrom",
charge=charge, multiplicity=mult)
if __name__ == "__main__":
import doctest
doctest.testmod()
| StarcoderdataPython |
1643860 | # Each view is responsible for doing one of two things: returning an HttpResponse object containing the content for the requested page, or raising an exception such as Http404. The rest is up to you.
# Here we just use the render method imported so we can render an HTML page
from django.shortcuts import render
# Create your views here.
def say_hello(request):
    # The render() function takes the request object as its first argument, a template name as its second argument, and a dictionary as its optional third argument.
return render(request, 'hello.html', {'name':'Gustaf'})
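# Wiring sketch (assumed, not part of this file): the view above would be hooked
# up in the app's urls.py, roughly like this. The URL pattern and its name are
# made up for illustration.
#
#   from django.urls import path
#   from . import views
#
#   urlpatterns = [
#       path('hello/', views.say_hello, name='say_hello'),
#   ]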
| StarcoderdataPython |
3220303 | """
Experimental tests that play with thin-edge.io.
See https://thin-edge.io
"""
import logging
import os
import time
import pytest
CURL = "/usr/bin/curl -X POST http://localhost:{} -d hello_my_plugins"
def test_go():
pass
def test_use_case_help(process_factory):
help = process_factory(["tedge", "-V"])
help.run()
assert help.get_stdout() == "tedge 0.5.2"
def test_use_connect(process_factory):
con = process_factory(["sudo", "tedge", "connect", "c8y"])
con.run()
print(con.get_stdout())
print(con.get_stderr())
assert con.returncode == 0
discon = process_factory(["sudo", "tedge", "disconnect", "c8y"])
discon.run()
print(discon.get_stderr())
print(discon.get_stdout())
assert discon.returncode == 0
def test_use_case_connect_and_observe(process_factory):
mos = process_factory(["/usr/bin/mosquitto_sub", "-v", "-t", "#"], name="mos")
mos.run_bg()
con = process_factory(["sudo", "tedge", "connect", "c8y"], name="con")
con.run()
print(con.get_stdout())
print(con.get_stderr())
assert con.returncode == 0
discon = process_factory(["sudo", "tedge", "disconnect", "c8y"], name="discon")
discon.run()
print(discon.get_stderr())
print(discon.get_stdout())
assert discon.returncode == 0
mos.kill()
print(mos.get_stdout())
print(mos.get_stderr())
assert "tedge/commands/req/software/list" in mos.get_stdout()
# TODO tedge connect in the background ?
# TODO inverse gherkin here?
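# The `process_factory` fixture used above is not defined in this file; it is
# assumed to come from a conftest.py. A minimal sketch of such a fixture (the
# names and the helper class are assumptions, not the project's implementation):
#
#   import pytest
#
#   @pytest.fixture
#   def process_factory():
#       def _factory(cmd, name=None):
#           return ManagedProcess(cmd, name=name)  # hypothetical subprocess wrapper
#       return _factory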
| StarcoderdataPython |
4809337 | from pathlib import Path
import stubber
from packaging.version import parse
try:
import tomllib # type: ignore
except ModuleNotFoundError:
import tomli as tomllib
def test_package_versions_are_in_sync():
"""Checks if the pyproject.toml and package.__init__.py __version__ are in sync."""
# Q&D Location
path = Path(__file__).resolve().parents[1] / "pyproject.toml"
pyproject = tomllib.loads(open(str(path)).read())
pyproject_version = pyproject["tool"]["poetry"]["version"]
package_init_version = stubber.__version__
assert parse(package_init_version).public == parse(pyproject_version).public
| StarcoderdataPython |
75477 | <filename>tests/fileoperation_test.py
"""
Tests for file related operation
"""
import os
import pytest
from xkye import IO as io
# To test the missing file
def test_missing_input_file():
""" To test the missing input file """
xky_file = "../test/test.xky"
with pytest.raises(Exception):
mxkye = io(xky_file)
assert mxkye.read() is None
# To test the read operation
def test_input_file_success():
""" to test the correct input file """
xky_file = "in/test.xky"
dir_path = os.path.dirname(os.path.realpath(__file__))
xky_file = dir_path + "/" + xky_file
xkye = io(xky_file)
dictionary = xkye.read()
assert dictionary is True
| StarcoderdataPython |
3225619 | <gh_stars>10-100
# Copyright (c) 2018 <NAME>
#
# Distributed under the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
from phylanx import Phylanx
@Phylanx
def foo():
f = lambda a: a # noqa: E731
f(42)
assert (foo() == 42)
| StarcoderdataPython |
1780352 | """File downloader for the Virtual Campus of the Valladolid Unversity."""
from ._version import get_versions
__version__ = get_versions()["version"]
del get_versions
| StarcoderdataPython |
1744924 | <filename>atomic_reactor/inner.py
"""
Copyright (c) 2015-2022 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
Script for building docker image. This is expected to run inside container.
"""
import functools
import json
import logging
import signal
import threading
import os
import time
import re
from dataclasses import dataclass, field, fields
from textwrap import dedent
from typing import Any, Callable, Dict, Final, List, Optional, Union
from dockerfile_parse import DockerfileParser
from atomic_reactor.dirs import ContextDir, RootBuildDir
from atomic_reactor.plugin import BuildCanceledException, PluginsRunner
from atomic_reactor.constants import (
DOCKER_STORAGE_TRANSPORT_NAME,
REACTOR_CONFIG_FULL_PATH,
DOCKERFILE_FILENAME,
)
from atomic_reactor.types import ISerializer, RpmComponent
from atomic_reactor.util import (DockerfileImages,
base_image_is_custom, print_version_of_tools, validate_with_schema)
from atomic_reactor.config import Configuration
from atomic_reactor.source import Source, DummySource
from atomic_reactor.utils import imageutil
# from atomic_reactor import get_logging_encoding
from osbs.utils import ImageName
logger = logging.getLogger(__name__)
class BuildResults(object):
build_logs = None
dockerfile = None
built_img_inspect = None
built_img_info = None
base_img_inspect = None
base_img_info = None
base_plugins_output = None
built_img_plugins_output = None
container_id = None
return_code = None
class BuildResultsEncoder(json.JSONEncoder):
def default(self, obj): # pylint: disable=method-hidden,arguments-renamed
if isinstance(obj, BuildResults):
return {
'build_logs': obj.build_logs,
'built_img_inspect': obj.built_img_inspect,
'built_img_info': obj.built_img_info,
'base_img_info': obj.base_img_info,
'base_plugins_output': obj.base_plugins_output,
'built_img_plugins_output': obj.built_img_plugins_output,
}
# Let the base class default method raise the TypeError
return json.JSONEncoder.default(self, obj)
class BuildResultsJSONDecoder(json.JSONDecoder):
def decode(self, obj):
d = super(BuildResultsJSONDecoder, self).decode(obj)
results = BuildResults()
results.built_img_inspect = d.get('built_img_inspect', None)
results.built_img_info = d.get('built_img_info', None)
results.base_img_info = d.get('base_img_info', None)
results.base_plugins_output = d.get('base_plugins_output', None)
results.built_img_plugins_output = d.get('built_img_plugins_output', None)
return results
class TagConf(ISerializer):
"""
    configuration of image names and tags to be applied
"""
def __init__(self):
# list of ImageNames with 'static' tags
self._primary_images: List[ImageName] = []
        # list of ImageName instances with unpredictable names
self._unique_images: List[ImageName] = []
# list of ImageName instances with 'floating' tags
# which can be updated by other images later
self._floating_images: List[ImageName] = []
def __eq__(self, other: object) -> bool:
if not isinstance(other, type(self)):
return False
return (
self._primary_images == other.primary_images and
self._unique_images == other.unique_images and
self._floating_images == other.floating_images
)
@property
def is_empty(self):
return (
len(self.primary_images) == 0 and
len(self.unique_images) == 0 and
len(self.floating_images) == 0
)
@property
def primary_images(self):
"""
primary image names are static and should be used for layering
:return: list of ImageName
"""
return self._primary_images
@property
def images(self):
"""
list of all ImageNames
:return: list of ImageName
"""
return self._primary_images + self._unique_images + self._floating_images
@property
def unique_images(self):
"""
unique image names are unpredictable and should be used for tracking only
:return: list of ImageName
"""
return self._unique_images
@property
def floating_images(self):
"""
floating image names are floating and should be used for layering
:return: list of ImageName
"""
return self._floating_images
def add_primary_image(self, image: Union[str, "ImageName"]) -> None:
"""add new primary image
:param image: str, name of image (e.g. "namespace/httpd:2.4")
:return: None
"""
self._primary_images.append(ImageName.parse(image))
def add_unique_image(self, image: Union[str, "ImageName"]) -> None:
"""add image with unpredictable name
:param image: str, name of image (e.g. "namespace/httpd:2.4")
:return: None
"""
self._unique_images.append(ImageName.parse(image))
def add_floating_image(self, image: Union[str, "ImageName"]) -> None:
"""add image with floating name
:param image: str, name of image (e.g. "namespace/httpd:2.4")
:return: None
"""
self._floating_images.append(ImageName.parse(image))
def get_unique_images_with_platform(self, platform: str) -> List[ImageName]:
"""
Add platform to unique images
:param platform: str, platform to be added to unique images
        :return: list of unique images with added platform
"""
def add_platform(image: ImageName) -> ImageName:
return ImageName(
registry=image.registry,
namespace=image.namespace,
repo=image.repo,
tag=f'{image.tag}-{platform}',
)
return list(map(add_platform, self.unique_images))
@classmethod
def load(cls, data: Dict[str, Any]):
tag_conf = TagConf()
image: ImageName
for image in data.get("primary_images", []):
tag_conf.add_primary_image(image)
for image in data.get("unique_images", []):
tag_conf.add_unique_image(image)
for image in data.get("floating_images", []):
tag_conf.add_floating_image(image)
return tag_conf
def as_dict(self) -> Dict[str, Any]:
return {
"primary_images": self.primary_images,
"unique_images": self.unique_images,
"floating_images": self.floating_images,
}
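# Illustrative use of TagConf (the image names below are placeholders, not
# values used by the project):
#
#   tag_conf = TagConf()
#   tag_conf.add_primary_image("registry.example.com/ns/httpd:2.4-1")
#   tag_conf.add_floating_image("registry.example.com/ns/httpd:latest")
#   tag_conf.add_unique_image("registry.example.com/ns/httpd:unique-123")
#   assert len(tag_conf.images) == 3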
class FSWatcher(threading.Thread):
"""
Poll the filesystem every second in the background and keep a record of highest usage.
"""
def __init__(self, *args, **kwargs):
super(FSWatcher, self).__init__(*args, **kwargs)
self.daemon = True # exits whenever the process exits
self._lock = threading.Lock()
self._done = False
self._data = {}
def run(self):
""" Overrides parent method to implement thread's functionality. """
while True: # make sure to run at least once before exiting
with self._lock:
self._update(self._data)
if self._done:
break
time.sleep(1)
def get_usage_data(self):
""" Safely retrieve the most up to date results. """
with self._lock:
data_copy = self._data.copy()
return data_copy
def finish(self):
""" Signal background thread to exit next time it wakes up. """
with self._lock: # just to be tidy; lock not really needed to set a boolean
self._done = True
@staticmethod
def _update(data):
try:
st = os.statvfs("/")
except Exception as e:
return e # just for tests; we don't really need return value
mb = 1000 ** 2 # sadly storage is generally expressed in decimal units
new_data = dict(
mb_free=st.f_bfree * st.f_frsize // mb,
mb_total=st.f_blocks * st.f_frsize // mb,
mb_used=(st.f_blocks - st.f_bfree) * st.f_frsize // mb,
inodes_free=st.f_ffree,
inodes_total=st.f_files,
inodes_used=st.f_files - st.f_ffree,
)
for key in ["mb_total", "mb_used", "inodes_total", "inodes_used"]:
data[key] = max(new_data[key], data.get(key, 0))
for key in ["mb_free", "inodes_free"]:
data[key] = min(new_data[key], data.get(key, float("inf")))
return new_data
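# Illustrative start/finish lifecycle of FSWatcher, mirroring how
# build_docker_image drives it further below:
#
#   watcher = FSWatcher()
#   watcher.start()
#   ...                      # disk-heavy work happens here
#   watcher.finish()
#   usage = watcher.get_usage_data()   # e.g. {'mb_free': ..., 'mb_used': ..., ...}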
@dataclass
class ImageBuildWorkflowData(ISerializer):
"""Manage workflow data.
    Workflow data are the values generated by plugins throughout the whole
    build workflow (pipeline) and must be shared across all build tasks.
    These data can be dumped into a dictionary object in order to be saved into
    a file as JSON data, and then be loaded when atomic-reactor launches again
    to execute another set of plugins for a different build task.
"""
dockerfile_images: DockerfileImages = field(default_factory=DockerfileImages)
tag_conf: TagConf = field(default_factory=TagConf)
plugins_results: Dict[str, Any] = field(default_factory=dict)
# Plugin name -> timestamp in isoformat
plugins_timestamps: Dict[str, str] = field(default_factory=dict)
# Plugin name -> seconds
plugins_durations: Dict[str, float] = field(default_factory=dict)
# Plugin name -> a string containing error message
plugins_errors: Dict[str, str] = field(default_factory=dict)
build_canceled: bool = False
plugin_failed: bool = False
# info about pre-declared build, build-id and token
reserved_build_id: Optional[int] = None
reserved_token: Optional[str] = None
koji_source_nvr: Dict[str, str] = field(default_factory=dict)
koji_source_source_url: Optional[str] = None
koji_source_manifest: Dict[str, Any] = field(default_factory=dict)
buildargs: Dict[str, str] = field(default_factory=dict) # --buildargs for container build
# When an image is exported into tarball, it can then be processed by various plugins.
# Each plugin that transforms the image should save it as a new file and append it to
# the end of exported_image_sequence. Other plugins should then operate with last
# member of this structure. Example:
# [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
# You can use util.get_exported_image_metadata to create a dict to append to this list.
    # OSBS2 TBD exported_image_sequence will not work for multiple platforms
exported_image_sequence: List[Dict[str, Union[str, int]]] = field(default_factory=list)
# mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
# "path/to/file" -> "content"
files: Dict[str, str] = field(default_factory=dict)
# Per platform List of RPMs that go into the final result
# Each RPM inside is a mapping containing the name, version, release and other attributes.
image_components: Dict[str, List[RpmComponent]] = field(default_factory=dict)
# List of all yum repos. The provided repourls might be changed (by resolve_composes) when
# inheritance is enabled. This property holds the updated list of repos, allowing
# post-build plugins (such as koji_import) to record them.
all_yum_repourls: List[str] = field(default_factory=list)
# Plugins can store info here using @annotation and @annotation_map decorators
# from atomic_reactor.metadata
annotations: Dict[str, Any] = field(default_factory=dict)
# OSBS2 TBD
image_id: Optional[str] = None
parent_images_digests: Dict[str, Dict[str, str]] = field(default_factory=dict)
# List of output files that are uploaded to Brew/Koji
# Each element is a two-strings list, local_filename and dest_filename. E.g.
# [
# {"local_filename": "/path/to/data.json", "dest_filename": "metadata.json"},
# {"local_filename": "/path/to/build.log", "dest_filename": "x86_64-build.log"},
# ]
koji_upload_files: List[Dict[str, str]] = field(default_factory=list)
@classmethod
def load(cls, data: Dict[str, Any]):
"""Load workflow data from given input."""
wf_data = cls()
if not data:
return wf_data
data_conv: Dict[str, Callable] = {
"dockerfile_images": DockerfileImages.load,
"tag_conf": TagConf.load,
}
def _return_directly(value):
return value
defined_field_names = set(f.name for f in fields(cls))
for name, value in data.items():
if name not in defined_field_names:
logger.info("Unknown field name %s", name)
continue
setattr(wf_data, name, data_conv.get(name, _return_directly)(value))
return wf_data
@classmethod
def load_from_dir(cls, context_dir: ContextDir) -> "ImageBuildWorkflowData":
"""Load workflow data from the data directory.
:param context_dir: a directory holding the files containing the serialized
workflow data.
:type context_dir: ContextDir
:return: the workflow data containing data loaded from the specified directory.
:rtype: ImageBuildWorkflowData
"""
if not context_dir.workflow_json.exists():
return cls()
with open(context_dir.workflow_json, "r") as f:
file_content = f.read()
raw_data = json.loads(file_content)
validate_with_schema(raw_data, "schemas/workflow_data.json")
# NOTE: json.loads twice since the data is validated at the first time.
workflow_data = json.loads(file_content, object_hook=WorkflowDataDecoder())
loaded_data = cls(**workflow_data)
return loaded_data
def as_dict(self) -> Dict[str, Any]:
return {field.name: getattr(self, field.name) for field in fields(self)}
def save(self, context_dir: ContextDir) -> None:
"""Save workflow data into the files under a specific directory.
:param context_dir: a directory holding the files containing the serialized
workflow data.
:type context_dir: ContextDir
"""
logger.info("Writing workflow data into %s", context_dir.workflow_json)
with open(context_dir.workflow_json, "w+") as f:
json.dump(self.as_dict(), f, cls=WorkflowDataEncoder)
class WorkflowDataEncoder(json.JSONEncoder):
"""Convert custom serializable objects into dict as JSON data."""
def default(self, o: object) -> Any:
if isinstance(o, ISerializer):
data = o.as_dict()
# Data type name used to know which type of object to recover.
data["__type__"] = o.__class__.__name__
return data
elif isinstance(o, ImageName):
return {
"__type__": o.__class__.__name__,
"str": o.to_str(),
}
return super().default(o)
class WorkflowDataDecoder:
"""Custom JSON decoder for workflow data."""
def _restore_image_name(self, data: Dict[str, str]) -> ImageName:
"""Factor to create an ImageName object."""
return ImageName.parse(data["str"])
def __call__(self, data: Dict[str, Any]) -> Any:
"""Restore custom serializable objects."""
loader_meths: Final[Dict[str, Callable]] = {
DockerfileImages.__name__: DockerfileImages.load,
TagConf.__name__: TagConf.load,
ImageName.__name__: self._restore_image_name,
}
if "__type__" not in data:
# __type__ is an identifier to indicate a dict object represents an
# object that should be recovered. If no type is included, just
# treat it as a normal dict and return.
return data
obj_type = data.pop("__type__")
loader_meth = loader_meths.get(obj_type)
if loader_meth is None:
raise ValueError(
f"Unknown object type {obj_type} to restore an object from data {data!r}."
)
return loader_meth(data)
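# Illustrative round trip through the encoder/decoder pair above (the image
# name is a placeholder, not one used by the project):
#
#   wf_data = ImageBuildWorkflowData(dockerfile_images=DockerfileImages(["base:latest"]))
#   blob = json.dumps(wf_data.as_dict(), cls=WorkflowDataEncoder)
#   raw = json.loads(blob, object_hook=WorkflowDataDecoder())
#   restored = ImageBuildWorkflowData(**raw)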
class DockerBuildWorkflow(object):
"""
This class defines a workflow for building images:
1. pull image from registry
2. tag it properly if needed
3. obtain source
4. build image
5. tag it
6. push it to registries
"""
# The only reason this is here is to have something that unit tests can monkeypatch
_default_user_params: Dict[str, Any] = {}
def __init__(
self,
context_dir: ContextDir,
build_dir: RootBuildDir,
namespace: str,
pipeline_run_name: str,
data: Optional[ImageBuildWorkflowData] = None,
source: Source = None,
user_params: dict = None,
reactor_config_path: str = REACTOR_CONFIG_FULL_PATH,
client_version: str = None,
plugins_conf: Optional[List[Dict[str, Any]]] = None,
plugin_files: Optional[List[str]] = None,
keep_plugins_running: bool = False,
):
"""
:param context_dir: the directory passed to task --context-dir argument.
:type context_dir: ContextDir
:param build_dir: a directory holding all the artifacts to build an image.
:type build_dir: RootBuildDir
:param data:
:type data: ImageBuildWorkflowData
:param source: where/how to get source code to put in image
:param namespace: OpenShift namespace of the task
:param pipeline_run_name: PipelineRun name to reference PipelineRun
:param plugins_conf: the plugins to be executed in this workflow
:type plugins_conf: list[dict[str, any]] or None
:param user_params: user (and other) params that control various aspects of the build
:param reactor_config_path: path to atomic-reactor configuration file
:param plugin_files: load plugins also from these files
:param client_version: osbs-client version used to render build json
:param bool keep_plugins_running: keep plugins running even if error is
raised from previous one. This is passed to ``PluginsRunner`` directly.
"""
self.context_dir = context_dir
self.build_dir = build_dir
self.data = data or ImageBuildWorkflowData()
self.namespace = namespace
self.pipeline_run_name = pipeline_run_name
self.source = source or DummySource(None, None)
self.user_params = user_params or self._default_user_params.copy()
self.keep_plugins_running = keep_plugins_running
self.plugin_files = plugin_files
self.plugins_conf = plugins_conf or []
self.fs_watcher = FSWatcher()
self.storage_transport = DOCKER_STORAGE_TRANSPORT_NAME
if client_version:
logger.debug("build json was built by osbs-client %s", client_version)
# get info about base image from dockerfile
build_file_path, _ = self.source.get_build_file_path()
self.conf = Configuration(config_path=reactor_config_path)
# If the Dockerfile will be entirely generated from the container.yaml
# (in the Flatpak case, say), then a plugin needs to create the Dockerfile
# and set the base image
if build_file_path.endswith(DOCKERFILE_FILENAME):
self.reset_dockerfile_images(build_file_path)
def reset_dockerfile_images(self, path: str) -> None:
"""Given a new Dockerfile path, (re)set all the mutable state that relates to it.
Workflow keeps a dockerfile_images object, which corresponds to the parent images in
the Dockerfile. This object and the actual Dockerfile are both mutable (and plugins
frequently mutate them). It is the responsibility of every plugin to make the changes
in such a way that the actual parent images and their in-memory representation do not
get out of sync.
For extreme cases such as plugins creating an entirely new Dockerfile (e.g.
flatpak_create_dockerfile), this method *must* be used to replace the existing
dockerfile_images object with a new one and re-apply some mutations.
"""
df_images = self.data.dockerfile_images
# Consider dockerfile_images data was saved when previous task ended,
# e.g. prebuild, now subsequent task starts to run and the saved data
# is loaded into the dockerfile_images object. In this case, no need
# to update the restored dockerfile_images data.
if df_images.is_empty:
df_images = self._parse_dockerfile_images(path)
self.data.dockerfile_images = df_images
self.conf.update_dockerfile_images_from_config(df_images)
# But, still need to do this
self.imageutil.set_dockerfile_images(df_images)
def _parse_dockerfile_images(self, path: str) -> DockerfileImages:
dfp = DockerfileParser(path)
if dfp.baseimage is None:
raise RuntimeError("no base image specified in Dockerfile")
dockerfile_images = DockerfileImages(dfp.parent_images)
logger.debug("base image specified in dockerfile = '%s'", dfp.baseimage)
logger.debug("parent images specified in dockerfile = '%s'", dfp.parent_images)
custom_base_images = set()
for image in dfp.parent_images:
image_name = ImageName.parse(image)
image_str = image_name.to_str()
if base_image_is_custom(image_str):
custom_base_images.add(image_str)
if len(custom_base_images) > 1:
raise NotImplementedError("multiple different custom base images"
" aren't allowed in Dockerfile")
# validate user has not specified COPY --from=image
builders = []
for stmt in dfp.structure:
if stmt['instruction'] == 'FROM':
# extract "bar" from "foo as bar" and record as build stage
match = re.search(r'\S+ \s+ as \s+ (\S+)', stmt['value'], re.I | re.X)
builders.append(match.group(1) if match else None)
elif stmt['instruction'] == 'COPY':
match = re.search(r'--from=(\S+)', stmt['value'], re.I)
if not match:
continue
stage = match.group(1)
# error unless the --from is the index or name of a stage we've seen
if any(stage in [str(idx), builder] for idx, builder in enumerate(builders)):
continue
raise RuntimeError(dedent("""\
OSBS does not support COPY --from unless it matches a build stage.
Dockerfile instruction was:
{}
To use an image with COPY --from, specify it in a stage with FROM, e.g.
FROM {} AS source
FROM ...
COPY --from=source <src> <dest>
""").format(stmt['content'], stage))
return dockerfile_images
@functools.cached_property
def imageutil(self) -> imageutil.ImageUtil:
"""Get an ImageUtil instance.
The property is lazy, subsequent calls will return the same instance. This is important
for performance reasons (ImageUtil caches registry queries, a new instance would not have
the cache).
"""
return imageutil.ImageUtil(self.data.dockerfile_images, self.conf)
def parent_images_to_str(self):
results = {}
for base_image_name, parent_image_name in self.data.dockerfile_images.items():
base_str = str(base_image_name)
parent_str = str(parent_image_name)
if base_image_name and parent_image_name:
results[base_str] = parent_str
else:
logger.debug("None in: base %s has parent %s", base_str, parent_str)
return results
@property
def image(self):
return self.user_params['image_tag']
@property
def build_process_failed(self):
"""
Has any aspect of the build process failed?
"""
return self.data.plugin_failed
def throw_canceled_build_exception(self, *args, **kwargs):
self.data.build_canceled = True
raise BuildCanceledException("Build was canceled")
def build_docker_image(self) -> None:
"""Start the container build.
In general, all plugins run in order and the execution can be
terminated by sending SIGTERM signal to atomic-reactor.
When argument ``keep_plugins_running`` is set, the specified plugins
are all ensured to be executed and the SIGTERM signal is ignored.
"""
print_version_of_tools()
try:
self.fs_watcher.start()
if self.keep_plugins_running:
signal.signal(signal.SIGTERM, signal.SIG_IGN)
else:
signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
runner = PluginsRunner(self,
self.plugins_conf,
self.plugin_files,
self.keep_plugins_running,
plugins_results=self.data.plugins_results)
runner.run()
finally:
signal.signal(signal.SIGTERM, signal.SIG_DFL)
self.fs_watcher.finish()
| StarcoderdataPython |
1643536 | """
This tutorial shows you how to use a data recorder to record data (for
imitation learning, for instance), how to load the data again, and how to
replay episodes.
"""
from causal_world.envs.causalworld import CausalWorld
from causal_world.task_generators.task import generate_task
from causal_world.loggers.data_recorder import DataRecorder
from causal_world.loggers.data_loader import DataLoader
import causal_world.viewers.task_viewer as viewer
def example():
    # Here you learn how to record/log entire episodes into a directory
    # to reuse them later, e.g. for reviewing logged episodes or using this
    # data for pre-training policies.
    # Construct a data_recorder that keeps track of every change in the environment.
    # We set the episode dump frequency into log files (rec_dumb_frequency) to 11 (default is 100).
data_recorder = DataRecorder(output_directory='pushing_episodes',
rec_dumb_frequency=11)
# Pass the data recorder to the World
task = generate_task(task_generator_id='pushing')
env = CausalWorld(task=task,
enable_visualization=True,
data_recorder=data_recorder)
# Record some episodes
for _ in range(23):
env.reset()
for _ in range(50):
env.step(env.action_space.sample())
env.close()
# Load the logged episodes
data = DataLoader(episode_directory='pushing_episodes')
episode = data.get_episode(14)
# Initialize a new environment according a specific episode and replay it
task = generate_task(episode.task_name, **episode.task_params)
env = CausalWorld(task, **episode.world_params, enable_visualization=True)
env.set_starting_state(episode.initial_full_state,
check_bounds=False)
for action in episode.robot_actions:
env.step(action)
env.close()
# You can achieve the same by using the viewer module in one line
viewer.view_episode(episode)
if __name__ == '__main__':
example()
| StarcoderdataPython |
4821907 | <gh_stars>1-10
import numpy
import pandas
import pymol
from io import StringIO
#import itertools
import os
import re
import subprocess
import sys
import time
from csubst import sequence
from csubst import utility
def initialize_pymol(g):
#pymol.pymol_argv = ['pymol','-qc']
#pymol.finish_launching()
pymol.cmd.do('delete all')
is_old_pdb_code = bool(re.fullmatch('[0-9][A-Za-z0-9]{3}', g['pdb']))
is_new_pdb_code = bool(re.fullmatch('pdb_[0-9]{5}[A-Za-z0-9]{3}', g['pdb']))
if is_old_pdb_code|is_new_pdb_code:
print('Fetching PDB code {}. Internet connection is needed.'.format(g['pdb']), flush=True)
pymol.cmd.do('fetch {}'.format(g['pdb']))
else:
print('Loading PDB file: {}'.format(g['pdb']), flush=True)
pymol.cmd.load(g['pdb'])
def write_mafft_map(g):
tmp_pdb_fasta = 'tmp.csubst.pdb_seq.fa'
mafft_map_file = tmp_pdb_fasta+'.map'
if os.path.exists(mafft_map_file):
os.remove(mafft_map_file)
pdb_seq = pymol.cmd.get_fastastr(selection='polymer.protein', state=-1, quiet=1)
with open(tmp_pdb_fasta, 'w') as f:
f.write(pdb_seq)
sequence.write_alignment(outfile='tmp.csubst.leaf.aa.fa', mode='aa', g=g, leaf_only=True)
cmd_mafft = [g['mafft_exe'], '--keeplength', '--mapout', '--quiet',
'--thread', '1',
'--op', str(g['mafft_op']),
'--ep', str(g['mafft_ep']),
'--add', tmp_pdb_fasta,
'tmp.csubst.leaf.aa.fa',
]
out_mafft = subprocess.run(cmd_mafft, stdout=subprocess.PIPE)
with open(g['mafft_add_fasta'], 'w') as f:
f.write(out_mafft.stdout.decode('utf8'))
for i in range(10):
if os.path.exists(mafft_map_file):
print('mafft map file was generated.', flush=True)
break
else:
print('mafft map file not detected. Waiting {:} sec'.format(i+1), flush=True)
time.sleep(1)
txt = 'CSUBST does not exclude poorly aligned regions. ' \
'Please carefully check {} before biological interpretation of substitution events.'
print(txt.format(g['mafft_add_fasta']), flush=True)
if os.path.getsize(g['mafft_add_fasta'])==0:
sys.stderr.write('File size of {} is 0. A wrong ID might be specified in --pdb.\n'.format(g['mafft_add_fasta']))
sys.stderr.write('Exiting.\n')
sys.exit(1)
def get_residue_numberings():
out = dict()
object_names = pymol.cmd.get_names()
for object_name in object_names:
if object_name.endswith('_pol_conts'):
continue
for ch in pymol.cmd.get_chains(object_name):
pymol.stored.residues = []
txt_selection = '{} and chain {} and name ca'.format(object_name, ch)
pymol.cmd.iterate(selection=txt_selection, expression='stored.residues.append(resi)')
residue_numbers = [ int(x) for x in pymol.stored.residues if not bool(re.search('[a-zA-Z]', x)) ]
residue_numbers = sorted(list(set(residue_numbers))) # Drop occasionally observed duplications
residue_iloc = numpy.arange(len(residue_numbers)) + 1
col1 = 'codon_site_'+object_name+'_'+ch
col2 = 'codon_site_pdb_'+object_name+'_'+ch
dict_tmp = {col1:residue_iloc, col2:residue_numbers}
df_tmp = pandas.DataFrame(dict_tmp)
out[object_name+'_'+ch] = df_tmp
return out
def add_pdb_residue_numbering(df):
residue_numberings = get_residue_numberings()
object_names = pymol.cmd.get_names()
for object_name in object_names:
for ch in pymol.cmd.get_chains(object_name):
key = object_name+'_'+ch
if residue_numberings[key].shape[0]==0:
sys.stderr.write('PDB protein sequence is unavailable: {}\n'.format(key))
continue
df = pandas.merge(df, residue_numberings[key], on='codon_site_'+key, how='left')
df.loc[:,'codon_site_pdb_'+key] = df.loc[:,'codon_site_pdb_'+key].fillna(0).astype(int)
return df
def add_mafft_map(df, mafft_map_file='tmp.csubst.pdb_seq.fa.map'):
with open(mafft_map_file, 'r') as f:
map_str = f.read()
map_list = map_str.split('>')[1:]
for map_item in map_list:
seq_name = re.sub('\n.*', '', map_item)
seq_csv = re.sub(seq_name+'\n', '', map_item)
if seq_csv.count('\n')==1: # empty data
df.loc[:,'codon_site_'+seq_name] = 0
df.loc[:,'aa_'+seq_name] = 0
else:
df_tmp = pandas.read_csv(StringIO(seq_csv), comment='#', header=None)
df_tmp.columns = ['aa_'+seq_name, 'codon_site_'+seq_name, 'codon_site_alignment']
is_missing_in_aln = (df_tmp.loc[:,'codon_site_alignment']==' -')
df_tmp = df_tmp.loc[~is_missing_in_aln,:]
df_tmp.loc[:,'codon_site_alignment'] = df_tmp.loc[:,'codon_site_alignment'].astype(int)
df = pandas.merge(df, df_tmp, on='codon_site_alignment', how='left')
df.loc[:,'codon_site_'+seq_name] = df.loc[:,'codon_site_'+seq_name].fillna(0).astype(int)
df.loc[:,'aa_'+seq_name] = df.loc[:,'aa_'+seq_name].fillna('')
return df
def calc_aa_identity(g):
seqs = sequence.read_fasta(path=g['mafft_add_fasta'])
seqnames = list(seqs.keys())
pdb_base = re.sub('\..*', '', os.path.basename(g['pdb']))
pdb_seqnames = [ sn for sn in seqnames if sn.startswith(pdb_base) ]
other_seqnames = [ sn for sn in seqnames if not sn.startswith(pdb_base) ]
aa_identity_values = dict()
for pdb_seqname in pdb_seqnames:
aa_identity_values[pdb_seqname] = []
for other_seqname in other_seqnames:
aa_identity = sequence.calc_identity(seq1=seqs[pdb_seqname], seq2=seqs[other_seqname])
aa_identity_values[pdb_seqname].append(aa_identity)
aa_identity_values[pdb_seqname] = numpy.array(aa_identity_values[pdb_seqname])
aa_identity_means = dict()
for pdb_seqname in pdb_seqnames:
aa_identity_means[pdb_seqname] = aa_identity_values[pdb_seqname].mean()
aa_ranges = dict()
for pdb_seqname in pdb_seqnames:
alphabet_sites = [ m.start() for m in re.finditer('[a-zA-Z]', seqs[pdb_seqname]) ]
aa_start = min(alphabet_sites)
aa_end = max(alphabet_sites)
aa_ranges[pdb_seqname] = [aa_start, aa_end]
g['aa_identity_values'] = aa_identity_values
g['aa_identity_means'] = aa_identity_means
g['aa_spans'] = aa_ranges
return g
def mask_subunit(g):
colors = ['wheat','slate','salmon','brightorange','violet','olive',
'firebrick','pink','marine','density','cyan','chocolate','teal',]
colors *= 10 # for supercomplex
g = calc_aa_identity(g)
pdb_seqnames = list(g['aa_identity_means'].keys())
if len(pdb_seqnames)==1:
return None
max_aa_identity_mean = max(g['aa_identity_means'].values())
for pdb_seqname in pdb_seqnames:
aa_identity_mean = g['aa_identity_means'][pdb_seqname]
if abs(max_aa_identity_mean-aa_identity_mean)<g['float_tol']:
max_pdb_seqname = pdb_seqname
max_spans = g['aa_spans'][pdb_seqname]
break
i = 0
for pdb_seqname in pdb_seqnames:
if pdb_seqname==max_pdb_seqname:
continue
spans = g['aa_spans'][pdb_seqname]
is_nonoverlapping_N_side = (max_spans[1] < spans[0])
is_nonoverlapping_C_side = (max_spans[0] > spans[1])
if (is_nonoverlapping_N_side|is_nonoverlapping_C_side):
continue
chain = pdb_seqname.replace(g['pdb']+'_', '')
print('Masking chain {}'.format(chain), flush=True)
pymol.cmd.do('color {}, chain {} and polymer.protein'.format(colors[i], chain))
i += 1
for chain in pymol.cmd.get_chains(selection='polymer.nucleic'):
print('Masking chain {}'.format(chain), flush=True)
pymol.cmd.do('color {}, chain {} and polymer.nucleic'.format(colors[i], chain))
i += 1
def set_color_gray(object_names, residue_numberings):
for object_name in object_names:
if object_name.endswith('_pol_conts'):
continue
for ch in pymol.cmd.get_chains(object_name):
key = object_name+'_'+ch
codon_site_pdb = residue_numberings[key].loc[:,'codon_site_pdb_'+key]
is_nonzero = (codon_site_pdb!=0)
residue_start = codon_site_pdb.loc[is_nonzero].min()
residue_end = codon_site_pdb.loc[is_nonzero].max()
cmd_color = "color gray80, {} and chain {} and resi {:}-{:}"
pymol.cmd.do(cmd_color.format(object_name, ch, residue_start, residue_end))
def set_substitution_colors(df, g, object_names, N_sub_cols):
for object_name in object_names:
if object_name.endswith('_pol_conts'):
continue
for ch in pymol.cmd.get_chains(object_name):
codon_site_col = 'codon_site_pdb_'+object_name+'_'+ch
if not codon_site_col in df.columns:
continue
color_sites = dict()
color_sites['Nany2spe'] = []
color_sites['Nany2dif'] = []
color_sites['single_sub'] = []
for i in df.index:
codon_site = df.at[i,codon_site_col]
prob_Nany2spe = df.at[i,'Nany2spe']
prob_Nany2dif = df.at[i,'Nany2dif']
prob_single_sub = df.loc[i,N_sub_cols].max()
if codon_site==0:
continue
elif (prob_Nany2spe>=g['pymol_min_combinat_prob'])&(prob_Nany2dif<=prob_Nany2spe):
color_sites['Nany2spe'].append(codon_site)
elif (prob_Nany2dif>=g['pymol_min_combinat_prob'])&(prob_Nany2dif>prob_Nany2spe):
color_sites['Nany2dif'].append(codon_site)
elif (prob_single_sub>=g['pymol_min_single_prob']):
color_sites['single_sub'].append(codon_site)
for key in color_sites.keys():
if key=='Nany2spe':
hex_value = utility.rgb_to_hex(r=1, g=0, b=0)
elif key=='Nany2dif':
hex_value = utility.rgb_to_hex(r=0, g=0, b=1)
elif key=='single_sub':
hex_value = utility.rgb_to_hex(r=0.4, g=0.4, b=0.4)
print('Amino acid sites with {} will be painted with {}.'.format(key, hex_value), flush=True)
txt_resi = '+'.join([str(site) for site in color_sites[key]])
cmd_color = "color {}, {} and chain {} and resi {}"
pymol.cmd.do(cmd_color.format(hex_value, object_name, ch, txt_resi))
if key in ['Nany2spe','Nany2dif']:
cmd_tp = "set transparency, 0.3, {} and chain {} and resi {:}"
pymol.cmd.do(cmd_tp.format(object_name, ch, txt_resi))
def write_pymol_session(df, g):
df = df.reset_index(drop=True)
pymol.cmd.do('set seq_view, 1')
if g['remove_solvent']:
pymol.cmd.do("remove solvent")
if g['remove_ligand']:
molecule_codes = g['remove_ligand'].split(',')
for molecule_code in molecule_codes:
pymol.cmd.do("remove resn "+molecule_code)
pymol.cmd.do("preset.ligand_sites_trans_hq(selection='all')")
pymol.cmd.do("hide wire")
pymol.cmd.do("hide ribbon")
pymol.cmd.do("show cartoon")
pymol.cmd.do("show surface")
pymol.cmd.do("set transparency, 0.65")
object_names = pymol.cmd.get_names()
#residue_numberings = get_residue_numberings()
#set_color_gray(object_names, residue_numberings)
pymol.cmd.do("color gray80, polymer.protein")
pymol.cmd.do('util.cbag organic')
N_sub_cols = df.columns[df.columns.str.startswith('N_sub_')]
set_substitution_colors(df, g, object_names, N_sub_cols)
if g['mask_subunit']:
mask_subunit(g)
pymol.cmd.do('zoom')
pymol.cmd.deselect()
pymol.cmd.save(g['session_file_path'])
def quit_pymol():
pymol.cmd.quit(code=0)
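# Plausible calling order for the helpers above (an assumption for illustration;
# the actual driver lives elsewhere in csubst). `g` is the parameter dict used
# throughout, with keys such as 'pdb', 'mafft_exe' and 'session_file_path', and
# `df` is the per-site substitution table:
#
#   initialize_pymol(g)
#   write_mafft_map(g)
#   df = add_mafft_map(df)
#   df = add_pdb_residue_numbering(df)
#   write_pymol_session(df, g)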
| StarcoderdataPython |
1697151 | <filename>pyon/core/governance/conversation/core/local_type.py
class Enum(set):
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
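# Illustration of the helper above: for the LocalType instance defined below,
# LocalType.SEND evaluates to the string "SEND", while accessing an undefined
# name such as LocalType.FOO raises AttributeError.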
LocalType = Enum(["SEND", "RESV"])
| StarcoderdataPython |
1607709 | # -*- coding: utf-8 -*-#
# -------------------------------------------------------------------------------
# Name: 3.8
# Description: Multilayer perceptron chapter
# Author: xuyapeng
# Date: 2020/8/26
# -------------------------------------------------------------------------------
import tensorflow as tf
from matplotlib import pyplot as plt
import numpy as np
import random
def set_figsize(figsize=(3.5, 2.5)):
    # Set the figure size
plt.rcParams['figure.figsize'] = figsize
def xyplot(x_vals, y_vals, name):
plt.figure()
set_figsize(figsize=(5, 2.5))
plt.plot(x_vals.numpy(), y_vals.numpy())
plt.xlabel('x')
plt.ylabel(name + '(x)')
x = tf.Variable(tf.range(-8, 8, 0.1), dtype=tf.float32)
with tf.GradientTape(persistent=True) as tape:
tape.watch(x)
relu_y = tf.nn.relu(x)
sigmoid_y = tf.nn.sigmoid(x)
tanh_y = tf.nn.tanh(x)
relu_dy_dx = tape.gradient(relu_y, x)
sigmoid_dy_dx = tape.gradient(sigmoid_y, x)
tanh_dy_dx = tape.gradient(tanh_y, x)
row_num = 3
col_num = 2
fig, ax = plt.subplots(row_num, col_num, figsize=(10, 10))
fig.subplots_adjust(hspace=1, wspace=0.3)
plot_data_list = [{'relu': relu_y, 'relu_dy_dx': relu_dy_dx},
{'sigmoid': sigmoid_y, 'sigmoid_dy_dx': sigmoid_dy_dx},
{'tanh': tanh_y, 'tanh_dy_dx': tanh_dy_dx}]
# for i in range(row_num):
# ax[i, 0].plot(x.numpy(), relu_y.numpy())
# for j in range(1, col_num):
# ax[i, j].plot(x.numpy(), relu_dy_dx.numpy())
for i in range(len(plot_data_list)):
for j, key in enumerate(plot_data_list[i].keys()):
ax[i, j].plot(x.numpy(), plot_data_list[i][key].numpy())
ax[i, j].set_xlabel(key)
plt.show()
| StarcoderdataPython |
1631001 | # -*- coding: UTF-8 -*-
# @Time: 2020-06-11 17:09
# @Author: wyd
# @File: toast获取.py
from appium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from appium.webdriver.common.mobileby import MobileBy
import time
desired_caps = {}
# Automation engine
desired_caps["automationName"] = "UiAutomator2"
# Platform type
desired_caps["platformName"] = "Android"
# Platform version
desired_caps["platformVersion"] = "7.1.2"
# Device name
desired_caps["deviceName"] = "Android Emulator"
# App package name
desired_caps["appPackage"] = "com.baidu.netdisk"
# App entry activity
desired_caps["appActivity"] = "com.baidu.netdisk.ui.Navigate"
# Connect to the appium_server. Prerequisite: appium_desktop must be running and listening on its port.
# Send desired_caps to the appium_server and open the app.
driver = webdriver.Remote("http://127.0.0.1:4723/wd/hub", desired_caps)
# Accept the agreement
WebDriverWait(driver, 10).until(
EC.visibility_of_element_located((MobileBy.ID, 'com.baidu.netdisk:id/dialog_button_confirm')))
driver.find_element_by_id("com.baidu.netdisk:id/dialog_button_confirm").click()
# WeChat quick login
WebDriverWait(driver, 10).until(EC.visibility_of_element_located((MobileBy.ID, 'com.baidu.netdisk:id/wxLoginBtn')))
driver.find_element_by_id('com.baidu.netdisk:id/wxLoginBtn').click()
# XPath expression, matched by text (the toast text "微信未安装" means "WeChat is not installed")
loc = (MobileBy.XPATH, '//*[contains(@text,"微信未安装")]')
# When waiting, use the element-presence condition presence_of_element_located, not the element-visibility condition
try:
WebDriverWait(driver, 5).until(EC.presence_of_element_located(loc))
print(driver.find_element_by_xpath('//*[contains(@text,"微信未安装")]').text)
except:
    print('No matching toast was found')
| StarcoderdataPython |
1638684 | from StaticMethods.Exceptions import exception
from StaticMethods.addition import addition
from StaticMethods.division import division
from StaticMethods.roundOff import roundOff
def calMedian(a):
length = len(a)
half = int(division(2, length))
for val in a:
exception(val)
if length % 2 == 0:
median = division(2, addition(a[half - 1], a[half]))
else:
median = a[half]
return roundOff(median)
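# Illustrative behaviour (assuming the input list is already sorted and that
# roundOff keeps a couple of decimal places):
#
#   calMedian([1, 3, 5])     # -> 3
#   calMedian([1, 2, 3, 4])  # -> 2.5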
| StarcoderdataPython |