lang (stringclasses, 10 values) | seed (stringlengths, 5–2.12k) |
---|---|
python | # See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import pytest
import subprocess |
python | import numpy as np
print "Equal?", np.testing.assert_equal((1, 2), (1, 3))
|
python | # Write results
f = open(output_file,'w')
line = 'ct_all\t%s\tseq\n'%('\t'.join(['ct_%d'%n for n in sample_nums]))
f.write(line)
for i in indices:
seq = seqs[i]
counts = seq_to_counts_dict[seq]
line = '\t'.join([str(x) for x in counts]) + '\t' + seq + '\n'
f.write(line)
f.close() |
python | display2 = myrotate(display2, 90)
display = np.hstack([display1, display2])
cv.imshow("prerot", (prerot*255).astype(np.uint8))
cv.imshow("hippodeep - me", (display*255).astype(np.uint8))
if cv.waitKey(0) == 27:
cv.imwrite("display.png", (display*255).astype(np.uint8))
quit()
|
python |
if len(self.inputs) % len(folds):
raise TransformException(f"Ragged input count {len(self.inputs)} in {self.name}")
group_size = len(self.inputs) // len(folds)
if group_size < 1: |
python | """
command = f"getstate,{addr}"
CMD = bytes(command, encoding='utf8')+CR
response = await self.raw_request(CMD)
return response
async def getversion(self, module):
"""Get the version number of 'module'.
The 'module' number is 1, 2, 3, etc. It is _not_ a "connector address".
"""
command = f"getversion,{module}"
CMD = bytes(command, encoding='utf8')+CR |
python |
# Internal Settings
VR_PAYMENT_SHOPPER_RESULT_URL_NAME = getattr(
settings, "VR_PAYMENT_SHOPPER_RESULT_URL_NAME", "vr-payment:return"
) |
python | break
logging.info(f"Last index reached: {index}")
num_class = index + 1
if job_ids:
jobs.append(submit_job(client, job_ids, start_index, num_class))
job_ids.clear()
logging.info(f"{len(jobs)} jobs downloading {total_num_comments} comments...")
dataframes = [None] * len(jobs) |
python | """File with all exceptions for this python package"""
class SeleniumScreenshotsError(Exception):
"""Main exception of this python package
"""
|
python | import dimod
SPIN = dimod.SPIN
BINARY = dimod.BINARY
Vartype = dimod.Vartype
def to_cxxcimod(var_type):
# convert to cxxcimod type |
python | items: The combination of Interfaces and/or LSPs that the Demand takes
from source to destination
splits: each item on the path (Interface and/or LSP) and the number of cumulative
ECMP path splits that the Demand has transited as it egresses the source node for
that element. |
python | from std_msgs.msg import Int32
from std_msgs.msg import String
n = 0
a = ['必う日は近い','他人に妨げられる事あり','願いを伝えよ','他人を助けよ、助けることで叶う','初めは思わしくないが後は必ず良し','2つの願いを一度に叶えんとすれば悪し','思いがけぬ人の助けありて叶うことあり','にわかに事をなさんとすれば災いあり','必ず叶う','早くすれば他人の助けありて調う','少し時がかかるが叶う','願い難し、努力せよ','叶い難く利なけれど後自然成就す','無理に事をなすは悪し、時を待て叶う','遅くとも必ず叶う','叶いにくい']
b = ['来ず、待たない方が良い','便りが有り来る','音信有り来る','来る','来る、連れあり','便り無し来ず','すぐあり','非を認めれば来る','遅いが来る','来ること難し','待つことで災いあり','来る喜びあり','さわりなく来る','速やかに来たる','かなり遅く来る']
c = ['上方にあり','下方にあり','低き所にて見つかる','高き所にあり','男子の知ることあり','女にとふべし','手近より出る','出ず','いつのまにかある','苦労して捜し出すことになる','家族にとふと良い','忘れたころに出る','必ず出る、早く捜せ','遠くにあり','出るとも遅し','戻らず','北を捜すべし','西を捜すべし','南を捜すべし','東を捜すべし','捜さずとも出る']
d = ['列車の旅が良い','早朝出立すべし','出発は吉日を選べ','ゆるゆる行くべし','つれあれば共に行け','近き所、特に良し','行く先利益あり','船旅良し','災いあり控えるべし','遠くに縁あり','何れに行くも損なし','水難に注意すべし','北は控えよ','特に南よし','夏に行くが良し','海に近いところ良し','海を越えると良い','山の近くが良い','秋に行くが良し','夜に出立すべし']
e = ['確かなる利益あり','利あり、売るに良し','物の値、大きく変動あり','乗り気にてやるべし','利益少なし','値段上下無し','苦労の甲斐あり','売り買い何れも吉','成せば成る','隆昌に向かうが慎重にせよ','投資せよ','買うに良し、利あり','利益少なし、時を待て','利益相当あり','己の思うままに成すべし','俄かに下ることあり','災い有り','利も損も無し','少し待て','他人の手を借りるべし','後に利益あり','利益有るが何れか失う','物の値下げるべし','物の値上げるべし']
f = ['初心に返ると良い','他人を当てにすること勿れ','安心して勉学せよ','幸運訪れ有','焦らず頑張','全力を尽くせば必ず叶う','順調に進む','挫けずに努力せよ','友達を選べ','実力以上を発揮','日々の積重現わる','感謝の心で一筋に努力せよ、叶う','自己への甘えを捨てよ','寝不足に注意','雑念多し、努力せよ','頑張れば幸運有り','辛ければ他人に相談せよ']
g = ['人生の伴走者に出会う','愛情を捧げよ','愛を捧げよ倖せあり','短気は敵','自身に問題あり','お互いを知れ','一途な想いが愛を深める、行動で示せ','楽しむことが吉','考え過ぎれば災いあり','多くを話合うこと','我を張らず愛情を捧げる事','思わぬ出会い有り、逃すな','相手に誠意無し','深入りするな','周りに気を付ける事','他人の話を聞くこと','贈り物に注意','焦らず考えよ','器大きくあるべし','自己に甘えることなく勇気を出すこと吉']
h = ['日頃を気をつけよ','治る気第一','信心によりてなおる','軽し治る','治る','重し、気をつけよ','休むが一番の治療','重いが全快す','全快早し','重くない治る','睡眠第一','食生活に気をつけよ','医者の話を聞くべし','細心の注意を払え','周りの話を聞くべし','再発に気をつけよ','気持ち次第で必ず治る','落ち着いて待つべし'] |
python | game_status.bbref_game_id = bbref_game_id
if bb_game_id:
game_status.bb_game_id = bb_game_id
game_status.scrape_status_date_id = date_status.id
game_status.season_id = date_status.season_id
return game_status
def create_pitch_app_status_record(bbref_game_id, bb_game_id, game_status, player_id, pitch_app_id):
pitch_app_status = db.PitchAppScrapeStatus()
pitch_app_status.pitcher_id_mlb = player_id.mlb_id |
python | assert upload.validate_junction_data('leads', valid_data_json) is True
def test_check_data_columns():
"""Testing out check_data_columns method this is supposed to return a new
dictionary that has values for all the elements in a list
The new dictionary will have elements from the list as it's keys if there
is key missing from the dictionary |
python | response = self.request(type="mutation",
call='newQuickmemo(token: "{0}", title: "{1}", body: "{2}")'
.format(self.access_token, "idea title", "idea body"),
body='''
result{
... on ResponseMessageField{ |
python | print('=-'*30)
print(f'[ {linhas[0][0]} ] [ {linhas[0][1]} ] [ {linhas[0][2]} ]')
print(f'[ {linhas[1][0]} ] [ {linhas[1][1]} ] [ {linhas[1][2]} ]')
print(f'[ {linhas[2][0]} ] [ {linhas[2][1]} ] [ {linhas[2][2]} ]')
print('=-'*30)
print(f'A soma dos valores pares é {par}.'
f'\nA soma dos valores da terceira coluna é {c3}.' |
python |
msg_fmt = 'It\'s been a while, check for a new version of ' + \
'Intel(R) Distribution of OpenVINO(TM) toolkit here {0} or on the GitHub*'
def get_ov_update_message():
expected_update_date = datetime.date(year=2020, month=10, day=1)
current_date = datetime.date.today()
link = 'https://software.intel.com/en-us/openvino-toolkit/choose-download?cid=&source=upgrade&content=2020_3_LTS'
|
python | graphData = root.find("{GraphInfo}graphml")
return graphData.find("{GraphInfo}graph")
def getKeyFrames(root):
graphData = root.find("{GraphInfo}graphml")
keys = graphData.findall('{GraphInfo}key')
|
python | """
class BaseKeyValueStorage(object):
"""
The storage of key-values.
"""
def add(self, category, key, value=None):
"""
Add a new attribute. If the key already exists, raise an exception.
Args:
category: (string, int) the category of data.
key: (string) attribute's key. |
python | spark.catalog.listDatabases().select("name").show()
spark.catalog.listTables().show()
spark.catalog.isCached("sample_07")
spark.catalog.listFunctions().show()
|
python | ###############################################################################
from pyro.dynamic import pendulum
from pyro.control import nonlinear
from pyro.analysis import simulation
###############################################################################
sys = pendulum.SinglePendulum()
|
python | mpl.rcParams['mathtext.fontset'] = 'cm'
mpl.rcParams['mathtext.rm'] = 'serif'
import matplotlib.pyplot as plt
def label_diff_x(ax, x0, x1, y, cap_size, y_text, text):
xmid = x0 + ((x1 - x0) / 2)
ax.plot([x0, x0, x1, x1], [y - cap_size, y, y, y - cap_size], color='black', lw=1)
#
ax.annotate(text, xy=(xmid, y), xytext=(xmid, y_text), zorder=10, ha='center', va='center', fontsize='medium')
|
python | from eth_account._utils.transactions import serializable_unsigned_transaction_from_dict, encode_transaction
from web3 import Web3, HTTPProvider
def get_signature_prefix( signature_rs, address, transaction_hash, chainId, web3 ):
try:
r, s = signature_rs
except:
print( "Invalid signature argument!" )
raise SystemExit()
v = chainId * 2 + 35
if web3.eth.account._recover_hash( bytes( transaction_hash ), vrs=( v, r, s ) ) != address:
v = chainId * 2 + 36
|
python | size = np.array([ceil((1 - percentage) * im_size1), im_size2], dtype = np.int32)
w_start = 0
w_end = im_size2
h_start = 0
h_end = int(ceil((1 - percentage) * im_size1))
else: # Translate bottom 20 percent
offset = np.array([-percentage, 0.0], dtype = np.float32)
size = np.array([ceil((1 - percentage) * im_size1), im_size2], dtype = np.int32)
w_start = 0
w_end = im_size2
h_start = int(floor((percentage) * im_size1))
h_end = im_size1
return offset, size, w_start, w_end, h_start, h_end |
python | episode = re.search('Episode-([0-9]*)', text).group(1)
season = re.search('Season-([0-9]*)', text).group(1)
file_name = 'Coffee_Break_Spanish_S' + season + '_E' + episode
print(file_name) |
python | # If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
from pathlib import Path
import sys
src_dir = Path("../../src").absolute()
sys.path.insert(0, str(src_dir))
# -- Project information -----------------------------------------------------
project = "ixdat"
copyright = "2020, the ixdat team"
author = "ixdat team" |
python | socket_listener_thread = threading.Thread(target=listener, args=(client_sock,))
socket_listener_thread.daemon = True
socket_listener_thread.start()
#Starts connection thread
listen_thread = threading.Thread(target=listen_for_connections)
listen_thread.daemon = True
listen_thread.start()
root.mainloop()
|
python | help='max learning rate, must be more than args.lr')
parser.add_argument('--lr-period-updates', default=5000, type=float, metavar='LR',
help='initial number of updates per period (cycle length)')
parser.add_argument('--shrink-min', action='store_true',
help='if set, also shrinks min lr') |
python |
# load weights for transfer learning
if self.transfer_learning:
model.load_weights(load_weight_file_path(self.transfer_model_name))
# for outer_loop in tqdm(range(outer_training_steps)):
for steps in range(self.training_steps):
# fetch training data |
python | name: str
# Fields used in dependencies MUST be declared with `field`
credit_card: NotNull[int] = field(default=None)
billing_address: NotNull[str] = field(default=None)
dependencies = dependent_required({credit_card: [billing_address]})
# it can also be done outside the class with
# dependent_required({"credit_card": ["billing_address"]}, owner=Billing)
|
python | self: ivy.Container,
low: Union[float, ivy.Container] = 0.0,
high: Union[float, ivy.Container] = 1.0,
device: Optional[Union[ivy.Device, ivy.NativeDevice, ivy.Container]] = None,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False, |
python |
agent = NomadAgent()
agent.deploy_flow(
flow_run=GraphQLResult(
{
"flow": GraphQLResult(
{ |
python | ims['query'] = str(ims['b_resolved_monster_id'])
ims['resolved_monster_id'] = None
id_view_state = await IdViewState.deserialize(dgcog, user_config, ims)
id_control = TransformInfoMenu.id_control(id_view_state)
return id_control
@staticmethod
async def respond_with_transform(message: Optional[Message], ims, **data):
dgcog = data['dgcog']
user_config = data['user_config']
|
python | import os
for folder, subfolders, files in os.walk(os.getcwd()):
for file in files:
filepath = os.path.join(os.path.abspath(folder), file)
print(filepath)
|
python | >>> height(t)
1
>>> t = descendants_from_list(Tree(13), [0, 1, 3, 5, 7, 9, 11, 13], 3)
>>> height(t)
3
"""
return 1 + max([height(c) for c in t.children]) if t.children else 1
def count(t): |
python | <gh_stars>0
import math
def main(inputstring):
return str(eval(inputstring))
|
python | from django.http import HttpResponse
from easy_pdf.views import PDFTemplateView
from .models import Producto
from django.views.generic import ListView
from django.db.models import Count, Q
|
python | class Migration(migrations.Migration):
dependencies = [
('main', '0012_job_master_job'),
]
operations = [
migrations.AddField(
model_name='client',
name='port',
field=models.PositiveIntegerField(blank=True, default=22, max_length=5, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(65535)]),
),
]
|
python | def reply(msg, options):
request = sk.Request()
request.ParseFromString(msg)
if options.wait > 0:
time.sleep(options.wait)
resp = sk.Response()
resp.result = sk.Response.RES_OK |
python | return None
new_response = {}
for key, value in buildbucket_response.items():
python | 'labelSet': {
'instance_type': 'small'
},
'ruleset': [
{
'metric': 'request_cpu',
'value': 0.005,
'unit': 'core-hours'
},
{
'metric': 'usage_cpu',
'value': 0.015,
'unit': 'core-hours'
},
{ |
python | url(
r'(?P<swot_id>[0-9]+)/items/votes/$',
views.vote_list,
name='get'
),
url(
r'items/(?P<swot_item_id>[0-9]+)/votes/$',
views.vote,
name='post'
), |
python | valor = preco+(preco*(taxa/100))
return valor
def diminuir (preco,taxa):
valor = preco - (preco*(taxa/100))
return valor
def metade (preco = 0):
valor = preco/2
return valor
|
python | # -*- coding: utf-8 -*-
#----------------------------------------------------
def primo(n):
'''(int) -> bool
RECEIVES an integer n.
RETURNS True if n is prime and False otherwise.
'''
# remove the print() and write your function below
primo = True
if n <= 1: |
python | import pytest
import importlib
import os.path
import sys
def test_config_paths():
assert os.path.exists('/etc/profile.d/init_conda.sh')
assert os.path.exists('/srv/conda/.condarc')
assert os.path.exists('/srv/start')
assert os.path.exists('/srv/conda/etc/dask.yml') |
python | #!/usr/bin/env python
#-*- coding:Utf-8 -*-
import function as f
if __name__ == '__main__':
f.start_game()
|
python | yaml_in_file = open(self.template_dir + '/cassandra.yaml.template', 'r')
yaml_out_file = open(host_dir + '/cassandra.yaml', 'w+')
changes = { "LOCAL_ADDRESS":container['data_ip'],
"DATA_DIR":config.data_directory,
"CACHE_DIR":config.cache_directory, |
python | parsed_naixement = row['naixement']
try:
nou_alumne = Alumne.objects.filter(
nom = stripped_nom,
cognom1 = stripped_cognom1,
cognom2 = stripped_cognom2,
num_llista = row['id_nen'],
)[0] |
python | return (False, True)
#if self.map[int(toPoint.y)][int(toPoint.x)]==2:
# inPadding = True
points = self.model.calculateCorners(toPoint, th1, th2)
#header
right_back_wheel = Point(points[5][0], points[5][1])
left_back_wheel = Point(points[4][0], points[4][1])
right_front_wheel = Point(points[1][0], points[1][1])
left_front_wheel = Point(points[0][0], points[0][1])
left_front = Point(points[6][0], points[6][1]) |
python | 7.00000000e+00, 8.00000000e+00, 8.333333e+00])
cls.model_json = model_to_json(cls.model)
def test_model_save_and_load(self):
prophet_model = ProphetModel(self.model_json, 1, "d", "ds")
with mlflow.start_run() as run:
mlflow_prophet_log_model(prophet_model)
# Load the saved model from mlflow
run_id = run.info.run_id
prophet_model = mlflow.pyfunc.load_model(f"runs:/{run_id}/model") |
python | if event.fwd_from:
await func(event)
else:
pass
return wrapper
return decorator
def am_i_admin():
|
python | api.do_q68(answer, api)
elif answer.question.pk == 147: # business_strengths
api.do_q147(answer, api)
elif answer.question.pk == 49: # target_market_types, target_market_first_traits, target_market_second_traits
api.do_q49(answer, api)
elif answer.question.pk == 51: # customer_price_sensitivity |
python |
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
# TODO |
python |
Update a release. This can change some metadata associated with
the release (the ref, url, and dates).
:pparam string organization_slug: the slug of the organization the
release belongs to.
:pparam string project_slug: the slug of the project to change the
release of.
:pparam string version: the version identifier of the release.
:param string ref: an optional commit reference. This is useful if
a tagged version has been provided.
:param url url: a URL that points to the release. This can be the |
python | # Use of this source code is governed by a BSD-3-clause license that can be
# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
from __future__ import print_function as _
from __future__ import division as _
from __future__ import absolute_import as _ |
python | def test_init(self):
value_function = TabularQFunction(num_states=4, num_actions=2)
torch.testing.assert_allclose(value_function.table, 0)
def test_compile(self):
torch.jit.script(TabularQFunction(num_states=4, num_actions=2))
|
python |
assert dvc.stage.collect_granular("dir") == [(stage2, None)]
assert dvc.stage.collect_granular("dir", recursive=True) == [
(stage1, None)
]
remove(tmp_dir / "dir")
|
python | #!/usr/bin/env python3
from pwn import *
binary = ELF('sort_it')
binary.write(0x1208,5*b'\x90')
binary.save('sort_it_patched')
os.chmod('sort_it_patched',0o755)
|
python | def imdecode(bytes):
"""Decode byte string to float64 image in [-1.0, 1.0].
Args:
bytes: Byte string.
Returns:
A float64 image in [-1.0, 1.0]. |
python |
install_requires = [x.strip() for x in all_reqs if 'git+' not in x]
# if platform.system() != "Windows":
# install_requires.append('pygdal==' + PYGDAL_VERSION)
|
python |
K3=np.array([Vnow+K2[1]*h/2,csm(Pnow+K2[0]*h/2,Vnow+K2[1]*h/2)])
K4=np.array([Vnow+K3[1]*h,csm(Pnow+K3[0]*h,Vnow+K3[1]*h)])
Pnext=np.copy(Pnow) |
python | assert sqi.conditions[0] == ['a', SQL_OP.EQ, 'b']
sqi = SQLQueryInfo()
sqi.parse_then_add_condition('a', 'like', 'b')
assert sqi.conditions[0] == ['a', SQL_OP.LIKE, 'b']
for i in SQL_OP.ALL: |
python | ret_list = p.map(func, [group for name, group in dfGrouped])
return ret_list
def create_output_dir(directory_name):
try:
os.mkdir(directory_name)
except OSError:
print('Output folder at {} already exists'.format(args.output_folder)) |
python | outmeandict = {}
outvardict = {}
scalefactors = {}
for node in nodes:
print(node)
store = pd.HDFStore(TSVAULTFILE)
fcdf = store['/'.join((CATEGORY, FCNAME, node))] |
python | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, |
python | from django.apps import AppConfig
class TestVupConfig(AppConfig):
name = 'test_vup'
|
python | import unittest
import pymel.core.language as lang
class testCase_pythonToMelCmd(unittest.TestCase):
def test_bool_arg(self):
self.assertEqual(lang.pythonToMelCmd('xform', rao=1).strip(),
'xform -rao')
def test_multi_arg(self):
self.assertEqual(lang.pythonToMelCmd('xform', translation=(1,2,3)).strip(),
'xform -translation 1 2 3')
|
python | from optparse import make_option
import sys
import os
import re
naiveip_re = re.compile(r"""^(?:
(?P<addr>
(?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address
(?P<ipv6>\[[a-fA-F0-9:]+\]) | # IPv6 address
(?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN
):)?(?P<port>\d+)$""", re.X) |
python | self.assertEqual(virus_dic[STA_K][4], H)
self.assertEqual(virus_dic[STA_K][5], F)
def test_propagate_to_transportation_isolated(self):
# i0 is isolated
# Everyone stays safe
env_dic = {
IW_K: {0: 1, 1: 1, 4: 1, 5: 0},
ITI_K: {0: {0, 5}, 4: {4}, 5: {0, 5}},
IBE_K: {0: 1, 1: 1, 4: 1, 5: 1}
} |
python | from django.shortcuts import get_object_or_404
from django.views.decorators.cache import cache_page
from descriptor.models import Descriptor
from igdectk.rest.handler import *
from igdectk.rest.response import HttpResponseRest
from organisation.models import Organisation
from .base import RestOrganisationModule
class RestOrganisationType(RestOrganisationModule): |
python | Returns the checksum of a file.
Parameters
----------
pathname : Path
hash_function : callable
Hashing function used to calculate the checksum.
Returns |
python | from collections import Counter
from pprint import pprint
from tqdm import tqdm
from tools.lib.route import Route
from tools.lib.logreader import LogReader
if __name__ == "__main__":
r = Route(sys.argv[1])
cnt: Counter = Counter()
for q in tqdm(r.qlog_paths()):
lr = LogReader(q)
car_events = [m for m in lr if m.which() == 'carEvents']
for car_event in car_events: |
python | #input_state = GPIO.wait_for_edge(17, GPIO.RISING)
#print('got state', input_state)
input_state = GPIO.input(17)
if input_state == False:
print('Button Pressed')
GPIO.output(18, GPIO.HIGH if not on else GPIO.LOW)
on = not on
time.sleep(0.5)
|
python | for file in self.files.get('changelogs', []):
url = Path(self.url, '/wp-content/themes/%s/%s' % (theme, file))
resp = self.send(url=url, method="GET")
if resp.status_code == 200 and resp.content:
if resp.url == url:
more('Changelog: %s' % (resp.url))
break
def fpd(self, theme): |
python |
self.summary_reports=["Agatston Score","Mass Score","Volume"]
self.labelScores = dict()
self.totalScores=dict()
for sr in self.summary_reports:
self.labelScores[sr]=[]
self.totalScores[sr]=0
|
python |
def run(self):
"""
Execute salt-key
"""
import salt.key
self.parse_args()
key = salt.key.KeyCLI(self.config)
if check_user(self.config["user"]):
key.run() |
python |
image_x = 50
image_y = 50
train_y = np_utils.to_categorical(y_train)
test_y = np_utils.to_categorical(y_test)
train_y = train_y.reshape(train_y.shape[1], train_y.shape[2])
test_y = test_y.reshape(test_y.shape[1], test_y.shape[2])
x_train = x_train.reshape(x_train.shape[0], 50, 50, 1)
x_test = x_test.reshape(x_test.shape[0], 50, 50, 1)
print("x_train shape: "+ str(x_train.shape))
|
python |
# Calculate shapiro wilk p-value
from scipy.stats import shapiro
shap_w, shap_p = shapiro(y)
print(shap_p)
# Get outliers
# convert to z-scores
from scipy.stats import zscore
y_z_scores = zscore(y) # convert y into z scores |
python | <gh_stars>0
# Set a security to a delayed settlement model: settle 7 days later, at 8am.
self.Securities["IBM"].SettlementModel = DelayedSettlementModel(7, timedelta(hours = 8)) |
python | """
import numpy as np
DATA = np.array([[-1.1, 0.0, 1.1],
[2.2, 3.3, 4.4]])
# np.ndarray: DATA converted to int
result_int = ...
# np.ndarray: DATA converted to bool |
python | raise_exception = True
def get_queryset(self):
name = self.request.GET.get('name')
if name:
return OSFGroup.objects.filter(name__icontains=name)
return OSFGroup.objects.all()
def get_context_data(self, **kwargs):
query_set = kwargs.pop('object_list', self.object_list)
page_size = self.get_paginate_by(query_set) |
python |
class GsUploadManager(GsManager, AbstractTransferManager):
"""
Google cloud storage upload manager that performs either simple upload or
parallel composite upload depending on file size.
|
python |
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes: |
python | def __init__(self, dict_):
super(self.__class__, self).__init__(dict_)
@classmethod
def retrieve(cls, params=None):
if params is None:
params = dict()
url = 'acct/detail_contact_info'
response = cls(Api.call(url, params)) |
python | pass
def SetNamedPipeHandleState(*args, **kwargs): # real signature unknown
pass
def TransactNamedPipe(*args, **kwargs): # real signature unknown |
python |
for i in range(len(tokens)):
assert expected_tokens[i].type == tokens[i].type
assert expected_tokens[i].value == tokens[i].value
def test_syntax_analysis(self): |
python | if args.room.lower() not in profile.rooms:
raise Exception(f"No room could be found in the profile: {args.profile} with the name: {args.room.lower()}")
current_devices = profile.rooms[args.room.lower()]
try:
with open(os.path.join('.', 'resources', 'devices.json'), 'r') as device_file: |
python | l = l.split(":")
p[-1][2] = l[1]
l = l[0].split("]")
l = l[0].split(", ")
p[-1][1] = float(l[1]) |
python | app = phosphorus.App()
@app.add_endpoint(r'/([a-z])([0-9])')
def letter_number(match):
return phosphorus.Response(f'{match.group(1)},{match.group(2)}')
app = webtest.TestApp(app)
def test_regex_groups():
r = app.get('/d4') |
python | @param {DoRequest} translated This request translated to the given state
@param {State} state The state which is used to make the request
reversible.
@type DoRequest
''' |
python | from .models.certificate_issuer_info import CertificateIssuerInfo
from .models.certificate_issuer_info_list_response import CertificateIssuerInfoListResponse
from .models.certificate_issuer_request import CertificateIssuerRequest
from .models.certificate_issuer_update_request import CertificateIssuerUpdateRequest
from .models.certificate_issuer_verify_response import CertificateIssuerVerifyResponse
from .models.cfssl_attributes import CfsslAttributes
from .models.cfssl_auth_credentials import CfsslAuthCredentials
from .models.create_certificate_issuer_config import CreateCertificateIssuerConfig
from .models.error_object_response import ErrorObjectResponse
from .models.field_message_entry import FieldMessageEntry
from .models.global_sign_credentials import GlobalSignCredentials
# import apis into sdk package |
python | np.testing.assert_array_equal(mask2, expected2)
def test_dict_patch(self):
r"""Tests :meth:`texar.utils.dict_patch`.
"""
src_dict = { |
python | from typing import Any
from typing import Callable
from typing import Dict
from unicodedata_reader import *
class UnicodeGeneralCategoryDataCli(UnicodeDataCli):
def __init__(self):
super().__init__()
self._entries = UnicodeDataReader.default.general_category()
def _core_columns(self) -> Dict[str, Callable[[int, str], Any]]:
return {
'GC': lambda code, ch: self._entries.value(code), |
python | # Filename: client.py
import socket  # import the socket module
s = socket.socket()  # create a socket object (socket.AF_INET, socket.SOCK_STREAM)
# host = socket.gethostname()  # get the local hostname
# port = 12345  # set the port number
|
python | from .views import index, IndexView, loginView, logoutView
urlpatterns = [
path('logout/',logoutView, name='logout'),
path('login/',loginView,name='login'),
# path('', IndexView.as_view(), name = 'index'),
path('',index,name='index'),
path('admin/', admin.site.urls), |
python |
#Setting the subplot axis title
ax_1.set(title='Intelligence')
#Plotting box plot
ax_2.boxplot(super_best['Speed'])
#Setting the subplot axis title
ax_2.set(title='Speed')
#Plotting box plot
ax_3.boxplot(super_best['Power'])
|
python | from __future__ import absolute_import
from signature import DSA
from random import randint
_SERIAL_NO_LOWER = 1 << 127
_SERIAL_NO_UPPER = (1 << 128) - 1
_SATOSHI_LOWER = 10
_SATOSHI_UPPER = 500
transaction_constants = {'title': '*** Bitcoin transaction ***',
'serial': 'Serial number: ',
'p': 'p: ',
'q': 'q: ', |
python | param_decorator = "const *"
validation_suffix = textwrap.dedent(
"""\
std::vector<nonstd::optional<{cpp_type}>> {name}_buffer;
{name}_buffer.reserve({name}_items);
|
python | obs, rew, done, _ = e.step(a)
obs_, rew_, done_, _ = e_.step(a[None])
assert np.allclose(obs, obs_, **isclose_kwargs)
assert np.isclose(rew, rew_, **isclose_kwargs)
assert done == done_
if done:
break
def test_env(envid): |
python | def testBadStrings(self):
for s in ('foo', 'bar', 'xxx', 'yyy', ''):
try:
str2bool(s)
assert False, 'Expected "%s" to produce an exception' % s |