```python
import webbrowser


def show_trailer(self):
    """Open the movie trailer when its poster is clicked."""
    webbrowser.open(self.trailer_youtube_url)
```
```python
class EnumItem(MongoModel):
    itemId: str = None
    code: str = None
    label: str = None
    parentCode: str = None
    replaceCode: str = None
    enumId: str = None
```
```python
if mid ** 2 < x:
    left = mid + 1
else:
    right = mid
if left ** 2 == x:
    return left
if left ** 2 > x:
    return left - 1
```
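This fragment is the middle of an integer square-root binary search. A minimal self-contained sketch of the surrounding routine, under the assumption that the search brackets the root as in the fragment (the function name `isqrt_floor` is illustrative):

```python
def isqrt_floor(x: int) -> int:
    """Largest integer r with r * r <= x, found by binary search."""
    if x < 0:
        raise ValueError("x must be non-negative")
    left, right = 0, x + 1
    while left < right:
        mid = (left + right) // 2
        if mid ** 2 < x:
            left = mid + 1
        else:
            right = mid
    # loop ends with left the smallest integer whose square is >= x
    return left if left ** 2 == x else left - 1


assert [isqrt_floor(v) for v in (0, 1, 2, 4, 8, 9)] == [0, 1, 1, 2, 2, 3]
```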
```python
class Search:
    def querySentence(process):
        request_query = process['request_query']
        file = process['file']
        type_file = process['type']
        number_of_pages = 0
        text = ""
        if type_file == "pdf":
            number_of_pages = ProcessFiles.getNumberOfPdfPages(file)
        elif type_file == "docx":
            text = ProcessFiles.readDocx(file)
            number_of_pages = 1
```
```python
self.horizontalLayout.setObjectName("horizontalLayout")
self.frame = QtWidgets.QFrame(self.centralwidget)
self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
```
```python
# create a treeview with dual scrollbars
self.tree = Treeview(container, selectmode="extended", height=8, show="headings")
# self.tree.grid(column=_column, row=_row,
#                columnspan=_columnspan, rowspan=_rowspan,
#                padx=5, pady=5, sticky='news')
vsb = Scrollbar(self.parent, orient='vertical', command=self.tree.yview)
hsb = Scrollbar(self.parent, orient='horizontal', command=self.tree.xview)
self.tree.configure(yscrollcommand=vsb.set, xscrollcommand=hsb.set)
# vsb.grid(column=(_column + _columnspan - 1), row=0, rowspan=_rowspan, sticky='nse')  # , in_=container
# hsb.grid(column=0, row=(_row + _rowspan), columnspan=_columnspan, sticky='ews')  # , in_=container
# container.grid_columnconfigure(0, weight=1)
# container.grid_rowconfigure(0, weight=1)
```
```python
def load():
    from pickle import load, UnpicklingError
    # use a context manager so the file is closed, and catch only the
    # errors a missing or corrupt database would raise
    try:
        with open('filter.db', 'rb') as filter_db:
            return load(filter_db)
    except (OSError, EOFError, UnpicklingError):
        return {}
```
```python
    return resp, body

def shutdown_host(self, hostname):
    """Shutdown a host."""
    resp, body = self.get("os-hosts/%s/shutdown" % str(hostname), self.headers)
    node = etree.fromstring(body)
    body = [xml_to_json(x) for x in node.getchildren()]
    return resp, body
```
```python
What is the length of the shortest polymer you can produce by removing
all units of exactly one type and fully reacting the result?
"""
import re
import string

from src.year2018.day05a import solve as length


def solve(task: str) -> int:
    """Find the shortest polymer for each possible reduction."""
    results = []
    polymer = task.strip()
```
```python
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from robot import model, utils
from keyword import Keyword
```
```python
"besto", "betulo", "bevelo", "bezoni", "biaso", "biblioteko",
"biciklo", "bidaro", "bieno", "bifsteko", "bigamiulo",
```
```python
# -*- coding:utf-8 -*-
# Author: hankcs
# Date: 2019-12-06 15:31
CONLL03_EN_TRAIN = 'https://file.hankcs.com/corpus/conll03_en_iobes.zip#eng.train.tsv'
CONLL03_EN_VALID = 'https://file.hankcs.com/corpus/conll03_en_iobes.zip#eng.dev.tsv'
CONLL03_EN_TEST = 'https://file.hankcs.com/corpus/conll03_en_iobes.zip#eng.test.tsv'
```
```python
from pcfg import PCFG
from pcfg_logprob import LogProbPCFG


class PCFG_Predictor(nn.Module):
    def __init__(self, feature_extractor, template_cfg):
        """
        feature_extractor: a neural network module taking a list of tasks
            (each task is a list of input-outputs) and returning a tensor of shape
            [len(list_of_tasks), feature_extractor.output_dimensionality]
        template_cfg: a cfg giving the structure that will be output
```
```python
from wagtail.core.models import Page
from wagtail_headless_preview.models import HeadlessPreviewMixin


class SimplePage(HeadlessPreviewMixin, Page):
    pass
```
```python
        )  # shape: [num_chunks*B, C, D, H, W]
        y = super().forward(x)  # shape: [num_chunks*B, C', D', H', W']
        _, C1, D1, H1, W1 = y.shape
        y = y.reshape(num_chunks, B, C1, D1, H1, W1)
        return y
```
```python
# passengers  # 'policial'  # 'presidiario'
terminal = {
    'descricao': 'terminal',
    'pessoas': ['piloto', 'oficial1', 'oficial2', 'chefe de serviço',
                'comissário1', 'comissário2', 'policial', 'presidiario'],
}
aviao = {
    'descricao': 'aviao',
    'pessoas': [],
}
```
```python
# Copyright (c) 2015 <NAME>
# See the file LICENSE for copying permission.

from . import common
from . import service_status_common
from ...systems import service


class ServiceStatusStderrLog(service_status_common.ServiceStatusCommonLog):
    # [ ? ]  console-setup
    states = ['?']
```
```python
from homeassistant.const import (
    CONF_HOST,
    CONF_PASSWORD,
    CONF_PORT,
    CONF_USERNAME,
```
```python
        "src/python/BUILD": (
            "protobuf_source(name='proto', source='foo.proto')\n"
            "python_sources(dependencies=[':proto'])"
        ),
    }
)
return rule_runner
```
```python
class User:
    def __init__(self, data):
        self._id = data['id']
        self._email = data['email']
        self._name = data['name']

    def get_all_data(self):
        return {
            'id': self._id,
            'email': self._email,
            'name': self._name,
        }
```
```python
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import fixtures

from nova.openstack.common import rpc


class CastAsCall(fixtures.Fixture):
```
```python
        return

    @pyre.export
    def body(self, document, **kwds):
```
```python
from . import util
from . import default
from .package_stub import PackageStub
from .archive_reader import ArchiveReader


class Archive(PackageStub):
    def __init__(self, path):
        self.archive = ArchiveReader(path)
```
```python
            setattr(new_obj, key, list_rec(value))
        elif isinstance(value, pr.Simple):
            setattr(new_obj, key, recursion(value))
        return new_obj

    def list_rec(list_obj):
        copied_list = list_obj[:]  # lists, tuples, strings, unicode
        for i, el in enumerate(copied_list):
            if isinstance(el, pr.Simple):
                copied_list[i] = recursion(el)
            elif isinstance(el, list):
                copied_list[i] = list_rec(el)
        return copied_list

    return recursion(obj)
```
```python
from libs.ValidationError import ValidationError

# Session expires in `x` seconds
SESSION_EXPIRES = 3600  # 1 hr

# HTTP status codes
BAD_REQUEST = 400
FAILED_REQUEST = 500
```
```python
# Write the inner skull surface as an .obj file that can be imported by
# Blender.
mne.write_surface(op.join(conv_dir, 'inner_skull.obj'), coords, faces,
                  overwrite=True)

# Also convert the outer skull surface.
coords, faces = mne.read_surface(op.join(bem_dir, 'outer_skull.surf'))
mne.write_surface(op.join(conv_dir, 'outer_skull.obj'), coords, faces,
                  overwrite=True)
```
```python
        return dist_to_boundary, particle_zone_idx

    def move_particle(self, particle_indices, distance, particle_zone_idx,
                      zstop=LARGE_DOUBLE):
        self.tally.flux(self.mu[particle_indices],
```
```python
    dependencies = [
        ('razorpayapp', '0004_auto_20200731_2302'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='razorpayhistory',
            name='card_number',
        ),
    ]
```
```python
"""

helps['batchai job show'] = """
type: command
short-summary: Show information about a job.
```
```python
from config import updateProjectStatusToSubprojectMin


def doDelivered(scaffold_home, cfg, prj_only="", sp_only=""):
    if prj_only == "":
        print(f"Error: `deliver` command requires specifying only one project (and optionally only one subproject)")
        return False

    # update status to DELIVERED if it was FILEDTICKETS; otherwise leave it alone
    prj = cfg._projects.get(prj_only, None)
    if not prj:
        print(f"{prj_only}: Project not found in config")
        return False

    ran_command = False
    for sp in prj._subprojects.values():
```
```python
if deploy_to_abs:
    dll_folder = temp_folder()
    mkdir(dll_folder)
else:
    dll_folder = ""
conanfile = """from conans import ConanFile
from conans.tools import save

class Pkg(ConanFile):
    requires = "Lib/0.1@user/testing"

    def build(self):
        save("myapp.exe", "myexe")

    def package(self):
        self.copy("*")
```
```python
desired_nodes = np.array([
    [4.0, 0.0, 0.0],
    [0.0, 4.0, 0.0],
    [0.0, 0.0, 0.0],
    [0.0, 0.0, 4.0],
    [2.0, 0.0, 2.0],
    [0.0, 2.0, 2.0],
    [0.0, 0.0, 2.0],
])
np.testing.assert_almost_equal(
```
```python
main_lay = QtWidgets.QVBoxLayout()
tab_card = MTabWidget()
tab_card.addTab(MLabel("test 1"), "Current Element")
tab_card.addTab(MLabel("test 2"), "Linked Assets")
tab_card.addTab(MLabel("test 2"), "Hero Shots")
tab_card.addTab(MLabel("test 3"), "Linked Metadata")

self.tab_closable = MTabWidget()
self.tab_closable.setTabsClosable(True)
self.tab_closable.addTab(MLabel("test 1"), "Tab One")
self.tab_closable.addTab(MLabel("test 2"), "Tab Two")
self.tab_closable.addTab(MLabel("test 3"), "Tab Three")
self.tab_closable.tabCloseRequested.connect(self.slot_close_tab)
```
```python
if hidden_states is not None:
    hidden_states_tensor = torch.from_numpy(hidden_states)
    predictions = self.model(data_tensor, hidden_states_tensor)
else:
    predictions = self.model(data_tensor)
```
```python
                 mode='reg',
                 supports_masking=False,
                 transitions=None,
                 **kwargs):
        self.transitions = None
        super(CRF, self).__init__(**kwargs)
        self.output_dim = int(output_dim)
        self.mode = mode
```
```python
    space.create_segment((x + 6, y + 5), (x + 9, y + 5), color=pyxel.COLOR_BLACK),
    space.create_segment((x - 9, y - 6), (x - 13, y - 6), color=pyxel.COLOR_BLACK),
    space.create_segment((x - 9, y + 6), (x - 13, y + 6), color=pyxel.COLOR_BLACK),
]

# Create joints between the wheels and the car body
w1.junction(car).pivot()
w2.junction(car).pivot()
w3.junction(car).pivot()
w4.junction(car).pivot()
r1 = w1.junction(car).fix_angle(max_bias=4)
r2 = w2.junction(car).fix_angle(max_bias=4)
w3.junction(car).fix_angle()
```
```python
self.convL7_ = nn.Conv2d(256, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
self.actib7 = Hermite(num_pol=num_pol, planes=512)
self.actib7_wts = self.actib7.get_vars()
# self.bnL7_ = nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
# self.act7 = nn.ReLU()

self.convL8_ = nn.Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
self.actib8 = Hermite(num_pol=num_pol, planes=512)
self.actib8_wts = self.actib8.get_vars()
# self.bnL8_ = nn.BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
# self.act8 = nn.ReLU()

self.convL9_ = nn.Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
```
```python
outtweets = [[tweet.id_str, tweet.created_at, tweet.favorite_count,
              tweet.retweet_count, tweet.source.encode("utf-8"),
              tweet.full_text.encode("utf-8")]
             for tweet in alltweets]

logger.info('Writing the output in a file')
with open('tweets/%s_tweets.csv' % handle, 'wb') as f:
```
```python
dataframes).
"""
import numpy as np

import iguanas.utils as utils
from iguanas.rule_generation._base_generator import _BaseGenerator
from iguanas.utils.types import KoalasDataFrame, KoalasSeries
from iguanas.utils.typing import KoalasDataFrameType, KoalasSeriesType, \
    PandasDataFrameType
from typing import Callable, List, Set, Tuple
from pyspark.ml.feature import VectorAssembler
```
```python
from fabric.api import task


@task
def migrate():
    pass
```
```python
c_square = c ** 2  # square of the total sum
linear = {}
quadratic = {}
offset = 0.0
vartype = dimod.BINARY

for index, value in enumerate(numbers):
    linear[index + 1] = value * (value - c)

for index1, value1 in enumerate(numbers[:-1]):
    for index2 in range(index1 + 1, len(numbers)):
        value = value1 * numbers[index2]
        idx = (index1 + 1, index2 + 1)
        quadratic[idx] = quadratic[tuple(reversed(idx))] = value
```
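These dictionaries are the standard QUBO for number partitioning: expanding the squared imbalance (2·Σᵢvᵢsᵢ − c)² gives linear coefficients v(v − c) and pairwise coefficients 2·vᵢvⱼ (the fragment stores vᵢvⱼ under both orientations, which dimod sums to the same total). A hedged end-to-end sketch with a made-up input:

```python
import dimod

numbers = [4, 2, 7, 1]  # illustrative input
c = sum(numbers)

linear = {i + 1: v * (v - c) for i, v in enumerate(numbers)}
quadratic = {}
for i, v1 in enumerate(numbers[:-1]):
    for j in range(i + 1, len(numbers)):
        # factor 2 from expanding the square; equivalent to filling both orientations
        quadratic[(i + 1, j + 1)] = 2 * v1 * numbers[j]

bqm = dimod.BinaryQuadraticModel(linear, quadratic, 0.0, dimod.BINARY)
best = dimod.ExactSolver().sample(bqm).first.sample
print(best)  # variables set to 1 form one side of a balanced partition
```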
```python
# %% [198. House Robber](https://leetcode.com/problems/house-robber/)
# Problem: maximize the amount stolen; you may not rob two adjacent houses.
# Approach: at each house, the best total is the larger of
# "best up to two houses back + this house" and "best up to the previous house".
class Solution:
    def rob(self, nums: List[int]) -> int:
        pre = cur = 0
        for num in nums:
            pre, cur = cur, max(pre + num, cur)
        return cur
```
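A quick check of the rolling update (note `List` must be imported from `typing` before the class is defined):

```python
# houses [2, 7, 9, 3, 1]: optimal is 2 + 9 + 1 = 12
print(Solution().rob([2, 7, 9, 3, 1]))  # -> 12
```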
```python
from typing import Any

import numpy as np
import torch
from PIL import Image
import os.path as osp

__all_img_exts__ = ['.png', '.jpeg', '.jpg', '.bmp']
```
```python
# noinspection PyUnresolvedReferences
#   - Convenience pass-through from "private" sub-module.
from ._defaults import DFLT_CLASSIFIER_FACTORY, DFLT_DETECTION_FACTORY
from ._interface import ObjectDetector, ImageMatrixObjectDetector
```
```python
    return (int(tunnit), int(minuutit * 60))


# Set up the application's structure.
def serve_layout():
```
```python
    nn.BatchNorm1d(emb_dim),
    nn.ReLU(inplace=True),
    nn.Conv1d(emb_dim, 2, 1),
)
self.pred_cor = nn.Sequential(
```
```python
        # on failure this returns a fail-over template; otherwise it returns a list of tokens
        return super().__call__(template, fields, **kwargs)

    def _format_prompt(
        self,
        template=DEFAULT_PROMPT,
        async_prompt: tp.Optional[AsyncPrompt] = None,
        **kwargs
    ):
```
```python
print(np.mean(ssim_noise))
print(np.mean(ssim_prev))

# The following is syntactically correct, but for this dataset it may run out of memory:
# ssim_noise = ssim(X_test[:, 1:], X_hat[:, 1:],
#                   data_range=X_hat[:, 1:].max() - X_hat[:, 1:].min(), multichannel=True)
```
```python
    lt = create_linear_transform(linear_transform, 2)
    assert lt is not None


def test_create_linear_transform_unknown():
    """Assert an error is raised if an invalid input is given."""
    with pytest.raises(ValueError) as excinfo:
        create_linear_transform('not_a_transform', 2)
```
```python
self.invert = -1
self.color = '黑色'  # 'black'
if isRed:
    self.first = True
    self.invert = 1
```
```python
    fontsize=30,
    fontweight="bold",
)
ax.legend((line_1, line_2), ("mean-price", "actual-price"))
plt.xlabel("Test Date Range", fontsize=20, fontweight="bold")
```
```python
data = {}
data['sourceLanguage'] = "en"
# data['targetLanguages'] = ["fr", "es-mx"]
data['notes'] = ["string"]
data['metadata'] = {}
data['partner'] = ''
data['segmentSeparatorPattern'] = 'string'
data['noTranslationPattern'] = 'string'
client.create_bundle(common.bundleId1, data=data)

bundle1_entries = {}
bundle1_entries['greet'] = "Hello"
bundle1_entries['weather'] = "It is snowing"
client.upload_resource_entries(common.bundleId1, "en", data=bundle1_entries)
```
```python
print(df_y_ori.shape)
df_emb_ori = pd.DataFrame(
    df_y_ori,
    columns=['col_%d' % i for i in range(np.shape(df_y_ori)[1])],
    # index=train_data['sample']
)
# df_y_ori = df_y_ori[:6180]  # use train data

## load train data
df_all = pd.read_csv("dataset/v10/df_all_new2.csv")
# print(list(train_df.columns))
# print(list(extra_test_df.columns))
df_all = df_all[df_all["split_type"] == "primary_train"]
print(df_all[["sample", "cancer_type"]])
print(df_emb_ori.shape)
```
```python
print("sha1 of file contents:", hashlib.sha1(test).hexdigest())

# Read data in parts.
print("Reading in example data in parts:")
md5_inparts = hashlib.md5()
with open("data/example.xml", "rb") as f:
    for line in f:
        # feed each line into the running hash
        md5_inparts.update(line)
print("md5 for file read in parts:", md5_inparts.hexdigest())
```
```python
REQ_DIE = 'DIE'  # request sent by a 'peer' to the other 'peer' to indicate that it should terminate
REQ_TEST_PARENT = "I'M PARENT PROCESS"  # requests to be ignored
REQ_TEST_CHILD = "I'M CHILD PROCESS"    # requests to be ignored

S_PID_OFFSET = 1                 # offset where the PID of the sender process is located in the message
R_PID_OFFSET = S_PID_OFFSET + 1  # offset where the PID of the recipient process is located in the message

# Message structure exchanged between two peers
# +------------------------+
# +        Request         +  (String)
# +------------------------+
# +------------------------+
```
```python
from scipy.stats import hypergeom

n = 13      # marked items in the population
N = 20287   # population size
K = 105     # sample size
k = 12
# scipy convention: hypergeom.cdf(k, M, n, N) with M = population size,
# n = marked items, N = number of draws
prb = hypergeom.cdf(k, N, n, K)
```
```python
badwidget = widget.TextBox("I am a naughty widget.")
config = minimal_conf_noscreen
config.screens = [
    libqtile.config.Screen(
        **{position: libqtile.bar.Bar([badwidget], 10)}
    )
]
manager_nospawn.start(config)
testbar = manager_nospawn.c.bar[position]
```
```python
sys.path.append("../../../rdbms")
from connect import connect

# connect to MySQL; cnx is global to this file
cnx = connect()


def run_inferencing(filenames):
    """
    Run the model in inferencing mode (predict).
    params: model file name and sample dataframe
    return: a file of predictions
    """
    print(f"\ninferring using filenames: {filenames}")
```
```python
# print 'residual = %s' % residual
end_point = point_along_line(utm_line, utm_line_length - residual)
# print 'end_point = %s' % (end_point,)
try:
```
```python
        called.
    definition: Options containing additional function definition details.

DESCRIPTION
Registers a new function into an existing test module.

The name should be a valid identifier.

It should be an existing module, in the format of: testutil[.<name>]*]

The function will be registered following the respective naming
convention for JavaScript and Python.
```
```python
class GradientReverse(torch.autograd.Function):
    scale = 1.0

    @staticmethod
    def forward(ctx, x):
        return x.view_as(x)
```
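The `forward` above is an identity; the interesting part of a gradient-reversal layer is the backward pass. A sketch of the standard construction (this `backward` and the `grad_reverse` helper are the typical pattern, not necessarily this repository's exact code):

```python
import torch


class GradientReverse(torch.autograd.Function):
    """Identity in the forward pass; scales gradients by -scale in backward."""
    scale = 1.0

    @staticmethod
    def forward(ctx, x):
        return x.view_as(x)

    @staticmethod
    def backward(ctx, grad_output):
        return GradientReverse.scale * grad_output.neg()


def grad_reverse(x, scale=1.0):
    GradientReverse.scale = scale
    return GradientReverse.apply(x)
```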
```python
    copy which is still bound to the scope of the original object.
    """
    return osmium.osm.mutable.Way(base=way, **args)


def create_mutable_relation(rel, **args):
    """ Create a mutable relation replacing the properties given in the
        named parameters. Note that this function only creates a shallow
        copy which is still bound to the scope of the original object.
```
```python
SENTINEL_CONSTELLATION = "Sentinel 1"
SENTINEL_PROVIDER = pystac.Provider(
    name="ESA",
    roles=["producer", "processor", "licensor"],
    url="https://earth.esa.int/web/guest/home",
)

SAFE_MANIFEST_ASSET_KEY = "safe-manifest"
```
```python
        return

    @abstractproperty
    def hash(self):
        return


def split_transfer_kwargs(kwargs, skip=None):
    """
    Takes keyword arguments *kwargs*, splits them into two separate
    dictionaries depending on their content, and returns them in a tuple.
    The first one will contain arguments related to file
```
```python
        return cprop
    return default


def browse_sortkey_reverse(prop):
    '''
    Get sorting function for directory listing based on given attribute
    name, with some caveats:
    * Directories will be first.
    * If *name* is given, link widget lowercase text will be used instead.
```
```python
def test_invalid_signature_email_token(setup):
    u = User(email='<EMAIL>')
    db.session.add(u)
    db.session.commit()
    assert u.validate_email('ODJIAWIOEDJASWOD') == 'bad_signature'


def test_invalid_user_email_token(setup):
    u = User(email='<EMAIL>')
    db.session.add(u)
```
```python
    "<NAME>|The spirit, the will to win, and the will to excel are the things that endure. These qualities are so much more important than the events that occur.",
    "<NAME>|There are very real obstacles and challenges to any course of action. And there's no need to add to them, by making up obstacles of your own. Unchain yourself from the bondage of your own thinking.",
    "Pythagoras|Wisdom thoroughly learned will never be forgotten.",
)


# TODO: Create a dictionary (see instructions for more information)
def parse_data(data):
    pass
```
```python
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
```
```python
import pytest
from model_bakery import baker

from pypro.modulos import facade
from pypro.modulos.models import Modulo


@pytest.fixture
def modulos(db):
    titulos = ['antes', 'depois']
    return [baker.make(Modulo, titulo=t) for t in titulos]


def test_listar_modulos_ordenados(modulos):
    assert list(sorted(modulos, key=lambda modulo: modulo.titulo)) == facade.listar_modulos_ordenados()
```
```python
    @staticmethod
    def _pad_sequence(sequences, pad_symbol=0):
        sequence_lengths = [len(sequence) for sequence in sequences]
        max_len = max(sequence_lengths)
        for i, length in enumerate(sequence_lengths):
            to_add = max_len - length
            sequences[i] += [pad_symbol] * to_add
        return sequences, sequence_lengths

    def collate_function(self, batch):
        src_list, src_length_list = TVAEDataset._pad_sequence(
            [example[:self.limit] for example in batch], self.pad_symbol)
        batch = {
            "src": torch.LongTensor(src_list)
```
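A standalone illustration of what the padding helper does, with the class machinery stripped away (names and values are made up):

```python
def pad_sequence(sequences, pad_symbol=0):
    """Pad every sequence in place to the length of the longest one."""
    lengths = [len(s) for s in sequences]
    max_len = max(lengths)
    for i, length in enumerate(lengths):
        sequences[i] += [pad_symbol] * (max_len - length)
    return sequences, lengths


padded, lengths = pad_sequence([[5, 3, 8], [1, 2], [9]])
print(padded)   # [[5, 3, 8], [1, 2, 0], [9, 0, 0]]
print(lengths)  # [3, 2, 1]
```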
```python
        self.wait_for_and_click(second_level_element_selector)
        dataset_selector = first_level_element_selector.descendant(".dataset")
        self.wait_for_and_click(dataset_selector)
        self.sleep_for(self.wait_types.UX_TRANSITION)
        self.screenshot("multi_history_list_list_list")

    @selenium_test
    def test_copy_history(self):
        history_id = self.current_history_id()
        method = self.dataset_collection_populator.create_list_in_history(
            history_id, contents=["0", "1", "0", "1"]
        ).json
        self.prepare_multi_history_view(method)
```
```python
nums = 1
count = 1
while n > nums:
    nums += 6 * count
    count += 1
```
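The loop computes which "ring" of a hexagonal spiral the cell `n` falls in, using the fact that ring k contributes 6k cells. Wrapped into a function for clarity (the name `hex_ring` is illustrative):

```python
def hex_ring(n: int) -> int:
    """Return the ring index (0 for the center) of cell n in a hex spiral."""
    nums = 1    # total cells covered up to the current ring
    count = 1   # next ring index to absorb
    while n > nums:
        nums += 6 * count  # ring `count` contributes 6 * count cells
        count += 1
    return count - 1


print(hex_ring(1))  # 0 (the center)
print(hex_ring(7))  # 1 (last cell of the first ring)
print(hex_ring(8))  # 2 (first cell of the second ring)
```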
```python
        if self.detect():
            self._idle = False
            self.inspect()  # enter the 'inspect' sequence
        else:
            # only give 'turn and turn' commands if we are not already doing that
```
```python
import numpy as np


def relu(x):
    return np.maximum(0, x)


print(relu(10))
print(relu(-5))
```
```python
sleep(3)
ps2.all_output('on')
sleep(10)
while current_time <= target_time:
    ps_measure_check('1', current_cycle, input_current_limit,
                     input_voltage_limit, tolerance)
    ps_measure_check('2', current_cycle, input_current_limit,
                     input_voltage_limit, tolerance)
```
```python
class cloudflare_waf_rule(terrascript.Resource):
    pass


class cloudflare_waf_override(terrascript.Resource):
    pass


class cloudflare_worker_route(terrascript.Resource):
    pass
```
```python
input_new = parse_input.algorithm_info(P, T)
# logging.info(input_new.language)
logging.info("Number of Designs:")
logging.info(len(input_new.language))
my_beluga = beluga_obj.beluga_obj(input_new.language, input_new.design_objective)
```
```python
axs[0].set(title='Pupil')
axs[1].set(title='PSF')
axs[2].set(title='MTF')

bbox_props = dict(boxstyle="rarrow", fill=None, lw=1)
axs[0].text(1.385, 1.07, r'|Fourier Transform|$^2$',
            ha='center', va='center', bbox=bbox_props,
            transform=axs[0].transAxes)
axs[0].text(3.15, 1.07, r'|Fourier Transform|',
            ha='center', va='center', bbox=bbox_props,
```
```python
    PEDESTRIAN_CELL = 1
    OBSTACLE_CELL = 2
    TARGET_CELL = 3

    def __init__(self, cell_grid):
        self.targets_list = []
        self.pedestrian_list = []
        self.obstacles_list = []
        self.grid = []
```
```python
    :param model: given model
    :type model: torch.nn.Module
    :param devices: list of available devices for model running
    :type devices: list
    :return: model in correct device
    :rtype: torch.nn.Module
    """
```
```python
    groundtruth_file = os.path.join(DATA_FOLDER, 'gt_test_{}.csv'.format(i))
    with open('{}_{}.json'.format(predictions_file, i), 'r') as f:
        fold = json.load(f)
    folds.append(fold)
    predictions.update(fold)
    ids_fold, gt_fold = shared.load_id2gt(groundtruth_file)
    ids += ids_fold
    groundtruth.update(gt_fold)

groundtruth_ids = set(ids)
predictions_ids = set(predictions.keys())
```
```python
if protocolNum == "06":
    PCAP["Protocol"] = "TCP (%s)" % (protocolNum)
    transportLayerHeader = packet[34:54]
    transportLayerHeader = unpack(">HHLLBBHHH", transportLayerHeader)
    PCAP["Data"] = packet[54:]
    PCAP["Source Port"] = transportLayerHeader[0]
    PCAP["Destination Port"] = transportLayerHeader[1]
```
```python
def detect_config_error(conf_file):
    '''
    Check the grub configuration for a syntax error in the GRUB_CMDLINE_LINUX value.

    :return: True if an error was detected, otherwise False.
    '''
    with open(conf_file, 'r') as f:
        config = f.read()
    pattern = r'GRUB_CMDLINE_LINUX="[^"]+"(?!(\s*$)|(\s+GRUB))'
    return re.search(pattern, config) is not None
```
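The negative lookahead flags a `GRUB_CMDLINE_LINUX` value whose closing quote is followed by stray text rather than end-of-line or another `GRUB_` option. A quick illustration with made-up config strings:

```python
import re

pattern = r'GRUB_CMDLINE_LINUX="[^"]+"(?!(\s*$)|(\s+GRUB))'

good = 'GRUB_CMDLINE_LINUX="quiet splash"\nGRUB_TIMEOUT=5\n'
bad = 'GRUB_CMDLINE_LINUX="quiet splash" rhgb\n'

print(re.search(pattern, good) is not None)  # False: quote is followed by GRUB_TIMEOUT
print(re.search(pattern, bad) is not None)   # True: stray text after the closing quote
```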
```python
    print("Keystone exception caught: \n[%s]" % e)
    sys.exit(-1)


# API request wrapper object
class APIRequest():
    def __init__(self, keystone_client_object=None, keystone_session_object=None, tenant=None):
        if keystone_client_object is None or keystone_session_object is None:
            raise Exception("Missing Parameter: keystone_client_object cannot be 'None'")
        if tenant is None:
            raise Exception("Missing Parameter: tenant object cannot be 'None'")
        self.keystone_client = keystone_client_object
        self.auth_token = keystone_session_object.get_token()
        self.tid = tenant
```
```python
                        id = '0',
                        label = '0',
                        score = 1.337,
                    )
                ],
            )
        ],
        story_body = '0',
        story_language = '0',
        story_title = '0',
        published_at_end = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
        published_at_start = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f')
    )
else:
    return RelatedStories(
```
```python
from setuptools import setup

setup(
    name='Py-Authorize',
    version='1.2.3.0',
```
```python
from flearn.client import Client
from flearn.common.strategy import AVG
from flearn.common.trainer import Trainer


class Gen(AVG):
    """
    Federated Distillation via Generative Learning
```
```python
cv2.rectangle(frame, p1, p2, (50, 220, 100), thickness=2)

# draw text at a proper location
btn_width = (box[2] - box[0]) / 2.0
btn_height = (box[3] - box[1]) / 2.0
font_size = min(btn_width, btn_height) * 0.6
text_len = len(text)
```
```python
    f'--{argument.replace("_", "-")}',
    help=f'{value.help_}'
         + (f' (available: {", ".join(value.available)})' if value.available else '')
         + f' (default: {value.default_value if value.def_val_descr is None else value.def_val_descr})',
    type=value.type,
    default=value.default_value,
    dest=argument,
    required=value.default_value is None
)
```
```python
required = ['path']

# structured help for the current command; args needs to be ordered
help = {
    'main': "Will set the folder path to the specified path.\n"
            "By using this command there is no need to use the absolute path to the files.",
    'args': collections.OrderedDict([
        ('path', 'A folder path where the user is supposed to have the file that they will work with.\n'
                 'WARNING: Use quotes around the path if it contains spaces.'),
    ]),
    'examples': ['set_path D:\\Project_storage_path']
}
```
```python
    serializer_class = StationSerializer
    http_method_names = ['get']

    def get_queryset(self):
        queryset = Station.objects.all()
        station_name = self.request.query_params.get('station_name', None)
        if station_name is not None:
            queryset = queryset.filter(station_name__icontains=station_name)
        queryset = queryset.order_by('station_name')
        return queryset


class StationRouteViewSet(viewsets.ModelViewSet):
    serializer_class = StationRouteSerializer
    http_method_names = ['get']
```
```python
    for i in range(divisor):
        extended_keyword = extended_keyword + keyword
    return extended_keyword + extended_keyword[0:remainder]


def filter_sentence(sentence):
    """
    Check each character of the sentence against our English alphabet
    dictionary and return the filtered sentence.
    """
    sent = ""
    for i in sentence.lower():
        if i in eng_alphabets:
            sent = sent + i
    return sent
```
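These helpers prepare inputs for a Vigenère-style cipher: the first repeats the keyword until it covers the message length, the second strips non-letters. A hedged usage sketch (the `extend_keyword` wrapper and the `eng_alphabets` set are reconstructions from the fragment, not the original code):

```python
import string

eng_alphabets = set(string.ascii_lowercase)


def extend_keyword(keyword, length):
    """Repeat keyword until it covers `length` characters."""
    divisor, remainder = divmod(length, len(keyword))
    extended_keyword = ""
    for _ in range(divisor):
        extended_keyword += keyword
    return extended_keyword + extended_keyword[0:remainder]


print(extend_keyword("key", 8))            # 'keykeyke'
print(filter_sentence("Attack at dawn!"))  # 'attackatdawn'
```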
```python
    else:
        assert False
    return aux, add


def convert_from_mxnet(model, checkpoint_prefix, debug=False):
    _, mxnet_weights, mxnet_aux = mxnet.model.load_checkpoint(checkpoint_prefix, 0)
```
```python
    pd.to_datetime('2012-01-06'): pd.Series(['pending', 'pending', 'Customer Approval'], index=[0, 1, 2]),
    pd.to_datetime('2012-01-07'): pd.Series(['Customer Approval', 'Customer Approval', 'Customer Approval'], index=[0, 1, 2]),
}

our_jira = Metrics(config=jira_config)
expected_frame = pd.DataFrame(expected)
actual_frame = our_jira.cfd(until_date=date(2012, 1, 8))
assert_frame_equal(actual_frame, expected_frame), actual_frame
```
```python
    assert normalized == "join o'reilly 's pmr"


def test_mapping(tokenizer):
```
```python
# behaviour pseudocode for the players
if ang == kick_ang:
    kick(pwr)

# B
if holding ball 5 sec:
    send "B got ball" to C

# C
while game_end:
    stop game
    get_data
    send to GA
    wait evo (new generation)
```
```python
    # @param {integer[]} nums
    # @return {integer}
    def rob(self, nums):
        if len(nums) == 1:
            return nums[0]

        def rob_util(nums):
            res1 = res2 = 0
            for n in nums:
```
```python
            t_indices = self.target_indices[task]
            task_samples += [t_indices]
        task_samples = np.concatenate(task_samples)
        np.random.shuffle(task_samples)
        self.task_samples = task_samples
```
```python
if params['gan_critic_class_layers'] > 0:
    # split upper layers for the internal classifier only
    class_enc_params = params.copy()
    class_enc_params['num_layers'] = params['gan_critic_class_layers']
    self.class_encoder = TransformerEncoder(class_enc_params)
else:
    self.class_encoder = None
if params['gan_critic_critic_layers'] > 0:
    # split upper layers for the critic part only when used with an internal classifier
```
```python
        # not always true, but usually true
        r0 = neutron0.state.position
        self.assertTrue(r[0] != r0[0])
        self.assertTrue(r[1] != r0[1])
        # always true
        self.assertTrue(abs(r[0] - r0[0]) <= dx / 2)
        self.assertTrue(abs(r[1] - r0[1]) <= dy / 2)
        self.assertTrue(abs(r[2]) == r0[2])
        self.assertEqual(
            tuple(n.state.velocity),
```