lang — stringclasses (10 values)
seed — stringlengths (5 to 2.12k)
python
@cli.command(name="list", help="List pending tasks")
@click.option("--all", "show_all", is_flag=True, help="List all tasks")
@db.setup()
async def list_tasks(show_all: bool) -> None:
    rows: list[tuple[str, str, str, str, str]] = [
        ("Task ID", "Name", "Arguments", "Run at", "Schedule ID")
    ]
    rows.extend(
        (str(task_id), name, str(arguments), run_at.isoformat(), str(from_schedule_id))
        for task_id, name, arguments, run_at, from_schedule_id in await get_tasks(
            show_all=show_all
        )
python
def populate_zeta_grid(connection, grid_interval_mm):
    """Populate uniform grid on zeta"""
    cursor = connection.cursor()
    cursor.execute("""
        INSERT INTO zeta_grid (grid_interval_mm) VALUES (?)
        """, (grid_interval_mm,))
    cursor.execute("""
        SELECT min(zeta_mm), max(zeta_mm) FROM water_level""")
    zeta_bounds = cursor.fetchone()
    cursor.executemany("""
python
from airbyte_cdk.sources.utils.schema_helpers import ResourceSchemaLoader, check_config_against_spec_or_exit
from pytest import fixture
from pytest import raises as pytest_raises

logger = AirbyteLogger()
MODULE = sys.modules[__name__]
MODULE_NAME = MODULE.__name__.split(".")[0]
SCHEMAS_ROOT = "/".join(os.path.abspath(MODULE.__file__).split("/")[:-1]) / Path("schemas")

@fixture(autouse=True, scope="session")
python
return TokenState(row.RaiInUniswap, row.EthInUniswap, row.debt, row.collateral)
python
        self.frame.group_by('colors', {'colors': self.context.agg.sum})

    def test_stats_on_string_var(self):
        """Non-numeric aggregates error on non-numeric column"""
        with self.assertRaises(Exception):
            self.frame.group_by('colors', {'colors': self.context.agg.var})

    def test_invalid_column_name(self):
        """Aggregate on non-existent column errors"""
        with self.assertRaises(Exception):
            self.frame.group_by(
                'InvalidColumnName', {'colors': self.context.agg.var})
python
    Args:
        predictions (distribution): a batch of distribution predictions.
        reduction (str): the method to aggregate the results across the batch.
            Can be 'none', 'mean', 'sum', 'median', 'min', or 'max'.
        resolution (int): the number of discretization bins, where higher
            resolution increases estimation accuracy but also requires more
            memory/compute.

    Returns:
        tensor with shape [batch_size] or []: The computed mean. When reduction
        is 'none' the shape is [batch_size], otherwise the shape is [].
    """
    quantiles = _implicit_quantiles(resolution)
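The docstring above describes a quantile-based estimate of a distribution's mean. Below is a minimal runnable sketch of how such a helper could look, assuming `_implicit_quantiles` returns equal-probability bin midpoints and `predictions` is a `torch.distributions` object exposing `icdf`; this is an illustration, not the library's actual implementation.

# Sketch: estimate the mean by averaging the inverse CDF at implicit quantiles.
import torch

def _implicit_quantiles(resolution: int) -> torch.Tensor:
    # midpoints of `resolution` equal-probability bins: (k + 0.5) / resolution
    return (torch.arange(resolution) + 0.5) / resolution

def distribution_mean(predictions, reduction: str = "none", resolution: int = 100):
    quantiles = _implicit_quantiles(resolution)           # shape [resolution]
    samples = predictions.icdf(quantiles.unsqueeze(-1))   # shape [resolution, batch_size]
    mean = samples.mean(dim=0)                            # shape [batch_size]
    if reduction == "none":
        return mean
    return getattr(torch, reduction)(mean)                # e.g. torch.mean, torch.sum

print(distribution_mean(torch.distributions.Normal(torch.tensor([0.0, 2.0]),
                                                   torch.tensor([1.0, 1.0]))))  # ~[0., 2.]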
python
"Mc": WS, "Me": None, "No": None, "Zs": WS, "Zl": WS, "Zp": WS, "Pc": WS, # TODO: figure out if this wants to be None "Pd": WS,
python
    if default:
        if Maker.inst().def_target:
            report.warn("default target set more than once.")
        Maker.inst().def_target = target
    if depends:
        # Add dependencies that are not already in the target's dependency
python
<gh_stars>1-10
'''
Evaluate
    Plays output.mp4 Video
    Receives evaluation of grasped (g) and notgrasped (n) from human
    Calculates the accuracy of Inference
    Reads output.csv and updates it
    Generates dataset suggestions based on mistakes of the learning algorithm
First argument: Save suggestion dataset (0: Don't save, int: interval for frames)

Software License Agreement (MIT License)
Copyright (c) 2020, <NAME>.
'''
python
<gh_stars>0
"""Submodule for preprocessing data from particular models."""
python
sheet_name="Sheet1", header=0, engine="openpyxl", keep_default_na=False, ) # Now need to edit data and then combine the two
python
def test_can_define_new_format() -> None:
    def my_format_validator(value: str) -> str:
        if value == "valid value":
            return value
        raise ValueError()

    with pytest.raises(KeyError):
        validate_string_format("valid value", "my-format")

    StringFormat["my-format"] = my_format_validator
    assert validate_string_format("valid value", "my-format")
python
    try:
        tb.load_data_into_table(shape, model)
    except:
        if not skip_model_not_found:
            raise
python
    operations = [
        migrations.AlterModelOptions(
            name='vulnerability',
            options={'ordering': ['-cvss_score'], 'verbose_name': 'Vulnerability', 'verbose_name_plural': 'Vulnerabilities'},
        ),
        migrations.RemoveField(
            model_name='vulnerability',
            name='false_positive',
        ),
        migrations.RemoveField(
            model_name='vulnerability',
python
""" compute the probabilities of each of the tokens and then compute the aggregate probability of the top 15 tokens. return a tuple of the form: (aggregate_probability, [(probability, token), ...]) """ probs = self.get_token_probabilities(tokens) prod = reduce(operator.mul, [x for (x,y) in probs], 1) inv_prod = reduce(operator.mul, [1-x for (x,y) in probs], 1)
python
        super(BingTTSValidator, self).__init__(tts)

    def validate_dependencies(self):
        pass

    def validate_lang(self):
        # TODO
        pass

    def validate_connection(self):
        # TODO
        pass
python
        self.left_child = left_child
        self.right_child = right_child

def greatest_node(root: Node):
    node_max_right = root.value
    node_max_left = root.value
    if root.left_child is not None:
        node_max_right = max(root.left_child.value, greatest_node(root.left_child))
    if root.right_child is not None:
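For context, a complete runnable sketch of the recursive maximum search the fragment above starts; the `Node` class here is a hypothetical stand-in for the one partially shown.

class Node:
    def __init__(self, value, left_child=None, right_child=None):
        self.value = value
        self.left_child = left_child
        self.right_child = right_child

def greatest_node(root: Node) -> int:
    # largest value anywhere in the (unordered) binary tree rooted at `root`
    best = root.value
    if root.left_child is not None:
        best = max(best, greatest_node(root.left_child))
    if root.right_child is not None:
        best = max(best, greatest_node(root.right_child))
    return best

print(greatest_node(Node(3, Node(7), Node(1, None, Node(9)))))  # 9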
python
class CommentAdmin(admin.ModelAdmin):
    fieldsets = (
        (None, {'fields': ('content_type', 'object_id', 'site')}),
        ('Content', {'fields': ('user', 'headline', 'comment')}),
        ('Ratings', {'fields': ('rating1', 'rating2', 'rating3', 'rating4', 'rating5', 'rating6', 'rating7', 'rating8', 'valid_rating')}),
        ('Meta', {'fields': ('is_public', 'is_removed', 'ip_address')}),
    )
    list_display = ('user', 'submit_date', 'content_type', 'get_content_object')
    list_filter = ('submit_date',)
    date_hierarchy = 'submit_date'
python
    Class variables are used instead of tk's widget name convention for windows
    for accessibility, permanence, and simplicity.
    """
    def __init__(self, world):
        # initialize root
        self.world = world
        self.root = tk.Tk()
        self.root.geometry(WORLD_COORDINATES)
python
        self.devices = {}
        self.dongles = []
        self.forcesensors = []
        for name in config_dict['dev_names']:
            dev_type = config_dict['dev_type'][name]
python
def _getpalette():
    try:
        return {'palette': _Console.console.getpalette()}
    except console.NotSupportedError:
        bottle.abort(501, 'Console does not support this function')
    except console.CommunicationError:
        bottle.abort(503, 'Unable to communicate with console')
python
class LMSAuthentication(BaseAuthentication):
    def authenticate(self, request):
        secret_key = request.META.get('HTTP_X_SECRET_KEY')
        if secret_key is None:
            raise AuthenticationFailed()
        person = get_student_or_professor(secret_key=secret_key)
        if person is None:
            raise PermissionDenied()
python
from odoo import models, fields, api, exceptions, _

class HrAttendance(models.Model):
    _inherit = "hr.attendance"

    check_in_exception = fields.Boolean(
        string="Abnormal clocking in at work", readonly=True
    )
    check_out_exception = fields.Boolean(
python
            raise Exception(
                "Different numbers of dimensions detected while "
                "reading in a group of descriptor files. Inconsistency in the "
                "dimensions is problematic."
            )
        else:
            first_file = False
            self.__dims = dims
            self.__min_vals.resize(len(self.__file_grp_reader.files_read), self.__dims)
            self.__max_vals.resize(len(self.__file_grp_reader.files_read), self.__dims)
        for index, file_name in enumerate(self.__file_grp_reader.files_read):
python
    def get_basic_config(self):
        config = AppConfig()
        config.update_server_config(
            single_dataset__obs_names=None,
            single_dataset__var_names=None,
        )
        config.update_server_config(app__flask_secret_key="secret")
        config.update_default_dataset_config(
            embeddings__names=["umap"],
            presentation__max_categories=100,
            diffexp__lfc_cutoff=0.01,
        )
        return config

    def stdAsserts(self, data):
        """run these each time we load the data"""
        self.assertIsNotNone(data)
python
    file.close()

# call: = operations.read_file("txt/adversary_dice_values.txt")
def read_file(file_name="txt/player_dice_values.txt"):
    file = open(file_name, "r")
    content = file.read().split()
    file.close()
python
    num_inversions = inversion_p + inversions_q + cross_inversions
    return C, num_inversions

def _count_cross_inversions(P, Q):
    """
    Counts the inversions across two sorted arrays.
    And combine the two arrays into one sorted array

    For all 1 <= i <= len(P) and for all 1 <= j <= len(Q),
    if P[i] > Q[j], then (i, j) is a cross inversion

    Parameters
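A runnable sketch of the merge-and-count step the docstring above describes — the standard merge-sort cross-inversion count — is given below; it illustrates the technique and is not necessarily the source's exact implementation.

def count_cross_inversions(P, Q):
    # P and Q are individually sorted; merge them while counting pairs P[i] > Q[j]
    R = []
    i = j = inversions = 0
    while i < len(P) and j < len(Q):
        if P[i] > Q[j]:
            # every remaining element of P is greater than Q[j]
            inversions += len(P) - i
            R.append(Q[j])
            j += 1
        else:
            R.append(P[i])
            i += 1
    R.extend(P[i:])
    R.extend(Q[j:])
    return R, inversions

print(count_cross_inversions([2, 5, 7], [1, 3, 6]))  # ([1, 2, 3, 5, 6, 7], 6)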
python
import osmnx as ox
import numpy as np
import pyvista as pv

###############################################################################
# Read in the graph directly from the Open Street Map server.
# address = 'Holzgerlingen DE'
# graph = ox.graph_from_address(address, dist=500, network_type='drive')
# pickle.dump(graph, open('/tmp/tmp.p', 'wb'))
# Alternatively, use the pickled graph included in our examples.
from pyvista import examples
python
class Dealer:
    time_func = time.time
    keyboard_interrupt = False
python
    def on_size(self, event):
        if self.context is None:
            return
        self.Layout()
python
            machine.FakeFullstackMachine(
                self.environment.hosts[i],
                network['id'],
                tenant_uuid,
                self.safe_client))
            for i in range(2)]

        for vm in vms:
python
        # Determine where this drive is in the tree.
        if j == 0:
            # root element
            heritage = [[name, j]]
        elif j == heritage[len(heritage) - 1][1] + 1:
            # child of last element
            heritage.append([name, j])
        elif j < heritage[len(heritage) - 1][1]:
            # this is an aunt, but not a root
            heritage = heritage[:j - 1]
        else:
python
    for i in array:
        s += i
    return s

def main():
    lst = [i for i in range(100000000)]
    a = get_len(lst)
    b = get_sum(lst)

cProfile.run('main()')
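For reference, a self-contained version of the profiling demo above (list size reduced so it finishes quickly); `get_len` is assumed here to be a thin wrapper around `len()`, which the fragment does not show.

import cProfile

def get_len(array):
    return len(array)

def get_sum(array):
    s = 0
    for i in array:
        s += i
    return s

def main():
    lst = [i for i in range(1_000_000)]
    get_len(lst)
    get_sum(lst)

cProfile.run('main()')  # prints per-function call counts and cumulative times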
python
    def form_valid(self, form):
        logout(self.request)
        return HttpResponseRedirect(reverse('home'))
python
n = int(input())
if n == 25:
    print("Christmas")
elif n == 24:
    print("Christmas Eve")
elif n == 23:
    print("Christmas Eve Eve")
elif n == 22:
    print("Christmas Eve Eve Eve")
python
    def __init__(self,
                 # filename = None,  # uncomment if want vehicle def in file
                 num_vehicles,
                 horizon
                 ):
        # copy the python from the example.
        Vehicle = namedtuple(
            'Vehicle',
python
PRINTLOG = 30
UPDATEPARSER = 31
UPDATESTATE = 32
UPDATEKEYS = 33
ADDSNAPSHOT = 34
PARSEHANDLE = 35
FETCHPARSERDATA = 36
START = 40
PAUSE = 41
PROCEED = 42
TAKESNAPSHOT = 43
python
class VideoPTI(PTI):
    def __init__(self, opts: RunConfig):
        super().__init__(opts)
python
    def enable_account(self, *args, **kwargs):
        """Fake enable_account method to mirror the IBMQ provider factory."""
        pass

    def disable_account(self):
        """Fake disable_account method to mirror the IBMQ provider factory."""
        pass

    def save_account(self, *args, **kwargs):
        """Fake save_account method to mirror the IBMQ provider factory."""
        pass
python
setup(name='pyxll_utils',
      version='1.0',
      author='PyXLL Ltd, Enthought Inc.',
      packages=find_packages(),
      # Provides a namespace for extension points to contribute to. This
python
def search_anime_movies(*, search_query: str, page: int) -> Dict:
    result = {'movies': [], 'has_next_page': False}
    if len(search_query) < JIKAN_SEARCH_QUERY_MIN_LENGTH:
        return result
python
        return_scale (bool, optional): Whether to return ``w_scale`` and
            ``h_scale``. Default: ``False``.

    Returns:
        :obj:`np.ndarray` | tuple: The resized image (and scales).
    """
    out_img = cv2.resize(img, size, interpolation=_INTERP_CODES[interpolation])
    if return_scale:
        h, w = img.shape[:2]
        w_scale = size[0] / w
python
    return ds

def main():
    ds = open_ds()
    # plt.plot(ds[('BPSK', 12)][25][0][:])
    # plt.plot(ds[('BPSK', 12)][25][1][:])
    # plt.show()
    # calc_mod_energies(ds)
    # calc_mod_stddev(ds)
    calc_mod_bias(ds)
python
        self.model.add(Dropout(0.25))
        self.model.add(Dense(64, activation="relu"))
        self.model.add(Dropout(0.25))
        self.model.add(Dense(32, activation="relu"))
        self.model.add(Dropout(0.4))
python
        EventHandler.__init__(self)
        self.PIN = pin
        GPIO.setup(self.PIN, GPIO.IN)

    def cleanup(self):
        GPIO.cleanup(self.PIN)

    def check(self):
        if GPIO.input(self.PIN):
            self.fire()
python
RequestHandler_T = Callable[["RequestSession"], Awaitable[Any]]
MessagePreprocessor_T = Callable[["NoneBot", "CQEvent", "PluginManager"], Awaitable[Any]]

__all__ = [
    'Context_T',
python
    Member, \
    MemberList, \
    PermissionList, \
    PrincipalKind, \
    Principal, \
python
            break
        process_line = in_line.replace(' ', '')
        if '<<' in process_line:
            process_line = process_line.split('<<')[0]
        process_line = process_line.replace(process_type, '')
        found_name = process_line.split('{')[0]
    return found_name

def find_plant_structure(plant_structures: list[PlantContent], in_package: str,
                         in_name: str) -> PlantContent:
    """
    Finds the structure in the structure list. If there is no match for the name of
    the structure then a new structure is created and added to the list. Both the name
    and the package are used to identify the structure; this allows for a class with
    the same name to exist in different packages
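A minimal sketch of the lookup-or-create behaviour the docstring above describes is given below; the `PlantContent` dataclass here is a hypothetical stand-in for the real class, which is not shown in the fragment.

from dataclasses import dataclass, field

@dataclass
class PlantContent:
    name: str
    package: str
    members: list = field(default_factory=list)

def find_plant_structure(plant_structures, in_package, in_name):
    # match on both name and package, so same-named classes in different packages coexist
    for structure in plant_structures:
        if structure.name == in_name and structure.package == in_package:
            return structure
    structure = PlantContent(name=in_name, package=in_package)
    plant_structures.append(structure)
    return structure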
python
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for SyntaxNet lexicon."""
python
        message=('%s is not a valid image file' % filename)
    )

runDemo = ImageIFDemo

if __name__ == '__main__':
    demo = ImageIFDemo()
    mainloop()
python
}

data_sources = ['GDSC', 'PDXE']
data_types = ['fpkm']
genes_filtering = 'mini'
data_normalization = 'library_size'  # Can be TPM, "library_size" or "log". Else will not have any influence.
source = 'GDSC'
target = 'PDXE'
folder = '../data/'

data_df = read_data(tissues=tissues,
                    data_types=[e for e in data_types],
                    projects=projects,
python
    lon1 = nodes['Longitude'].iloc[links["From_Node"].iloc[i]]
    lat1 = nodes['Latitude'].iloc[links["From_Node"].iloc[i]]
    lon2 = nodes['Longitude'].iloc[links["To_Node"].iloc[i]]
    lat2 = nodes['Latitude'].iloc[links["To_Node"].iloc[i]]
    # convert decimal degrees to radians
    lon1, lat1, lon2, lat2 = map(math.radians, [lon1, lat1, lon2, lat2])
    # haversine distance formula
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = math.sin(dlat / 2) ** 2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2
    c = 2 * math.asin(math.sqrt(a))
    r = 3956  # Radius of earth in miles
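The same haversine computation, factored into a standalone function so it can be tested in isolation; 3956 miles is the Earth radius used above, and the coordinate pairs in the example call are assumptions for illustration only.

import math

def haversine_miles(lon1, lat1, lon2, lat2):
    # great-circle distance between two (lon, lat) points in decimal degrees
    lon1, lat1, lon2, lat2 = map(math.radians, [lon1, lat1, lon2, lat2])
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = math.sin(dlat / 2) ** 2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2
    return 2 * math.asin(math.sqrt(a)) * 3956

print(round(haversine_miles(-87.63, 41.88, -74.01, 40.71), 1))  # Chicago -> New York, ~710 miles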
python
    ('L_EXTENDEDPRICE', 'double'),
    ('L_DISCOUNT', 'double'),
    ('L_TAX', 'double'),
    ('L_RETURNFLAG', 'char(1)'),
    ('L_LINESTATUS', 'char(1)'),
    ('L_SHIPDATE', 'int'),      # date
    ('L_COMMITDATE', 'int'),    # date
    ('L_RECEIPTDATE', 'int'),   # date
    ('L_SHIPINSTRUCT', 'char(25)'),
    ('L_SHIPMODE', 'char(10)'),
python
        self._coll = None
        print(self._opts)
        host = opts['host']
        port = opts['port']
        self._client = MongoClient(host, port)
        print(self._client)

    def list_databases(self):
        return self._client.list_database_names()

    def list_collections(self):
python
    class Meta:
        model = User
        fields = "__all__"

class FaqSerializer(serializers.ModelSerializer):
    class Meta:
        model = Faq
python
""" VPNProfile application """ title = "VPN Profiles" menu = [_("Setup"), _("VPN Profiles")] model = VPNProfile
python
            return pickle.loads(r[0])
        else:
            return r[0]
    return None
python
depends_on = None

def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
python
from django import forms

class HashForm(forms.Form):
    """
python
logging.getLogger("swiftclient").setLevel(logging.CRITICAL) self.logger = logging.getLogger(__name__) def upload(self, container, source, object_name): obj = SwiftUploadObject(source, object_name) with SwiftService(options=self.config) as swift: try: for result in swift.upload(container=container, objects={obj}): if not result["success"]: raise result["error"] except SwiftError as e: self.logger.error(e.value)
python
print(" ".join([str(item) for item in row]) + "\n") class TileException(Exception): pass
python
    def on_moved(self, event):
        self.logger.debug(event)
        self.on_action(event.dest_path)

    def on_action(self, magnet):
        magnet_processed = str(os.path.abspath(magnet))
        self.logger.info('Processing file: {0}'.format(magnet))
        magnet_contents = Path(magnet).read_text()
        torrent_path = self.client.magnet2torrent(magnet_contents, config('torrent_blackhole', os.path.dirname(magnet)))
        if torrent_path is not None:
            magnet_processed += '.processed'
        else:
            magnet_processed += '.err'
        shutil.move(magnet, magnet_processed)
python
)
from .models import (
    Absence,
    AbsenceEntitlement,
    AbsenceType,
    Attendance,
    CostCenter,
    Department,
    DynamicAttr,
    Employee,
    HolidayCalendar,
    Office,
    ShortEmployee,
    Team,
    WorkSchedule,
python
        self._frame.set_dpi(dpi)

    ### Interface methods ###

    def render(self, newpath=None, keep_open=False):
        """Render the entire movie and compress it into video."""
        savepath = self.path if newpath is None else newpath
        if os.path.isfile(savepath):
            out('Found existing movie file!')
            if not input('Replace? (y/N) ').strip().lower().startswith('y'):
                return
            os.unlink(savepath)
python
class BeginScene(Scene):
    def construct(self):
        title = Text("Manim Homework Vol.2", font='微软雅黑', stroke_width=0)
        author = Text("Made by @DistinctWind", font='微软雅黑', stroke_width=0).scale(0.5)
        title.move_to(UP * 0.5)
        author.move_to(DOWN * 0.5)
        self.play(Write(title))
        self.play(Write(author))
        self.wait(2)
python
    ####
    ## Now interpret the data
    streambuff = base64.b64decode(LW_node.findall('Array/Stream')[0].text)
    if specbunch.subtype_raw == 0:
        specbunch.subtype = 'transfer function B/A in format (Y)'
        specbunch.type_name = 'TF'
        data = np.frombuffer(streambuff, dtype='c8')
        specbunch.xfer = data.reshape(M, -1)
    elif specbunch.subtype_raw == 1:
        specbunch.subtype = 'transfer function A in format (Y)'
        specbunch.type_name = 'STF'
        data = np.frombuffer(streambuff, dtype='c8')
        specbunch.response = data.reshape(M, -1)
python
    Uf = []
    for gdl in modeloL.geometria.gdls:
        u = None
        for e in modeloH.elementos:
            u = e.solucionInterpoladaLocal(gdl[0], gdl[1])
            if u:
                break
        if not u:
            raise Exception(gdl)
            u = 10
        Uf.append(u)
    Uf = np.array(Uf).reshape([len(Uf), 1])
    error = lambda u, u_gt: np.abs((u - u_gt) / u_gt)
    errores = error(Ug, Uf)
python
        dl = computeDescriptionLength(dlmode=dlmode, gtype=gtype, V=G.number_of_nodes(), W=pattern.NCount+1, kw=params['kw_new'], q=q)
    elif mode == 2:
        best_node, params = max(candidates.items(), key=lambda x: computeInterestingness(
            AD(pattern.ECount + x[1]['kw_surplus'], pattern.expectedEdges + x[1]['mu_w_surplus']),
            computeDescriptionLength(dlmode=dlmode, gtype=gtype, V=G.number_of_nodes(), W=pattern.NCount+1,
                                     kw=pattern.ECount + x[1]['kw_surplus'], q=q,
                                     kws=pattern.kws + x[1]['kws_surplus'], isSimple=isSimple)))
        params['mu_w_new'] = pattern.expectedEdges + params['mu_w_surplus']
        params['kw_new'] = pattern.ECount + params['kw_surplus']
        params['kws_new'] = pattern.kws + params['kws_surplus']
python
class SchemaURLNode(URLNode):
    def __init__(self, url_node):
        super().__init__(url_node.view_name, url_node.args, url_node.kwargs, url_node.asvar)

    def render(self, context):
        url = super().render(context)
        return clean_tenant_url(url)
python
    srcs = [cpp] + native.glob([
        "src/test/**/*.hpp"
    ]),
    copts = select({
python
        return super(MyFormatter, self).convert_field(value, conversion)

    def vformat(self, format_string, args, kwargs):
        words = format_string.split()
        print(words)
        # words = [super(MyFormatter, self).vformat(x, args, kwargs) for x in
        #          words]
        # words = [self.vformat(x, args, kwargs) for x in words]
python
        file_name = parts[0]
        text = parts[1]
        if len(parts) == 3:
            text = parts[2]
        if not file_name.endswith('.wav'):
            file_name = file_name + '.wav'
        input_file_path = os.path.join(path, 'wavs', file_name)
python
cbar = fig.colorbar(im)

# Open serial port
ser = serial.Serial(port='COM4', baudrate=921600)

rows = 60
python
# ----------------------------------------------------------------------------------------------------
###
# Params which can be modified by exporting environment variables.
###
communication_port = int(os.environ.get('communication_port') or 11211)
python
    lo, hi = 1, num
    while lo <= hi:
        mid = (lo + hi) >> 1
        if mid * mid == num:
            return True
        elif mid * mid < num:
            lo = mid + 1
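For context, a complete version of the binary search above; the surrounding function signature (a LeetCode-style `is_perfect_square(num)`) is an assumption, since the fragment only shows the loop.

def is_perfect_square(num: int) -> bool:
    # binary-search for an integer whose square equals num
    lo, hi = 1, num
    while lo <= hi:
        mid = (lo + hi) >> 1
        if mid * mid == num:
            return True
        elif mid * mid < num:
            lo = mid + 1
        else:
            hi = mid - 1
    return False

print(is_perfect_square(16), is_perfect_square(14))  # True False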
python
        )
        return [CertificateContact._from_certificate_contacts_item(contact_item=item) for item in contacts.contact_list]

    @distributed_trace_async
    async def delete_contacts(self, **kwargs: "Any") -> List[CertificateContact]:  # pylint:disable=unsubscriptable-object
        # disabled unsubscriptable-object because of pylint bug referenced here:
        # https://github.com/PyCQA/pylint/issues/2377
        """Deletes the certificate contacts for the key vault.

        Requires the certificates/managecontacts permission.

        :return: The deleted contacts for the key vault.
        :rtype: list[~azure.keyvault.certificates.CertificateContact]
        :raises: :class:`~azure.core.exceptions.HttpResponseError`
python
    net_dict = net.state_dict()
    state_dict = state_dict_total
    for k, v in state_dict.items():
python
        super(VGG16, self).__init__(False, False)

class NoBackbone(nn.Module):
    r"""
    A model with no CNN backbone for non-image data.
    """
    def __init__(self, *args, **kwargs):
        super(NoBackbone, self).__init__()
python
    def getprofile(self, link):
        self.driver.get(link)

    def getmessagedict(self):
        max = getmaxrow("message")
        already_messaged = {}
        for i in range(1, max + 1):
python
image_height = 7
image_padding = 3  # 3 columns of padding
image_width = math.floor(len(image) / image_height)
arr = bytearray(image)
offset = 0
while True:
    display.set_image(arr, image_width, image_height, offset_x=offset, wr=True, on_level=1, padding=image_padding)
    display.show()
    time.sleep(0.5)
    offset = (offset + 1) % image_width
python
    if args.dry_run:
        out = sys.stdout
    else:
        out = open(LOGFILE, "w+b")
    if policy == POLICY_TAG and not args.dry_run:
        print("[WARNING]\n>> no push, when possible call 'git push --tags origin master' <<", file=out)

def tag_master(out):
    global VERSION, TAG
    if Branch() != "master":
        call(out, "git", "checkout", "master")
    call(out, "git", "pull", "--rebase")
    call(out, "python", "build_tag.py")
python
pattern = Struct(
    "PhaseID" / Int16ul,
    "DescriptionSize" / Int16ul,
    "Description" / Bytes(lambda this: this.DescriptionSize)
)
python
    allows us to normalize this natural variance away.
    '''
    baseline = tree.kl_div_dirichlet_baseline(
        prior_weight,
        observation_weight,
        window_size,
        sequence_count,
        sample_rate)
    goko_divs = {}

    """
    This is the actual object that computes the KL Divergence statistics between
    the samples we feed in and the new samples. Internally, it is an evidence
    hashmap containing categorical distributions, and a queue of paths.
python
        loss_criterion = nn.CrossEntropyLoss()

        # Initializing the optimizer
        get_optimizer = getattr(self, "get_optimizer", optimizers.get_optimizer)
        optimizer = get_optimizer(self.model)

        # Initializing the learning rate schedule, if necessary
        if hasattr(config, 'lr_schedule'):
            lr_schedule = optimizers.get_lr_schedule(optimizer,
python
# SPDX-License-Identifier: MIT

import itertools
from decimal import Decimal, InvalidOperation
from typing import Iterator, List, Sequence

from pydantic import BaseModel, Extra, Field, root_validator

from .utils import DatafileIterMixin, FromDictMixin, dformat
python
import json
import re

class WalletProviderProfile(object):
    def __init__(self, json_response):
        self.json_response = json_response
python
    in each region is computed.

    Args:
        x (~chainer.Variable): Input variable. The shape is expected to be
            4 dimensional: (n: batch, c: channel, h: height, w: width).
        rois (array): Input roi. The shape is expected to be :math:`(R, 4)`,
            and each datum is set as below: (y_min, x_min, y_max, x_max).
            The dtype is :obj:`numpy.float32`.
        roi_indices (array): Input roi indices. The shape is expected to
python
    def __init__(self, filename1, filename2):
        self._filename1 = filename1
        self._filename2 = filename2
        pass

def main():
    pass
python
    max_x = len(heightmap) - 1
    max_y = len(heightmap[0]) - 1
    if x + 1 <= max_x:
        aggregate_basin_coords(x + 1, y, heightmap, init=False)
    if y + 1 <= max_y:
        aggregate_basin_coords(x, y + 1, heightmap, init=False)
    if x - 1 >= 0:
        aggregate_basin_coords(x - 1, y, heightmap, init=False)
    if y - 1 >= 0:
        aggregate_basin_coords(x, y - 1, heightmap, init=False)
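A self-contained sketch of the neighbour-walking flood fill the fragment above performs; collecting visited coordinates into a set and treating height 9 as a basin wall are assumptions about the surrounding code, which is not shown.

def basin_coords(x, y, heightmap, seen=None):
    # recursively collect all coordinates belonging to the basin containing (x, y)
    if seen is None:
        seen = set()
    if (x, y) in seen or heightmap[x][y] == 9:  # 9s act as basin walls
        return seen
    seen.add((x, y))
    if x + 1 < len(heightmap):
        basin_coords(x + 1, y, heightmap, seen)
    if y + 1 < len(heightmap[0]):
        basin_coords(x, y + 1, heightmap, seen)
    if x - 1 >= 0:
        basin_coords(x - 1, y, heightmap, seen)
    if y - 1 >= 0:
        basin_coords(x, y - 1, heightmap, seen)
    return seen

print(len(basin_coords(0, 0, [[2, 1, 9], [3, 9, 9], [9, 8, 7]])))  # 3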
python
            with open(os.path.join(path_for_dictionary_dir, temp_key), 'w') as temp_file:
                temp_file.write(str(dictionary[temp_key]))
        else:
            np.save(os.path.join(path_for_dictionary_dir, temp_key), dictionary[temp_key])
    return
python
from app.tests.pgtest import gametest

gametest()
python
def test_write_bytes():
    path = "./tests/fixtures/bytes"
    write(random_bytes, path)
    content = io.open(path, 'rb').read()
    assert os.path.isfile(path)
    assert content == random_bytes

def test_write_handle_text():
    text = u"The quick brown 🦊 jumps over the lazy dog"
    path = "./tests/fixtures/string2.txt"
python
    description = db.StringField()
    start_date = db.StringField()
    apply_date = db.StringField()
    duration = db.IntField()
    stipend = db.IntField()
    applicants = db.ListField(db.StringField())
    status = db.StringField()
    problem_statement = db.StringField()
    solution = db.StringField()

class Scores(db.Document):
python
from numpy import ndarray
from collections import OrderedDict
from scipy import sparse
import os
import sklearn
# import numpy
import typing

# Custom import commands if any
import warnings
import numpy as np
from sklearn.utils import check_array
from sklearn.exceptions import NotFittedError
from sklearn.utils.validation import check_is_fitted
python
SUB = str.maketrans("0123456789", "₀₁₂₃₄₅₆₇₈₉")
SUP = str.maketrans("0123456789", "⁰¹²³⁴⁵⁶⁷⁸⁹")

from os.path import dirname, abspath

ROOT_PATH = dirname(dirname(__file__))
python
                x_speed = 3
            elif event.key == pygame.K_UP:
                y_speed = -3
            elif event.key == pygame.K_DOWN:
                y_speed = 3
        if event.type == pygame.KEYUP:
            if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT:
                x_speed = 0
python
print(weighted_mean_function)

from numpy import average
weighted_mean_numpy = average(houses_per_year['Mean Price'], weights=houses_per_year['Houses Sold'])
print(weighted_mean_numpy)

equal = round(weighted_mean_function, 10) == round(weighted_mean_numpy, 10)
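The snippet above compares a hand-rolled weighted mean against numpy.average. A plausible sketch of that hand-rolled counterpart is shown below; the function name and the example values are assumptions, since the original definition is not included in the fragment.

def weighted_mean(values, weights):
    # sum of value*weight divided by total weight
    return sum(v * w for v, w in zip(values, weights)) / sum(weights)

print(weighted_mean([100, 200], [3, 1]))  # 125.0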
python
        scheme, netloc, path, params, query, fragment = _safe_ParseResult(
            parse_url(url), encoding=encoding)
    except UnicodeEncodeError as e:
        scheme, netloc, path, params, query, fragment = _safe_ParseResult(
            parse_url(url), encoding='utf8')
python
        # bytes = ' '.join(bytes)
        # return bytes

    def asm(self, offset, instructions, delim=';'):
        '''
        :result
python
artist_url = artist["perma_url"] artist_token = extract_token(artist_url) artist_image_url = artist["image"] artist = Artist(artist_name, artist_token, artist_image_url, raw_json=artist) artists.append(artist) return artists