| Column | Type | Length / values |
|---|---|---|
| blob_id | string | length 40–40 |
| directory_id | string | length 40–40 |
| path | string | length 3–288 |
| content_id | string | length 40–40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40–40 |
| revision_id | string | length 40–40 |
| branch_name | string | 684 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M (contains nulls) |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 (contains nulls) |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 (contains nulls) |
| gha_language | string | 147 classes |
| src_encoding | string | 25 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 128 – 12.7k |
| extension | string | 142 classes |
| content | string | length 128 – 8.19k |
| authors | list | length 1–1 |
| author_id | string | length 1–132 |
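The table above is the column schema reported for this preview; the rows that follow carry one source file each, with repository metadata plus the raw file text in the `content` column. A minimal sketch of reading rows with this schema via the `datasets` library (the dataset id `org/python-files` below is a hypothetical placeholder, not taken from this page):

```python
from datasets import load_dataset

# Hypothetical repository id -- substitute the actual dataset path.
ds = load_dataset("org/python-files", split="train", streaming=True)

for row in ds.take(3):
    # Repository metadata sits alongside the raw source text in `content`.
    print(row["repo_name"], row["path"], row["license_type"], row["length_bytes"])
    print(row["content"][:200])
```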
1e21179f0570aa693579e6491ffaf4b3e2c88bff
|
b844c72c394b13d9ed4f73222a934f962d6ff187
|
/src/structures/program.py
|
f7b7a6f7418d7d3bc1faf0ca35d1146a66d15fcf
|
[] |
no_license
|
curtisbright/sagesat
|
b9b4c9180c75ce8574217058ffa4e121163ccf36
|
8fe52609ab6479d9b98a1e6cf2199a4f12c27777
|
refs/heads/master
| 2021-01-01T17:52:01.288449 | 2015-08-19T18:14:26 | 2015-08-19T18:14:26 | 41,425,883 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 401 |
py
|
'''
Created on Oct 17, 2014
@author: ezulkosk
'''
class Program(object):
    '''
    classdocs
    '''
    def __init__(self, ast):
        '''
        Constructors
        '''
        self.ast = ast
        self.bools = { }
        self.graphs = { }

    def toStr(self, indent):
        res = "Program\n"
        for i in self.ast:
            res += i.toStr(1) + "\n"
        return res
|
[
"[email protected]"
] | |
5f1c39b7f1a0f726f7dcc29c7018a74e5f080035
|
609eb72e6f9fefe18ebe806c2aed24bb5b0562c1
|
/apps/invoices/models.py
|
f0acc13e1d3d4a681f55e66650461091b02f2bd6
|
[
"MIT"
] |
permissive
|
PocketGM/django-htk
|
68b0f780e9f748932e857bf66f3e0ffdf9fb2fa2
|
371ce2c68bc825df174e11d0f6f4c489a8184d9f
|
refs/heads/master
| 2020-12-27T15:26:31.946007 | 2014-12-12T10:45:45 | 2014-12-12T10:45:45 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,266 |
py
|
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from htk.apps.invoices.constants import *
from htk.apps.invoices.utils import compute_invoice_code
from htk.fields import CurrencyField
from htk.utils.enums import enum_to_str
class BaseInvoice(models.Model):
    customer = models.ForeignKey(settings.HTK_INVOICE_CUSTOMER_MODEL, related_name='invoices')
    date = models.DateField()
    notes = models.TextField(max_length=256, blank=True)
    invoice_type = models.PositiveIntegerField(default=HTK_INVOICE_DEFAULT_TYPE.value)
    paid = models.BooleanField(default=False)
    payment_terms = models.PositiveIntegerField(default=HTK_INVOICE_DEFAULT_PAYMENT_TERM.value)

    class Meta:
        abstract = True

    def __unicode__(self):
        value = 'Invoice #%s' % self.id
        return value

    def get_encoded_id(self):
        invoice_code = compute_invoice_code(self)
        return invoice_code

    def get_url(self):
        url = reverse('invoices_invoice', args=(self.get_encoded_id(),))
        return url

    def get_total(self):
        line_items = self.line_items.all()
        subtotal = 0
        for line_item in line_items:
            subtotal += line_item.get_amount()
        return subtotal

    def get_invoice_type(self):
        from htk.apps.invoices.enums import InvoiceType
        invoice_type = InvoiceType(self.invoice_type)
        return invoice_type

    def get_payment_terms(self):
        from htk.apps.invoices.enums import InvoicePaymentTerm
        invoice_payment_term = InvoicePaymentTerm(self.payment_terms)
        str_value = enum_to_str(invoice_payment_term)
        return str_value


class BaseInvoiceLineItem(models.Model):
    invoice = models.ForeignKey(settings.HTK_INVOICE_MODEL, related_name='line_items')
    name = models.CharField(max_length=64)
    description = models.TextField(max_length=256)
    unit_cost = CurrencyField(default=0)
    quantity = models.PositiveIntegerField(default=1)

    class Meta:
        abstract = True

    def __unicode__(self):
        value = 'Line Item for Invoice #%s' % self.invoice.id
        return value

    def get_amount(self):
        amount = self.unit_cost * self.quantity
        return amount
|
[
"[email protected]"
] | |
4bd8dd1ae4d0a490aba2aeb6656b3246b7aa3b32
|
9d278285f2bc899ac93ec887b1c31880ed39bf56
|
/ondoc/doctor/migrations/0225_merge_20190314_1713.py
|
b3a7b682746a743fbde3d1c7d89eac3c2c5f43b0
|
[] |
no_license
|
ronit29/docprime
|
945c21f8787387b99e4916cb3ba1618bc2a85034
|
60d4caf6c52a8b70174a1f654bc792d825ba1054
|
refs/heads/master
| 2023-04-01T14:54:10.811765 | 2020-04-07T18:57:34 | 2020-04-07T18:57:34 | 353,953,576 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 286 |
py
|
# Generated by Django 2.0.5 on 2019-03-14 11:43
from django.db import migrations
class Migration(migrations.Migration):

    dependencies = [
        ('doctor', '0224_auto_20190314_1709'),
        ('doctor', '0223_providersignuplead_matrix_lead_id'),
    ]

    operations = [
    ]
|
[
"[email protected]"
] | |
c35aa7b06fb0de485363edc1da75caeecd3bf974
|
9dba277eeb0d5e9d2ac75e2e17ab5b5eda100612
|
/exercises/1901050046/d11/mymodule/main.py
|
3607d2c0e686c773d0b1336d3c3af49404d9e679
|
[] |
no_license
|
shen-huang/selfteaching-python-camp
|
e8410bfc06eca24ee2866c5d890fd063e9d4be89
|
459f90c9f09bd3a3df9e776fc64dfd64ac65f976
|
refs/heads/master
| 2022-05-02T05:39:08.932008 | 2022-03-17T07:56:30 | 2022-03-17T07:56:30 | 201,287,222 | 9 | 6 | null | 2019-08-08T15:34:26 | 2019-08-08T15:34:25 | null |
UTF-8
|
Python
| false | false | 867 |
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import yagmail
import getpass
import requests
import stats_word
from pyquery import PyQuery
#提取文章正文
r = requests.get('https://mp.weixin.qq.com/s/pLmuGoc4bZrMNl7MSoWgiA')
document = PyQuery(r.text)
content = document('#js_content').text()
#统计词频
result = stats_word.stats_text_cn(content,100,len_size = 2)
result_str = ""
for i in result:
    result_str += str(i)
print("统计结果为:", result_str)
#配置邮箱
sender = input("输入发件人邮箱:")
password = getpass.getpass('输入发件人邮箱密码:')
recipients = input('输入收件人邮箱:')
#链接邮箱服务器
yag = yagmail.SMTP( user= sender, password=password, host='smtp.sina.cn')
# 邮箱正文
contents = result_str
# 发送邮件
yag.send(recipients, '自学训练营学习4群 DAY11 sixthspace', contents)
|
[
"[email protected]"
] | |
47e2512f693b8d7dae3919a19c1129913658adac
|
2aace9bb170363e181eb7520e93def25f38dbe5c
|
/build/idea-sandbox/system/python_stubs/-57053121/scipy/stats/mvn.py
|
4839dbc5f44d33ff30f8a051e8b11af29844004e
|
[] |
no_license
|
qkpqkp/PlagCheck
|
13cb66fd2b2caa2451690bb72a2634bdaa07f1e6
|
d229904674a5a6e46738179c7494488ca930045e
|
refs/heads/master
| 2023-05-28T15:06:08.723143 | 2021-06-09T05:36:34 | 2021-06-09T05:36:34 | 375,235,940 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,653 |
py
|
# encoding: utf-8
# module scipy.stats.mvn
# from C:\Users\Doly\Anaconda3\lib\site-packages\scipy\stats\mvn.cp37-win_amd64.pyd
# by generator 1.147
"""
This module 'mvn' is auto-generated with f2py (version:2).
Functions:
value,inform = mvnun(lower,upper,means,covar,maxpts=d*1000,abseps=1e-06,releps=1e-06)
value,inform = mvnun_weighted(lower,upper,means,weights,covar,maxpts=d*1000,abseps=1e-06,releps=1e-06)
error,value,inform = mvndst(lower,upper,infin,correl,maxpts=2000,abseps=1e-06,releps=1e-06)
COMMON blocks:
/dkblck/ ivls
.
"""
# no imports
# Variables with simple values
__version__ = b'$Revision: $'
# functions
def dkblck(*args, **kwargs): # real signature unknown
    """ 'i'-scalar """
    pass


def mvndst(lower, upper, infin, correl, maxpts=None, abseps=None, releps=None): # real signature unknown; restored from __doc__
    """
    error,value,inform = mvndst(lower,upper,infin,correl,[maxpts,abseps,releps])

    Wrapper for ``mvndst``.

    Parameters
    ----------
    lower : input rank-1 array('d') with bounds (n)
    upper : input rank-1 array('d') with bounds (n)
    infin : input rank-1 array('i') with bounds (n)
    correl : input rank-1 array('d') with bounds (n*(n-1)/2)

    Other Parameters
    ----------------
    maxpts : input int, optional
        Default: 2000
    abseps : input float, optional
        Default: 1e-06
    releps : input float, optional
        Default: 1e-06

    Returns
    -------
    error : float
    value : float
    inform : int
    """
    pass


def mvnun(lower, upper, means, covar, maxpts=None, abseps=None, releps=None): # real signature unknown; restored from __doc__
    """
    value,inform = mvnun(lower,upper,means,covar,[maxpts,abseps,releps])

    Wrapper for ``mvnun``.

    Parameters
    ----------
    lower : input rank-1 array('d') with bounds (d)
    upper : input rank-1 array('d') with bounds (d)
    means : input rank-2 array('d') with bounds (d,n)
    covar : input rank-2 array('d') with bounds (d,d)

    Other Parameters
    ----------------
    maxpts : input int, optional
        Default: d*1000
    abseps : input float, optional
        Default: 1e-06
    releps : input float, optional
        Default: 1e-06

    Returns
    -------
    value : float
    inform : int
    """
    pass


def mvnun_weighted(lower, upper, means, weights, covar, maxpts=None, abseps=None, releps=None): # real signature unknown; restored from __doc__
    """
    value,inform = mvnun_weighted(lower,upper,means,weights,covar,[maxpts,abseps,releps])

    Wrapper for ``mvnun_weighted``.

    Parameters
    ----------
    lower : input rank-1 array('d') with bounds (d)
    upper : input rank-1 array('d') with bounds (d)
    means : input rank-2 array('d') with bounds (d,n)
    weights : input rank-1 array('d') with bounds (n)
    covar : input rank-2 array('d') with bounds (d,d)

    Other Parameters
    ----------------
    maxpts : input int, optional
        Default: d*1000
    abseps : input float, optional
        Default: 1e-06
    releps : input float, optional
        Default: 1e-06

    Returns
    -------
    value : float
    inform : int
    """
    pass
# no classes
# variables with complex values
__loader__ = None # (!) real value is '<_frozen_importlib_external.ExtensionFileLoader object at 0x000001CB57F39940>'
__spec__ = None # (!) real value is "ModuleSpec(name='scipy.stats.mvn', loader=<_frozen_importlib_external.ExtensionFileLoader object at 0x000001CB57F39940>, origin='C:\\\\Users\\\\Doly\\\\Anaconda3\\\\lib\\\\site-packages\\\\scipy\\\\stats\\\\mvn.cp37-win_amd64.pyd')"
|
[
"[email protected]"
] | |
3df813b7b10c86143253c4d824e3408615ad7b62
|
5141a9446464e03df639093bb75307c3a421084b
|
/sim/materials/pitch/segment_03/pitch_handlers.py
|
edc792cea7b2c4d034c339a6744fc4dab648d58e
|
[] |
no_license
|
GregoryREvans/sim
|
b32faaa4ec0288dfc03d33e27a46971c30bf6c33
|
d9be48232c41365759d551137786627cff140abc
|
refs/heads/master
| 2022-02-26T15:54:15.807251 | 2022-02-10T13:51:19 | 2022-02-10T13:51:19 | 248,852,915 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,623 |
py
|
import evans
from sim.materials.pitch.segment_03.pitches import (
voice_1_chord_1,
voice_1_chord_2,
voice_1_chord_3,
voice_1_chord_4,
voice_1_chord_5,
voice_1_chord_6,
voice_1_chord_7,
voice_1_chord_8,
voice_2_chord_1,
voice_2_chord_2,
voice_2_chord_3,
voice_2_chord_4,
voice_2_chord_5,
voice_2_chord_6,
voice_2_chord_7,
voice_2_chord_8,
voice_3_chord_1,
voice_3_chord_2,
voice_3_chord_3,
voice_3_chord_4,
voice_3_chord_5,
voice_3_chord_6,
voice_3_chord_7,
voice_3_chord_8,
voice_4_chord_1,
voice_4_chord_2,
voice_4_chord_3,
voice_4_chord_4,
voice_4_chord_5,
voice_4_chord_6,
voice_4_chord_7,
voice_4_chord_8,
)
piano_pitch_handler_one_1 = evans.PitchHandler(
pitch_list=voice_1_chord_1, forget=False, name="voice_1_chord_1"
)
piano_pitch_handler_one_2 = evans.PitchHandler(
pitch_list=voice_1_chord_2, forget=False, name="voice_1_chord_2"
)
piano_pitch_handler_one_3 = evans.PitchHandler(
pitch_list=voice_1_chord_3, forget=False, name="voice_1_chord_3"
)
piano_pitch_handler_one_4 = evans.PitchHandler(
pitch_list=voice_1_chord_4, forget=False, name="voice_1_chord_4"
)
piano_pitch_handler_one_5 = evans.PitchHandler(
pitch_list=voice_1_chord_5, forget=False, name="voice_1_chord_5"
)
piano_pitch_handler_one_6 = evans.PitchHandler(
pitch_list=voice_1_chord_6, forget=False, name="voice_1_chord_6"
)
piano_pitch_handler_one_7 = evans.PitchHandler(
pitch_list=voice_1_chord_7, forget=False, name="voice_1_chord_7"
)
piano_pitch_handler_one_8 = evans.PitchHandler(
pitch_list=voice_1_chord_8, forget=False, name="voice_1_chord_8"
)
# ##
piano_pitch_handler_two_1 = evans.PitchHandler(
pitch_list=voice_2_chord_1, forget=False, name="voice_2_chord_1"
)
piano_pitch_handler_two_2 = evans.PitchHandler(
pitch_list=voice_2_chord_2, forget=False, name="voice_2_chord_2"
)
piano_pitch_handler_two_3 = evans.PitchHandler(
pitch_list=voice_2_chord_3, forget=False, name="voice_2_chord_3"
)
piano_pitch_handler_two_4 = evans.PitchHandler(
pitch_list=voice_2_chord_4, forget=False, name="voice_2_chord_4"
)
piano_pitch_handler_two_5 = evans.PitchHandler(
pitch_list=voice_2_chord_5, forget=False, name="voice_2_chord_5"
)
piano_pitch_handler_two_6 = evans.PitchHandler(
pitch_list=voice_2_chord_6, forget=False, name="voice_2_chord_6"
)
piano_pitch_handler_two_7 = evans.PitchHandler(
pitch_list=voice_2_chord_7, forget=False, name="voice_2_chord_7"
)
piano_pitch_handler_two_8 = evans.PitchHandler(
pitch_list=voice_2_chord_8, forget=False, name="voice_2_chord_8"
)
# ##
piano_pitch_handler_three_1 = evans.PitchHandler(
pitch_list=voice_3_chord_1, forget=False, name="voice_3_chord_1"
)
piano_pitch_handler_three_2 = evans.PitchHandler(
pitch_list=voice_3_chord_2, forget=False, name="voice_3_chord_2"
)
piano_pitch_handler_three_3 = evans.PitchHandler(
pitch_list=voice_3_chord_3, forget=False, name="voice_3_chord_3"
)
piano_pitch_handler_three_4 = evans.PitchHandler(
pitch_list=voice_3_chord_4, forget=False, name="voice_3_chord_4"
)
piano_pitch_handler_three_5 = evans.PitchHandler(
pitch_list=voice_3_chord_5, forget=False, name="voice_3_chord_5"
)
piano_pitch_handler_three_6 = evans.PitchHandler(
pitch_list=voice_3_chord_6, forget=False, name="voice_3_chord_6"
)
piano_pitch_handler_three_7 = evans.PitchHandler(
pitch_list=voice_3_chord_7, forget=False, name="voice_3_chord_7"
)
piano_pitch_handler_three_8 = evans.PitchHandler(
pitch_list=voice_3_chord_8, forget=False, name="voice_3_chord_8"
)
# ##
piano_pitch_handler_four_1 = evans.PitchHandler(
pitch_list=voice_4_chord_1, forget=False, name="voice_4_chord_1"
)
piano_pitch_handler_four_2 = evans.PitchHandler(
pitch_list=voice_4_chord_2, forget=False, name="voice_4_chord_2"
)
piano_pitch_handler_four_3 = evans.PitchHandler(
pitch_list=voice_4_chord_3, forget=False, name="voice_4_chord_3"
)
piano_pitch_handler_four_4 = evans.PitchHandler(
pitch_list=voice_4_chord_4, forget=False, name="voice_4_chord_4"
)
piano_pitch_handler_four_5 = evans.PitchHandler(
pitch_list=voice_4_chord_5, forget=False, name="voice_4_chord_5"
)
piano_pitch_handler_four_6 = evans.PitchHandler(
pitch_list=voice_4_chord_6, forget=False, name="voice_4_chord_6"
)
piano_pitch_handler_four_7 = evans.PitchHandler(
pitch_list=voice_4_chord_7, forget=False, name="voice_4_chord_7"
)
piano_pitch_handler_four_8 = evans.PitchHandler(
pitch_list=voice_4_chord_8, forget=False, name="voice_4_chord_8"
)
|
[
"[email protected]"
] | |
03b959614e50f787f0f04938891a9d27ef1ec31b
|
c50e7eb190802d7849c0d0cea02fb4d2f0021777
|
/src/storage-blob-preview/azext_storage_blob_preview/tests/latest/test_storage_sas_scenarios.py
|
7fe8ebfd5f1fd7662ee08735a0b96f88a2c80fb6
|
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
Azure/azure-cli-extensions
|
c1615b19930bba7166c282918f166cd40ff6609c
|
b8c2cf97e991adf0c0a207d810316b8f4686dc29
|
refs/heads/main
| 2023-08-24T12:40:15.528432 | 2023-08-24T09:17:25 | 2023-08-24T09:17:25 | 106,580,024 | 336 | 1,226 |
MIT
| 2023-09-14T10:48:57 | 2017-10-11T16:27:31 |
Python
|
UTF-8
|
Python
| false | false | 7,018 |
py
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from datetime import datetime, timedelta
from azure.cli.testsdk import (LiveScenarioTest, ResourceGroupPreparer, StorageAccountPreparer,
JMESPathCheck, JMESPathCheckExists, NoneCheck)
from ..storage_test_util import StorageScenarioMixin
class StorageSASScenario(StorageScenarioMixin, LiveScenarioTest):
    @ResourceGroupPreparer()
    @StorageAccountPreparer(name_prefix='blobsas', kind='StorageV2', location='eastus2euap')
    def test_storage_blob_sas_permission_scenario(self, resource_group, storage_account):
        """
        Test service SAS operations.

        A service SAS is secured with the storage account key. A service SAS delegates access to a resource in only
        one of the Azure Storage services: Blob storage, Queue storage, Table storage, or Azure Files.
        """
        expiry = (datetime.utcnow() + timedelta(hours=1)).strftime('%Y-%m-%dT%H:%MZ')
        account_info = self.get_account_info(resource_group, storage_account)
        container = self.create_container(account_info)
        local_file = self.create_temp_file(128, full_random=False)
        blob_name = self.create_random_name('blob', 16)

        self.kwargs.update({
            'expiry': expiry,
            'account': storage_account,
            'container': container,
            'local_file': local_file,
            'blob': blob_name
        })

        # ----account key----
        # test sas-token for a container
        sas = self.storage_cmd('storage container generate-sas -n {} --https-only --permissions dlrwt --expiry {} -otsv',
                               account_info, container, expiry).output.strip()
        self.kwargs['container_sas'] = sas
        self.cmd('storage blob upload -c {container} -f "{local_file}" -n {blob} '
                 '--account-name {account} --sas-token "{container_sas}"')

        # test sas-token for a blob
        sas = self.storage_cmd('storage blob generate-sas -c {} -n {} --https-only --permissions acdrwt --expiry {} '
                               '-otsv', account_info, container, blob_name, expiry).output.strip()
        self.kwargs['blob_sas'] = sas
        self.cmd('storage blob upload -c {container} -f "{local_file}" -n {blob} --overwrite '
                 '--account-name {account} --sas-token "{blob_sas}" --tags test=tag ')
        self.cmd('storage blob show -c {container} -n {blob} --account-name {account} --sas-token {blob_sas}') \
            .assert_with_checks(JMESPathCheck('name', blob_name),
                                JMESPathCheck('tagCount', 1))
        self.cmd('storage blob tag list -n {} -c {} --account-name {} --sas-token "{}" '.format(blob_name,
                 container, storage_account, sas)).assert_with_checks(JMESPathCheck('test', 'tag'))

        # ----connection string----
        connection_str = self.cmd('storage account show-connection-string -n {account} --query connectionString '
                                  '-otsv').output.strip()
        self.kwargs['con_str'] = connection_str
        # test sas-token for a container
        sas = self.cmd('storage container generate-sas -n {container} --https-only --permissions dlrw '
                       '--connection-string {con_str} --expiry {expiry} -otsv').output.strip()
        self.kwargs['container_sas'] = sas
        self.cmd('storage blob upload -c {container} -f "{local_file}" -n {blob} '
                 '--account-name {account} --sas-token "{container_sas}"')

        # test sas-token for a blob
        sas = self.cmd('storage blob generate-sas -c {container} -n {blob} --account-name {account} --https-only '
                       '--permissions acdrwt --expiry {expiry} -otsv').output.strip()
        self.kwargs['blob_sas'] = sas
        self.cmd('storage blob show -c {container} -n {blob} --account-name {account} --sas-token {blob_sas}') \
            .assert_with_checks(JMESPathCheck('name', blob_name))

    @ResourceGroupPreparer()
    @StorageAccountPreparer()
    def test_storage_blob_sas_permission_scenario(self, resource_group, storage_account):
        """
        Test service SAS with stored access policy.

        A stored access policy is defined on a resource container, which can be a blob container, table, queue,
        or file share. The stored access policy can be used to manage constraints for one or more service shared
        access signatures. When you associate a service SAS with a stored access policy, the SAS inherits the
        constraints—the start time, expiry time, and permissions—defined for the stored access policy.
        """
        expiry = (datetime.utcnow() + timedelta(hours=1)).strftime('%Y-%m-%dT%H:%MZ')
        account_info = self.get_account_info(resource_group, storage_account)
        container = self.create_container(account_info)
        local_file = self.create_temp_file(128, full_random=False)
        blob_name = self.create_random_name('blob', 16)
        policy = self.create_random_name('policy', 16)

        self.storage_cmd('storage container policy create -c {} -n {} --expiry {} --permissions acdlrw', account_info,
                         container, policy, expiry)
        self.storage_cmd('storage container policy list -c {} ', account_info, container)\
            .assert_with_checks(JMESPathCheckExists('{}.expiry'.format(policy)),
                                JMESPathCheck('{}.permission'.format(policy), 'racwdl'))
        self.storage_cmd('storage container policy show -c {} -n {} ', account_info, container, policy, expiry)\
            .assert_with_checks(JMESPathCheckExists('expiry'),
                                JMESPathCheck('permission', 'racwdl'))
        sas = self.storage_cmd('storage blob generate-sas -n {} -c {} --policy-name {} -otsv ', account_info, blob_name,
                               container, policy).output.strip()
        self.storage_cmd('storage blob upload -n {} -c {} -f "{}" --sas-token "{}" ', account_info, blob_name, container,
                         local_file, sas)

        self.storage_cmd('storage container policy update -c {} -n {} --permissions acdlr', account_info, container,
                         policy)
        self.storage_cmd('storage container policy show -c {} -n {} ', account_info, container, policy)\
            .assert_with_checks(JMESPathCheckExists('expiry'),
                                JMESPathCheck('permission', 'racdl'))
        self.storage_cmd('storage container policy delete -c {} -n {} ', account_info, container, policy)
        self.storage_cmd('storage container policy list -c {} ', account_info, container) \
            .assert_with_checks(NoneCheck())
|
[
"[email protected]"
] | |
c0aeb537a3746a1fd86d34cb9b84507d15349fce
|
527fd39d3a1555800c2c32025fdd15fd86ba6672
|
/make_Flexible_Finction/args.py
|
0c2e15b007ee07cc6e3d18abdd7e1d7b7d6597f0
|
[] |
no_license
|
rohanwarange/Python-Tutorials
|
cfd39551f7ff62bd032946976ba3820474e42405
|
53d8fb226f94d027ae7999f9678697206d37d83a
|
refs/heads/master
| 2023-06-18T10:45:36.884324 | 2021-07-07T17:44:22 | 2021-07-07T17:44:22 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 187 |
py
|
def total(a,b):
    return a+b

print(total(5,5))

# *args
def total(*args):
    print(args)
    total=0
    for num in args:
        total+=num
    return total

print(total(1,2,3,4,5))
|
[
"[email protected]"
] | |
4cd3d45d04bbde67e61a64cad5efbb27ea26f331
|
8a41a7f9340cfa784cb36d35dca1ecb1630e4097
|
/Programming/Python/dict_practice/Dictionaries_Ordered.py
|
daef72a4b09a83b3aa461282c1f773d366a4206e
|
[] |
no_license
|
anishst/Learn
|
02e6b6cce43cf21621d328ef0fc25168267a9a3d
|
a1aed8b78b19acdb23e20be57b67fb242e0aefc5
|
refs/heads/master
| 2022-05-13T10:17:40.293640 | 2022-03-30T12:44:21 | 2022-03-30T12:44:21 | 173,595,812 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 179 |
py
|
from collections import OrderedDict
d = OrderedDict()
d['foo'] = 1
d['bar'] = 2
d['spam'] = 3
d['groom'] = 4
for key in d:
    print(key,d[key])
import json
print(json.dumps(d))
|
[
"[email protected]"
] | |
52b7b2f5dcb464cd81400a0fb5ba7962dfdc5ca5
|
a03a7935a191d63bee76fd3b85a61ee27f98904a
|
/src/visitpy/visit_utils/setup.py
|
3e5361e0abaa404122a4e29ab1228fc6a4e762b9
|
[] |
no_license
|
cchriste/visit
|
57091c4a512ab87efd17c64c7494aa4cf01b7e53
|
c72c413f571e56b52fb7221955219f11f4ba19e3
|
refs/heads/master
| 2020-04-12T06:25:27.458132 | 2015-10-12T15:41:49 | 2015-10-12T15:41:49 | 10,111,791 | 5 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,130 |
py
|
#*****************************************************************************
#
# Copyright (c) 2000 - 2015, Lawrence Livermore National Security, LLC
# Produced at the Lawrence Livermore National Laboratory
# LLNL-CODE-442911
# All rights reserved.
#
# This file is part of VisIt. For details, see https://visit.llnl.gov/. The
# full copyright notice is contained in the file COPYRIGHT located at the root
# of the VisIt distribution or at http://www.llnl.gov/visit/copyright.html.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the disclaimer below.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the disclaimer (as noted below) in the
# documentation and/or other materials provided with the distribution.
# - Neither the name of the LLNS/LLNL nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL LAWRENCE LIVERMORE NATIONAL SECURITY,
# LLC, THE U.S. DEPARTMENT OF ENERGY OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
#*****************************************************************************
"""
file: setup.py
author: Cyrus Harrison <[email protected]>
distutils setup script for the 'visit_utils' module.
"""
import sys
__system_bytecode_setting = sys.dont_write_bytecode
sys.dont_write_bytecode = True
from distutils.core import setup
import sys
import setup_tests
#
# Support running tests w/ visit's cli.
#
using_visit = False
try:
    # the command line string passed to cli
    # will confuse distutils, so modify
    # sys.argv to only have args passed after
    # '-s setup.py'
    args = Argv()
    sys.argv = [__file__]
    sys.argv.extend(args)
    using_visit = True
except:
    pass

setup(name='visit_utils',
      version='0.1',
      author = 'Cyrus Harrison',
      author_email = '[email protected]',
      description='VisIt Utilties Module',
      package_dir = {'visit_utils':'src'},
      packages=['visit_utils','visit_utils.qannote'],
      cmdclass = { 'test': setup_tests.ExecuteTests})

if using_visit:
    sys.exit(0)
|
[
"bonnell@18c085ea-50e0-402c-830e-de6fd14e8384"
] |
bonnell@18c085ea-50e0-402c-830e-de6fd14e8384
|
fe14c9a96e8cc5ceb988566f3f44b607c74ee60f
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03729/s464510871.py
|
1871472a58b9eb44569fe01d1ae8219a23ea2c08
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 206 |
py
|
def main():
    s1,s2,s3 = map(str,input().split())
    ans = False
    if s1[len(s1)-1] == s2[0]:
        if s2[len(s2)-1]==s3[0]:
            ans = True
    print("YES" if ans else "NO")

if __name__ == '__main__':
    main()
|
[
"[email protected]"
] | |
cc0c913331abe5f50e3e501719d521e817e06232
|
915865db25d918a4b2c3296aaa702fedf784b042
|
/experiments/amplitude/filters_and_envelope.py
|
7fc2b2a7fdaa51ea790575b8d8a11459ab693256
|
[] |
no_license
|
nikolaims/pi_nfb
|
f456484683f31986d12f659ee2a9227a51d5edf4
|
789ad0e20fac7f8d0843b5c3af834e23dcc65e33
|
refs/heads/master
| 2020-04-05T08:09:23.416703 | 2017-07-25T08:55:32 | 2017-07-25T08:55:32 | 81,811,536 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,404 |
py
|
import pylab as plt
from scipy.signal import *
from utils.data.loaders import get_ideal_signal, load_feedback, get_signal
from utils.filters import magic_filter_taps, min_phase_magic_filter
from utils.sinbase import get_base
from itertools import combinations
import numpy as np
n = 10000
n_components = 50
fs = 250
band = (8, 12)
time = np.arange(n) / fs
# load signal
signal = get_signal()[:n, 15]
# IIR filt filt
w = 0.1
gain = [0, 0, 1, 1, 0, 0]
taps = firwin2(1000, [0 , band[0]-w, band[0], band[1], band[1]+w, fs/2], gain, nyq=fs/2)
ideal = filtfilt(taps, 1, signal)
plt.plot(np.abs(ideal), 'b', alpha=0.6)
plt.plot(np.abs(hilbert(ideal)), 'b')
# fft
from scipy.fftpack import rfft, irfft, fftfreq
W = fftfreq(signal.size, d=1/fs*2)
f_signal = rfft(signal)
cut_f_signal = f_signal.copy()
cut_f_signal[(W<8) | (W>12)] = 0
cut_signal = irfft(cut_f_signal)
plt.plot(np.abs(cut_signal), 'k', alpha=0.6)
plt.plot(np.abs(hilbert(cut_signal)), 'k')
print(np.mean((np.abs(hilbert(cut_signal)) - np.abs(hilbert(ideal)))**2)/np.var(np.abs(hilbert(cut_signal))))
# fir minphase
fir_signal = lfilter(min_phase_magic_filter(), 1, signal)[28:]
plt.plot(np.abs(fir_signal), 'g', alpha=0.6)
plt.plot(np.abs(hilbert(fir_signal)), 'g')
# iir fir
fir_signal = lfilter(magic_filter_taps(), 1, signal)[28:]
plt.plot(np.abs(fir_signal), 'r', alpha=0.6)
plt.plot(np.abs(hilbert(fir_signal)), 'r')
plt.show()
|
[
"[email protected]"
] | |
df9acff7102ba7093d1df918c8721d0bccd54c52
|
89ad82bfa5bb3aa3312c815f4199c88e12345974
|
/test/test_ifc_checker_checkplan.py
|
49cf0d81acc925c49a8f75cc27b0302c48ded0e7
|
[] |
no_license
|
lucaslmmanoel/python-api-client
|
020f55251af5d86a895740d806618ba94f1863b0
|
49dbabfddb576d3b816c84d86f5c1f080f553704
|
refs/heads/master
| 2021-02-15T22:45:14.735020 | 2020-03-04T14:02:29 | 2020-03-04T14:02:29 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,110 |
py
|
# coding: utf-8
"""
BIMData API
BIMData API is a tool to interact with your models stored on BIMData’s servers. Through the API, you can manage your projects, the clouds, upload your IFC files and manage them through endpoints. # noqa: E501
The version of the OpenAPI document: v1
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import bimdata_api_client
from bimdata_api_client.models.ifc_checker_checkplan import IfcCheckerCheckplan # noqa: E501
from bimdata_api_client.rest import ApiException
class TestIfcCheckerCheckplan(unittest.TestCase):
    """IfcCheckerCheckplan unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testIfcCheckerCheckplan(self):
        """Test IfcCheckerCheckplan"""
        # FIXME: construct object with mandatory attributes with example values
        # model = bimdata_api_client.models.ifc_checker_checkplan.IfcCheckerCheckplan()  # noqa: E501
        pass


if __name__ == '__main__':
    unittest.main()
|
[
"[email protected]"
] | |
01c8ccbe9e6889846c28217f8e9266445fb7c747
|
4d0eae8dc3c7dabe2ff79fd8ad035be056423ac7
|
/weixincl/weixincl/apps/weixin/urls.py
|
beb633d21134a141c05983baea470dfeeeaf34ee
|
[
"MIT"
] |
permissive
|
lionsin/weixincl
|
2cdb83e3e596c7fe168b31ed6f747715f7919024
|
e02f342b00eaea2704a34ca889903747b0fbb167
|
refs/heads/master
| 2020-11-30T00:48:33.053979 | 2019-12-26T14:16:04 | 2019-12-26T14:16:04 | 230,254,861 | 1 | 2 |
MIT
| 2019-12-26T14:54:06 | 2019-12-26T11:45:49 |
Python
|
UTF-8
|
Python
| false | false | 928 |
py
|
from django.conf.urls import url
from weixin import views
urlpatterns = [
    # Articles/search?
    url(r"^pcnum/search$", views.PcSearchListView.as_view()),
    url(r"^collect_list$", views.AddCollectList.as_view()),
    url(r"^collect_list/clear$", views.ClearCollectList.as_view()),
    url(r"^collect_list/delete$", views.ClearCollectList.as_view()),
    url(r"^tasks/add$", views.TaskaddAPIView.as_view()),
    url(r"^tasks/list$", views.TasklistAPIView.as_view()),
    url(r"^tasks/detail$", views.TaskShowDetailAPIView.as_view()),
    url(r"^tasks/delete$", views.TaskdeleteAPIView.as_view()),
    # 查看任务文章列表
    url(r"^task_arts/list$", views.ArticleShowDetailAPIView.as_view()),
    url(r"^history/add$", views.PcSearchHistoryView.as_view()),
    url(r"^history$", views.PcSearchHistoryView.as_view()),
    # 清空采集列表页
    url(r"^history/clear$", views.HistoryClearAPIView.as_view()),
]
|
[
"="
] |
=
|
f85c12fb2141e0a77279dc13b68fe54489ab674f
|
a26554d068f564f2811e7774f5df44326dd6978b
|
/04. FUNCTIONS/04-02-07. Perfect Number.py
|
e62a4aef3a2b7574dae7bd4f64c0788746f07a0e
|
[] |
no_license
|
emma-metodieva/SoftUni_Python_Fundamentals_202009
|
300c8323308af8a7be4efe42836dd6a7866a34b0
|
5f58a2d565dc4c3bf28330888aa6a2d3b1f8125f
|
refs/heads/master
| 2023-01-31T15:28:04.360805 | 2020-12-12T20:39:17 | 2020-12-12T20:39:17 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 437 |
py
|
# 04-02. FUNCTIONS [Exercise]
# 07. Perfect Number
def perfect_number(number):
    perfect = False
    sum_divisors = 0
    if number > 0:
        for i in range(1, number):
            if number % i == 0:
                sum_divisors += i
        if sum_divisors == number:
            perfect = True
    return perfect


if perfect_number(int(input())):
    print('We have a perfect number!')
else:
    print('It\'s not so perfect.')
|
[
"[email protected]"
] | |
4d8545ec4c2396a029e4df3f841a6a2014442e80
|
4edbeb3e2d3263897810a358d8c95854a468c3ca
|
/python3/urllib/urllib1.py
|
0a567f305976d5ebce0f80d94faea4c0b095cdf5
|
[
"MIT"
] |
permissive
|
jtraver/dev
|
f505d15d45b67a59d11306cc7252114c265f388b
|
2197e3443c7619b856470558b737d85fe1f77a5a
|
refs/heads/master
| 2023-08-06T02:17:58.601861 | 2023-08-01T16:58:44 | 2023-08-01T16:58:44 | 14,509,952 | 0 | 1 |
MIT
| 2020-10-14T18:32:48 | 2013-11-19T00:51:19 |
Python
|
UTF-8
|
Python
| false | false | 479 |
py
|
#!/usr/bin/env python3
#!/usr/bin/python
import urllib.request, urllib.parse, urllib.error
def main():
    # url = 'https://screener.finance.yahoo.com/stocks.html'
    url = 'https://screener.finance.yahoo.com/b?sc=&im=&prmin=0&prmax=&mcmin=&mcmax=&dvymin=0&dvymax=&betamin=&betamax=&remin=&remax=&pmmin=&pmmax=&pemin=&pemax=&pbmin=&pbmax=&psmin=&psmax=&pegmin=&pegmax=&gr=&grfy=&ar=&vw=1&db=stocks'
    html = urllib.request.urlopen(url)
    print("%s" % html.read())

main()
|
[
"[email protected]"
] | |
e1450556c70c9b5607a7ff05bc84adab3fea1d72
|
732b0b3e2ae0e6c498cfd2ed893de60b9fc22a32
|
/tests/integration/actions/collections/test_direct_interactive_ee.py
|
efb5c28f74c6641d5927e5003b6c20613e8003f6
|
[
"Apache-2.0"
] |
permissive
|
didib/ansible-navigator
|
eb7b77c1df30b2e90b663383f0f76b6224e92c02
|
62fdbd05f25fb2d79133b3ab207f53ac2f2d6d36
|
refs/heads/main
| 2023-08-30T06:43:42.876079 | 2021-10-14T18:42:17 | 2021-10-14T18:42:17 | 425,540,819 | 0 | 0 |
Apache-2.0
| 2021-11-07T15:27:54 | 2021-11-07T15:27:53 | null |
UTF-8
|
Python
| false | false | 1,161 |
py
|
""" collections direct from cli interactive with ee
"""
import pytest
from .base import BaseClass
CLI = "ansible-navigator collections --execution-environment true"
testdata = [
    (0, CLI, "ansible-navigator collections browse window"),
    (1, ":0", "Browse testorg.coll_1 plugins window"),
    (2, ":0", "lookup_1 plugin docs window"),
    (3, ":back", "Back to browse testorg.coll_1 plugins window"),
    (4, ":1", "mod_1 plugin docs window"),
    (5, ":back", "Back to browse testorg.coll_1 plugins window"),
    (6, ":back", "Back to ansible-navigator collections browse window"),
    (7, ":1", "Browse testorg.coll_2 plugins window"),
    (8, ":0", "lookup_2 plugin docs window"),
    (9, ":back", "Back to browse testorg.coll_2 plugins window"),
    (10, ":1", "mod_2 plugin docs window"),
    (11, ":back", "Back to browse testorg.coll_2 plugins window"),
    (12, ":back", "Back to ansible-navigator collections browse window"),
]


@pytest.mark.parametrize("index, user_input, comment", testdata)
class Test(BaseClass):
    """run the tests"""

    TEST_FOR_MODE = "interactive"
    EXECUTION_ENVIRONMENT_TEST = True
    UPDATE_FIXTURES = False
|
[
"[email protected]"
] | |
ff2b2b75ab60f2bcdf63d75afe4f428eebcd8f6a
|
4488e3c26de4291da447d8251c491b43cb810f7c
|
/orgstruct_partner_zip/__openerp__.py
|
a3cafaa4ab0bc9f4d686889567eae43e20dccf4d
|
[] |
no_license
|
smart-solution/odoo-crm-80
|
b19592ce6e374c9c7b0a3198498930ffb1283018
|
85dfd0cc37f81bcba24d2a0091094708a262fe2c
|
refs/heads/master
| 2016-09-06T06:04:35.191924 | 2015-07-14T12:48:28 | 2015-07-14T12:48:28 | 33,174,511 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,426 |
py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Organisation Structure Partner Zip',
    'version': '1.0',
    'category': 'Base',
    'description': """
Manage organisation structure
""",
    'author': 'Smart Solotution',
    'website': 'http://www.smartsolution.be',
    'depends': ['orgstruct'],
    'data': [
        'orgstruct_partner_zip_view.xml',
    ],
    'installable': True,
    'application': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[
"[email protected]"
] | |
3a53cb314b58082e61aced67aaaa888078c41c10
|
4349c9bea560b094c9c84540b539b612bef40953
|
/subliminal/plugins/Subtitulos.py
|
bcb26d7660b42c8945d9ef49c23251f6c806f8e1
|
[] |
no_license
|
fgel/subliminal
|
456c263603cbe5143e6b6343930222ece9c465dc
|
3cf265f6c978506d02e74c87cadd0a8e6c6419fe
|
refs/heads/master
| 2021-01-18T09:21:04.687551 | 2011-11-11T19:37:19 | 2011-11-11T19:37:19 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,288 |
py
|
# -*- coding: utf-8 -*-
#
# Subliminal - Subtitles, faster than your thoughts
# Copyright (c) 2008-2011 Patrick Dessalle <[email protected]>
# Copyright (c) 2011 Antoine Bertin <[email protected]>
#
# This file is part of Subliminal.
#
# Subliminal is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from BeautifulSoup import BeautifulSoup
import guessit
import urllib2
import unicodedata
import re
import PluginBase
from subliminal.classes import Subtitle
class Subtitulos(PluginBase.PluginBase):
    site_url = 'http://www.subtitulos.es'
    site_name = 'Subtitulos'
    server_url = 'http://www.subtitulos.es'
    api_based = False
    _plugin_languages = {u'English (US)': 'en', u'English (UK)': 'en', u'English': 'en', u'French': 'fr', u'Brazilian': 'pt-br',
                         u'Portuguese': 'pt', u'Español (Latinoamérica)': 'es', u'Español (España)': 'es', u'Español': 'es', u'Italian': 'it',
                         u'Català': 'ca'}

    def __init__(self, config_dict=None):
        super(Subtitulos, self).__init__(self._plugin_languages, config_dict, True)
        self.release_pattern = re.compile('Versión (.+) ([0-9]+).([0-9])+ megabytes')

    def list(self, filepath, languages):
        possible_languages = self.possible_languages(languages)
        if not possible_languages:
            return []
        guess = guessit.guess_file_info(filepath, 'autodetect')
        if guess['type'] != 'episode':
            self.logger.debug(u'Not an episode')
            return []
        # add multiple things to the release group set
        release_group = set()
        if 'releaseGroup' in guess:
            release_group.add(guess['releaseGroup'].lower())
        else:
            if 'title' in guess:
                release_group.add(guess['title'].lower())
            if 'screenSize' in guess:
                release_group.add(guess['screenSize'].lower())
        if 'series' not in guess or len(release_group) == 0:
            self.logger.debug(u'Not enough information to proceed')
            return []
        self.release_group = release_group  # used to sort results
        return self.query(guess['series'], guess['season'], guess['episodeNumber'], release_group, filepath, possible_languages)

    def query(self, name, season, episode, release_group, filepath, languages):
        sublinks = []
        searchname = name.lower().replace(' ', '-')
        if isinstance(searchname, unicode):
            searchname = unicodedata.normalize('NFKD', searchname).encode('ascii','ignore')
        searchurl = '%s/%s/%sx%.2d' % (self.server_url, urllib2.quote(searchname), season, episode)
        self.logger.debug(u'Searching in %s' % searchurl)
        try:
            req = urllib2.Request(searchurl, headers={'User-Agent': self.user_agent})
            page = urllib2.urlopen(req, timeout=self.timeout)
        except urllib2.HTTPError as inst:
            self.logger.info(u'Error: %s - %s' % (searchurl, inst))
            return []
        except urllib2.URLError as inst:
            self.logger.info(u'TimeOut: %s' % inst)
            return []
        soup = BeautifulSoup(page.read())
        for subs in soup('div', {'id': 'version'}):
            version = subs.find('p', {'class': 'title-sub'})
            sub_teams = self.listTeams([self.release_pattern.search('%s' % version.contents[1]).group(1).lower()], ['.', '_', ' ', '/', '-'])
            self.logger.debug(u'Team from website: %s' % sub_teams)
            self.logger.debug(u'Team from file: %s' % release_group)
            if not release_group.intersection(sub_teams):  # On wrong team
                continue
            for html_language in subs.findAllNext('ul', {'class': 'sslist'}):
                sub_language = self.getRevertLanguage(html_language.findNext('li', {'class': 'li-idioma'}).find('strong').contents[0].string.strip())
                if not sub_language in languages:  # On wrong language
                    continue
                html_status = html_language.findNext('li', {'class': 'li-estado green'})
                sub_status = html_status.contents[0].string.strip()
                if not sub_status == 'Completado':  # On not completed subtitles
                    continue
                sub_link = html_status.findNext('span', {'class': 'descargar green'}).find('a')['href']
                result = Subtitle(filepath, self.getSubtitlePath(filepath, sub_language), self.__class__.__name__, sub_language, sub_link, teams=sub_teams)
                sublinks.append(result)
        sublinks.sort(self._cmpReleaseGroup)
        return sublinks

    def download(self, subtitle):
        self.downloadFile(subtitle.link, subtitle.path)
        return subtitle
|
[
"[email protected]"
] | |
6ec582b45c915e09dd744f84899c6718fc1c86f7
|
f6c6e0ebc18b7b1a28c23367f62c960e86194c88
|
/fileIO/hdf5/backup/plot_h5.py
|
0190bfb940e43963a0d6738e3a4fb4c64a2e2b2f
|
[] |
no_license
|
TheGrim1/python_work
|
9316d6fbb71a4be9bd901f104e939949dfd91174
|
5b34277aed4c06b62276644160e0aa97a4260233
|
refs/heads/master
| 2021-01-11T13:54:54.366575 | 2019-03-12T12:38:39 | 2019-03-12T12:38:39 | 94,876,671 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,279 |
py
|
from __future__ import print_function
# home: /data/id13/inhouse2/AJ/skript/fileIO/hdf5/open_h5.py
# global imports
import h5py
import sys, os
import matplotlib.pyplot as plt
import time
# local imports
from open_h5 import open_h5
def plot_h5(data,
            index=0,
            title = "Title"):
    dimension = len(data.shape)
    print("dimension of data to be plotted is %s" % dimension)
    if dimension == 3:
        plt.imshow(data[index,:,:], interpolation = 'none')
    elif dimension == 2:
        plt.imshow(data[:,:], interpolation = 'none')
    elif dimension not in (2,3):
        print("invalid data for plotting \ntitle : %s\n%s" % (title, dimension))
    # plt.clim(0,0.001)
    plt.show()
    plt.title(title)


def plotmany_h5(data,index):
    ax1 = plt.subplot(1,1,1,axisbg = (0.9, 0.9, 0.95))
    ax1.figure.set_size_inches(10,10)
    title = "Plotting image no %s of %s"
    ax1.title(title % (0, 0))
    ax1.ion()
    if dimension == 3:
        toplot = data[index,:,:]
    elif dimension == 2:
        toplot = data[:,:]
    elif dimension not in (2,3):
        print("invalid data for plotting \ntitle : %s\n%s" % (title, dimension))
    ax1.pcolor(toplot,norm=LogNorm(vmin=max(data.min(),0.0001),vmax=max(data.max(),0.01)), cmap='PuBu')
    nimages = data.shape[0]
    plt.show()


def main(args):
    'also does the plotting'
    for fname in args:
        data = open_h5(fname, framelist = [529,530,532], threshold = 5000000)
        # print 'the data shape is:'
        # print data.shape
        if data.ndim != 2:
            plotmany_h5(data)
        else:
            plot_h5(data, title = os.path.basename(fname))


if __name__ == '__main__':
    usage =""" \n1) python <thisfile.py> <arg1> <arg2> etc.
    \n2) python <thisfile.py> -f <file containing args as lines>
    \n3) find <*yoursearch* -> arg1 etc.> | python <thisfile.py>
    """
    args = []
    if len(sys.argv) > 1:
        if sys.argv[1].find("-f")!= -1:
            f = open(sys.argv[2])
            for line in f:
                args.append(line.rstrip())
        else:
            args=sys.argv[1:]
    else:
        f = sys.stdin
        for line in f:
            args.append(line.rstrip())
    # print args
    main(args)
|
[
"[email protected]"
] | |
bca28e38fcd7944d329ae326037eaa937382d563
|
032144d039ead151804d56910a4059d0fc48c374
|
/civpy/structures/node_load.py
|
f1c958aa95696c84c41c95e1f4f0d620e2bcf427
|
[
"BSD-3-Clause"
] |
permissive
|
mpewsey/civpy
|
038f5bd4a22971864cceb7c4f9568fdcca40a147
|
bbf74b1c04ca9f7604831f5280cc80d796240e67
|
refs/heads/master
| 2022-02-26T06:45:40.087975 | 2019-05-05T04:48:47 | 2019-05-05T04:48:47 | 170,751,326 | 16 | 3 |
BSD-3-Clause
| 2022-02-11T02:30:16 | 2019-02-14T20:09:32 |
Python
|
UTF-8
|
Python
| false | false | 5,178 |
py
|
"""
Copyright (c) 2019, Matt Pewsey
"""
import weakref
import numpy as np
__all__ = ['NodeLoad']
class NodeLoad(np.ndarray):
    """
    A class representing a load applied to a node.

    Parameters
    ----------
    node : str
        The name of the node to which the load will be applied.
    fx, fy, fz : float
        The applied global node forces.
    mx, my, mz : float
        The applied global moments.
    dx, dy, dz : float
        The applied node deflections.
    rx, ry, rz : float
        The applied node rotations.
    """
    def __new__(cls, node, fx=0, fy=0, fz=0, mx=0, my=0, mz=0,
                dx=0, dy=0, dz=0, rx=0, ry=0, rz=0):
        obj = np.array([fx, fy, fz, mx, my, mz,
                        dx, dy, dz, rx, ry, rz], dtype='float').view(cls)
        obj.node = node
        return obj

    def __array_finalize__(self, obj):
        if obj is None: return
        self.node = getattr(obj, 'node', '')
        self.node_ref = None

    def node():
        def fget(self):
            return self._node
        def fset(self, value):
            if not isinstance(value, str):
                value = str(value)
            self._node = value
        def fdel(self):
            del self._node
        return locals()
    node = property(**node())

    def node_ref():
        def fget(self):
            value = self._node_ref
            if value is None:
                return value
            return value()
        def fset(self, value):
            if value is not None:
                value = weakref.ref(value)
            self._node_ref = value
        def fdel(self):
            del self._node_ref
        return locals()
    node_ref = property(**node_ref())

    def fx():
        def fget(self):
            return self[0]
        def fset(self, value):
            self[0] = value
        return locals()
    fx = property(**fx())

    def fy():
        def fget(self):
            return self[1]
        def fset(self, value):
            self[1] = value
        return locals()
    fy = property(**fy())

    def fz():
        def fget(self):
            return self[2]
        def fset(self, value):
            self[2] = value
        return locals()
    fz = property(**fz())

    def mx():
        def fget(self):
            return self[3]
        def fset(self, value):
            self[3] = value
        return locals()
    mx = property(**mx())

    def my():
        def fget(self):
            return self[4]
        def fset(self, value):
            self[4] = value
        return locals()
    my = property(**my())

    def mz():
        def fget(self):
            return self[5]
        def fset(self, value):
            self[5] = value
        return locals()
    mz = property(**mz())

    def dx():
        def fget(self):
            return self[6]
        def fset(self, value):
            self[6] = value
        def fdel(self):
            del self._dx
        return locals()
    dx = property(**dx())

    def dy():
        def fget(self):
            return self[7]
        def fset(self, value):
            self[7] = value
        def fdel(self):
            del self._dy
        return locals()
    dy = property(**dy())

    def dz():
        def fget(self):
            return self[8]
        def fset(self, value):
            self[8] = value
        def fdel(self):
            del self._dz
        return locals()
    dz = property(**dz())

    def rx():
        def fget(self):
            return self[9]
        def fset(self, value):
            self[9] = value
        def fdel(self):
            del self._rx
        return locals()
    rx = property(**rx())

    def ry():
        def fget(self):
            return self[10]
        def fset(self, value):
            self[10] = value
        def fdel(self):
            del self._ry
        return locals()
    ry = property(**ry())

    def rz():
        def fget(self):
            return self[11]
        def fset(self, value):
            self[11] = value
        def fdel(self):
            del self._rz
        return locals()
    rz = property(**rz())

    def __repr__(self):
        s = [
            'node={!r}'.format(self.node),
            'forces={!r}'.format((self.fx, self.fy, self.fz)),
            'moments={!r}'.format((self.mx, self.my, self.mz)),
            'defl={!r}'.format((self.dx, self.dy, self.dz)),
            'rot={!r}'.format((self.rx, self.ry, self.rz))
        ]
        return '{}({})'.format(type(self).__name__, ', '.join(s))

    def forces(self):
        """Returns the applied force and moment matrix."""
        return self[:6]

    def deflections(self):
        """Returns the applied deflection and rotation matrix."""
        return self[6:]

    def get_node(self):
        """Gets the referenced node."""
        if self.node_ref is None:
            raise ValueError('Node has not been set.')
        return self.node_ref

    def set_node(self, ndict):
        """
        Sets the node reference.

        Parameters
        ----------
        ndict : dict
            A dictionary mapping node names to node objects.
        """
        self.node_ref = ndict[self.node]
|
[
"[email protected]"
] | |
7996b0260e16f6a5f2b53bf673c0e97d691983cd
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/39/usersdata/136/13246/submittedfiles/dec2bin.py
|
f1bd3e0804d034f78294918ab7ae45017384299e
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 144 |
py
|
n = int(input("Digite um número decimal:"))
i = 0
j = 1
d = n%2
while n>0:
    d = n%2
    n = n/2
    i = i+(d*j)
    j = j*10
print("%d"%i)
|
[
"[email protected]"
] | |
e499d64a713706ac756077b6e2c5e825b687d3c7
|
ff941fe046a593189050b6fdf77d44ade0925e8a
|
/lesson_python_basics/default_parameters2.py
|
7a862b12f45c6e47912aa620b568a03c7691b53e
|
[] |
no_license
|
hamk-webdev-intip19x6/petrikuittinen_assignments
|
c0dd02d3465ebf29f4387ab2805b12858c22110b
|
68dc154fbc571d8bc85f8eec0130b49e143c1e51
|
refs/heads/master
| 2021-11-11T04:08:45.963836 | 2021-10-04T15:47:03 | 2021-10-04T15:47:03 | 233,399,492 | 0 | 9 | null | null | null | null |
UTF-8
|
Python
| false | false | 858 |
py
|
def ask(question, choices, correct, retries=2):
    i = 1
    print(question)
    for c in choices:
        print(i, c)
        i += 1
    while retries>0:
        try:
            guess = int(input("?"))
        except ValueError:
            continue
        if guess==correct:
            print("right")
            break
        print("wrong guess")
        retries -= 1
    else:
        print("the correct reply was", correct, choices[correct-1])

ask("What is the capital of Australia?", \
    ("London", "Sydney", "Canberra", "Victoria"), 3)
ask("When Finland gained independence?", \
    ("1900", "1917", "1919", "1939"), 2, 1)
ask(question="What is the chemical symbol of Iron?", \
    correct=1, choices=("Fe", "R", "Ir", "I"))
ask("How to delete a variable in Python?", \
    ("delete", "del", "remove", "destroy"), \
    retries=3, correct=2)
|
[
"pkuittinen@ubuntupetrikuittinen.xbp5jv35e4rujjrlarqjj32eqf.fx.internal.cloudapp.net"
] |
pkuittinen@ubuntupetrikuittinen.xbp5jv35e4rujjrlarqjj32eqf.fx.internal.cloudapp.net
|
1b7ef58ff701c085ead9ee493e21e53abfca2806
|
250b997d715c168315a927e28124cf24c77048c0
|
/python3基础/9.Python修炼第九层/day9预习/11 生产者消费者模型.py
|
c20f15d6206f6793cd0c499148df156b6e894097
|
[] |
no_license
|
cuitianfeng/Python
|
c78077e5dcad01ee5fe44c0aa8b61bbc2fa388cf
|
9c9f10f13311116ce0bc60ec128f765ff2ca3078
|
refs/heads/master
| 2023-01-10T23:25:57.158141 | 2020-11-17T15:39:36 | 2020-11-17T15:39:36 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,299 |
py
|
# from multiprocessing import Process,Queue
# import time,os
# def producer(q,name):
# for i in range(3):
# time.sleep(1)
# res='%s%s' %(name,i)
# q.put(res)
# print('\033[45m<%s> 生产了 [%s]\033[0m' %(os.getpid(),res))
#
#
# def consumer(q):
# while True:
# res=q.get()
# if res is None:break
# time.sleep(1.5)
# print('\033[34m<%s> 吃了 [%s]\033[0m' % (os.getpid(), res))
#
# if __name__ == '__main__':
# q=Queue()
# #生产者们:即厨师们
# p1=Process(target=producer,args=(q,'包子'))
# p2=Process(target=producer,args=(q,'饺子'))
# p3=Process(target=producer,args=(q,'馄饨'))
#
# #消费者们:即吃货们
# c1=Process(target=consumer,args=(q,))
# c2=Process(target=consumer,args=(q,))
#
# p1.start()
# p2.start()
# p3.start()
# c1.start()
# c2.start()
#
# p1.join()
# p2.join()
# p3.join()
# q.put(None)
# q.put(None)
#
# print('主')
# from multiprocessing import Process, JoinableQueue
# import time, os
#
#
# def producer(q, name):
# for i in range(3):
# time.sleep(1)
# res = '%s%s' % (name, i)
# q.put(res)
# print('\033[45m<%s> 生产了 [%s]\033[0m' % (os.getpid(), res))
# q.join()
#
# def consumer(q):
# while True:
# res = q.get()
# time.sleep(1.5)
# print('\033[34m<%s> 吃了 [%s]\033[0m' % (os.getpid(), res))
# q.task_done()
#
# if __name__ == '__main__':
# q = JoinableQueue()
#
# # 生产者们:即厨师们
# p1 = Process(target=producer, args=(q, '包子'))
# p2 = Process(target=producer, args=(q, '饺子'))
# p3 = Process(target=producer, args=(q, '馄饨'))
#
# # 消费者们:即吃货们
# c1 = Process(target=consumer, args=(q,))
# c2 = Process(target=consumer, args=(q,))
#
# c1.daemon=True
# c2.daemon=True
# p1.start()
# p2.start()
# p3.start()
# c1.start()
# c2.start()
#
#
# p1.join()
#
# print('主')
# -----------------
# from multiprocessing import Process,Queue
# import time
# import os
#
# def producer(q,name):
# for i in range(3):
# time.sleep(1)
# res='%s%s' %(name,i)
# q.put(res)
# print('\033[45m<%s> 生产了 [%s]\033[0m' %(os.getpid(),res))
#
#
# def consumer(q):
# while True:
# res=q.get()
# if res is None:break
# time.sleep(1.5)
# print('\033[34m<%s> 吃了 [%s]\033[0m' % (os.getpid(), res))
#
# if __name__ == '__main__':
# q=Queue()
# #生产者们:即厨师们
# p1=Process(target=producer,args=(q,'包子'))
# p2=Process(target=producer,args=(q,'饺子'))
# p3=Process(target=producer,args=(q,'馄饨'))
#
# #消费者们:即吃货们
# c1=Process(target=consumer,args=(q,))
# c2=Process(target=consumer,args=(q,))
#
# p1.start()
# p2.start()
# p3.start()
#
# c1.start()
# c2.start()
#
# p1.join()
# p2.join()
# p3.join()
# q.put(None)
# q.put(None)
#
# print('主')
#
from multiprocessing import Process,JoinableQueue
import time
import os
def producer(q,name):
    for i in range(3):
        time.sleep(1)
        res='%s%s' %(name,i)
        q.put(res)
        print('\033[45m<%s> 生产了 [%s]\033[0m' %(os.getpid(),res))
    q.join()

def consumer(q):
    while True:
        res=q.get()
        time.sleep(1.5)
        print('\033[34m<%s> 吃了 [%s]\033[0m' % (os.getpid(), res))
        q.task_done()

if __name__ == '__main__':
    q=JoinableQueue()
    #生产者们:即厨师们
    p1=Process(target=producer,args=(q,'包子'))
    p2=Process(target=producer,args=(q,'饺子'))
    p3=Process(target=producer,args=(q,'馄饨'))

    #消费者们:即吃货们
    c1=Process(target=consumer,args=(q,))
    c2=Process(target=consumer,args=(q,))

    c1.daemon=True
    c2.daemon=True
    p1.start()
    p2.start()
    p3.start()
    c1.start()
    c2.start()

    p1.join()
    p2.join()
    p3.join()
    print('主')
|
[
"[email protected]"
] | |
56e5c49696fd2ab9d295abcdc27255368e6e7461
|
ea83d172b211dad5a5d3680a537e4d2d538f42d9
|
/week2_priority_queues_and_disjoint_sets/1_make_heap/test_build_heap_cases.py
|
a7a62dbb8dc4ded4e681d2d59664d2f5a4b798b0
|
[] |
no_license
|
FluffyFu/UCSD_Algorithms_Course_2
|
9e17e696be14b70da0d221802e4fb8527aeab0aa
|
f56aeee174f89cebffe5df6abb3930bda1fd4709
|
refs/heads/master
| 2022-12-07T00:38:28.499483 | 2020-08-27T15:39:36 | 2020-08-27T15:39:36 | 285,307,911 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 561 |
py
|
from build_heap import build_heap
with open('tests/04') as f:
    n = int(f.readline())
    data = list(map(int, f.readline().split()))

with open('tests/04.a') as f:
    n_swaps = int(f.readline())
    results = []
    for line in f.readlines():
        results.append(tuple(map(int, line.split())))

my_results = build_heap(data)
# my_results = [(b, a) for a, b in my_results]

assert my_results == results, 'my results len: {}, truth len: {}'.format(
    len(my_results), len(results))

print('my_results: ', my_results[:10])
print('truth: ', results[:10])
|
[
"[email protected]"
] | |
ebf006c8185185dddd4a84f9ef15bb8c06bb38ac
|
07622a0fb38e843ab0eef4f69bb8fb25d107c06d
|
/pretrained_mol_sim/Theano-master/theano/tests/main.py
|
97fea16ecc4d8d8763b25fdd0fb1d970cab187e3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
andreeadeac22/graph_coattention
|
fa59d77252625e4bee1cb9670e4a0fd0fec98135
|
23781fedaa942ca5614054f965cb7b6543e533fa
|
refs/heads/master
| 2023-08-08T01:51:51.368457 | 2020-02-19T04:56:59 | 2020-02-19T04:56:59 | 207,414,336 | 15 | 4 |
MIT
| 2023-07-22T15:47:39 | 2019-09-09T22:13:34 |
Python
|
UTF-8
|
Python
| false | false | 6,222 |
py
|
from __future__ import absolute_import, print_function, division
import os
import unittest
import sys
from numpy.testing.nosetester import NoseTester
# This class contains code adapted from NumPy,
# numpy/testing/nosetester.py,
# Copyright (c) 2005-2011, NumPy Developers
class TheanoNoseTester(NoseTester):
"""
Nose test runner.
This class enables running nose tests from inside Theano,
by calling theano.test().
This version is more adapted to what we want than Numpy's one.
"""
def _test_argv(self, verbose, extra_argv):
"""
Generate argv for nosetest command
:type verbose: int
:param verbose: Verbosity value for test outputs, in the range 1-10.
Default is 1.
:type extra_argv: list
:param extra_argv: List with any extra arguments to pass to nosetests.
"""
# self.package_path = os.path.abspath(self.package_path)
argv = [__file__, self.package_path]
argv += ['--verbosity', str(verbose)]
if extra_argv:
argv += extra_argv
return argv
def _show_system_info(self):
import theano
print("Theano version %s" % theano.__version__)
theano_dir = os.path.dirname(theano.__file__)
print("theano is installed in %s" % theano_dir)
super(TheanoNoseTester, self)._show_system_info()
def prepare_test_args(self, verbose=1, extra_argv=None, coverage=False,
capture=True, knownfailure=True):
"""
Prepare arguments for the `test` method.
Takes the same arguments as `test`.
"""
import nose.plugins.builtin
# compile argv
argv = self._test_argv(verbose, extra_argv)
# numpy way of doing coverage
if coverage:
argv += ['--cover-package=%s' % self.package_name,
'--with-coverage', '--cover-tests',
'--cover-inclusive', '--cover-erase']
# Capture output only if needed
if not capture:
argv += ['-s']
# construct list of plugins
plugins = []
if knownfailure:
from numpy.testing.noseclasses import KnownFailure
plugins.append(KnownFailure())
plugins += [p() for p in nose.plugins.builtin.plugins]
return argv, plugins
def test(self, verbose=1, extra_argv=None, coverage=False, capture=True,
knownfailure=True):
"""
Run tests for module using nose.
:type verbose: int
:param verbose: Verbosity value for test outputs, in the range 1-10.
Default is 1.
:type extra_argv: list
:param extra_argv: List with any extra arguments to pass to nosetests.
:type coverage: bool
:param coverage: If True, report coverage of Theano
code. Default is False.
:type capture: bool
:param capture: If True, capture the standard output of the tests, like
nosetests does in command-line. The output of failing
tests will be displayed at the end. Default is True.
:type knownfailure: bool
:param knownfailure: If True, tests raising KnownFailureTest will
not be considered Errors nor Failure, but reported as
"known failures" and treated quite like skipped tests.
Default is True.
:returns: Returns the result of running the tests as a
``nose.result.TextTestResult`` object.
"""
from nose.config import Config
from nose.plugins.manager import PluginManager
from numpy.testing.noseclasses import NumpyTestProgram
# Many Theano tests suppose device=cpu, so we need to raise an
# error if device==gpu.
if not os.path.exists('theano/__init__.py'):
try:
from theano import config
if config.device != "cpu":
raise ValueError("Theano tests must be run with device=cpu."
" This will also run GPU tests when possible.\n"
" If you want GPU-related tests to run on a"
" specific GPU device, and not the default one,"
" you should use the init_gpu_device theano flag.")
except ImportError:
pass
# cap verbosity at 3 because nose becomes *very* verbose beyond that
verbose = min(verbose, 3)
self._show_system_info()
cwd = os.getcwd()
if self.package_path in os.listdir(cwd):
# The tests give weird errors if the package to test is
# in current directory.
raise RuntimeError((
"This function does not run correctly when, at the time "
"theano was imported, the working directory was theano's "
"parent directory. You should exit your Python prompt, change "
"directory, then launch Python again, import theano, then "
"launch theano.test()."))
argv, plugins = self.prepare_test_args(verbose, extra_argv, coverage,
capture, knownfailure)
# The "plugins" keyword of NumpyTestProgram gets ignored if config is
# specified. Moreover, using "addplugins" instead can lead to strange
# errors. So, we specify the plugins in the Config as well.
cfg = Config(includeExe=True, plugins=PluginManager(plugins=plugins))
t = NumpyTestProgram(argv=argv, exit=False, config=cfg)
return t.result
def main(modulename):
if 0:
unittest.main()
elif len(sys.argv) == 2 and sys.argv[1] == "--debug":
module = __import__(modulename)
tests = unittest.TestLoader().loadTestsFromModule(module)
tests.debug()
elif len(sys.argv) == 1:
module = __import__(modulename)
tests = unittest.TestLoader().loadTestsFromModule(module)
unittest.TextTestRunner(verbosity=2).run(tests)
else:
print("options: [--debug]")
|
[
"[email protected]"
] | |
6372135fc16eea5d5a7e51601a76f80eaa89bac6
|
c61b1c1b65034215e85266db918c39946a3bafe7
|
/athletes/serializers.py
|
474ff1d5651ddb2dd3fc4d48d21a27ada56cb54d
|
[] |
no_license
|
enias-oliveira/olympicHistory
|
06cdc64f0ef06cf6b33472872539a0c57831f52c
|
e02f648d2bd127c8ae16c976fb8610005ac27604
|
refs/heads/master
| 2023-08-16T03:36:33.261214 | 2021-10-05T18:11:52 | 2021-10-05T18:11:52 | 411,797,836 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,494 |
py
|
from rest_framework import serializers
from medals.models import Medal
from medals.serializers import MedalSerializer
from games.models import Event
from games.serializers import EventSerializer
from .models import Athlete, Country
class AthleteMedalSerializer(MedalSerializer):
class Meta:
model = Medal
fields = ["id", "medal_class", "event"]
class CountrySerializer(serializers.ModelSerializer):
class Meta:
model = Country
fields = ["id", "name", "noc"]
class AthleteCountrySerializer(CountrySerializer):
class Meta:
model = Country
fields = ["noc", "name"]
class AthleteEventSerializer(EventSerializer):
competition = serializers.SlugRelatedField(
slug_field="name",
read_only=True,
)
class Meta:
model = Event
fields = "__all__"
class AthleteRelatedField(serializers.RelatedField):
def to_representation(self, value):
serialized_data = self.serializer_class(value)
return serialized_data.data
def to_internal_value(self, data):
return self.queryset.get(pk=data)
class AthleteCountryField(AthleteRelatedField):
queryset = Country.objects.all()
serializer_class = AthleteCountrySerializer
def to_internal_value(self, data):
return self.queryset.get(name=data)
class AthleteMedalsField(AthleteRelatedField):
queryset = Medal.objects.all()
serializer_class = AthleteMedalSerializer
class AthleteEventsField(AthleteRelatedField):
queryset = Event.objects.all()
serializer_class = AthleteEventSerializer
def to_representation(self, value):
value.sport = value.competition.sport.name
serialized_data = self.serializer_class(value)
return serialized_data.data
class AthleteSerializer(serializers.ModelSerializer):
medals = AthleteMedalsField(many=True, required=False)
country = AthleteCountryField()
class Meta:
model = Athlete
fields = "__all__"
def to_representation(self, instance):
current_representation = super().to_representation(instance)
current_representation["sex"] = dict(Athlete.SEX_CHOICES)[
current_representation["sex"]
]
return current_representation
def to_internal_value(self, data):
if data.get("sex"):
            sex_full_to_short = {v: k for k, v in Athlete.SEX_CHOICES}
            data["sex"] = sex_full_to_short[data["sex"]]
return data
|
[
"[email protected]"
] | |
2ba268102fe8f16978356394926d7b179fdf9dc9
|
69819f1d554e48b6d1f280e4d4785b35734e6235
|
/py_learning/test3_4.py
|
55da6b2b3bb96778e1fca613dd93c9a228e22b41
|
[] |
no_license
|
SamKaiYang/python_code
|
8b60b4b68ab7cd4261e237972c0f6e45fc2dced5
|
fb16471aa96082efc906fd8129cecd3a8b19e8a0
|
refs/heads/master
| 2020-04-26T03:26:38.041377 | 2019-04-11T06:38:09 | 2019-04-11T06:38:09 | 173,267,336 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,202 |
py
|
import numpy
###--------------------ch.3-------------------
years_list = [1996 ,1997 ,1998 ,1999 ,2000]
print("----------------ch3----------------------------")
print("3.1:",years_list)
print("-----------------------------------------------")
print("3.2:",years_list[2])
print("-----------------------------------------------")
print("3.3:",years_list[-1])
print("-----------------------------------------------")
thing = ["mozzarella" , "cinderella" ,"salmonella"]
print("3.4:",thing)
print("-----------------------------------------------")
print("3.5:",thing[1].capitalize())
print("-----------------------------------------------")
thing[0].capitalize()
thing[0] = thing[0].upper()
print("3.6:",thing)
print("-----------------------------------------------")
del thing[2]
print("3.7:",thing)
print("-----------------------------------------------")
surprise = ["Croucho","Chico","Harpo"]
print("3.8:",surprise)
print("-----------------------------------------------")
def reverse(str):
out = ''
li = list(str)
for i in range(len(li), 0, -1):
out += ''.join(li[i-1])
return out
surprise[2] =surprise[2].lower()
surprise[2] =reverse(surprise[2])
surprise[2]= surprise[2].capitalize()
print("3.9:",surprise)
print("-----------------------------------------------")
e2f = {"dog": 'chien', "cat": 'chat', "walrus": 'morse'}
print("3.10:",e2f)
print("-----------------------------------------------")
print("3.11:",e2f["walrus"])
print("-----------------------------------------------")
f2e = {v: k for k, v in e2f.items()}
print("3.12:",e2f.items())
print("-----------------------------------------------")
print("3.13:",f2e["chien"])
print("-----------------------------------------------")
print("3.14:",e2f.keys())
print("-----------------------------------------------")
life = {"animals": {'cats':["Henri","Grumpy","Lucy"],'octopi':None,'emus':None},"plants": None, "other": None, }
print("3.15:",life)
###--------------------ch.4------------------
print("-----------------ch4---------------------------")
guess_me = 7
print("4.1:")
if guess_me < 7 :
print("too low")
elif guess_me >7:
print("too high")
elif guess_me == 7:
print("just righ")
print("-----------------------------------------------")
print("4.2:")
start = 1
while(1):
if start < guess_me:
print("too low")
elif start == guess_me:
print("found it")
break
elif start > guess_me:
print("Oops!!")
break
start += 1
print("-----------------------------------------------")
num = [3,2,1,0]
for i in range(len(num)):
print("4.3:",num[i])
print("-----------------------------------------------")
number_list = [i for i in range(0,10) if i%2==0]
print("4.4:",number_list)
print("-----------------------------------------------")
word = 'squares'
letter_count = {letter:pow(word.count(letter),2) for letter in set(word)}
print("4.5:", letter_count)
print("-----------------------------------------------")
set1 = {number for number in range(10) if number%2 == 1}
print("4.6:", set1)
print("-----------------------------------------------")
def good():
list_a = ['Harry','Ron','hermione']
return(list_a)
print("4.8:",good())
|
[
"[email protected]"
] | |
905a119de0d7d34cc786486287e9cd094818c475
|
c82cefee68d557b0790483500b58e7e2988bd33b
|
/lib/web.py
|
2b001db7cb369359b02c9788fdcdc55fce7b4de8
|
[
"MIT"
] |
permissive
|
amadeobrands/electrum
|
c5def78731faf6908cf48342ce3d291547d1a52f
|
459100eda0f3cdcf6d75e304d08345a627364078
|
refs/heads/master
| 2021-08-29T11:38:29.221991 | 2017-12-13T11:00:16 | 2017-12-13T21:05:53 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,964 |
py
|
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from decimal import Decimal
import os
import re
import shutil
import threading
import urllib
from .address import Address
from . import bitcoin
from .networks import NetworkConstants
from .util import bh2u, format_satoshis_plain, print_error
mainnet_block_explorers = {
'Blockchair.com': ('https://blockchair.com/bitcoin-cash',
Address.FMT_LEGACY,
{'tx': 'transaction', 'addr': 'address'}),
}
testnet_block_explorers = {
'Blocktrail.com': ('https://www.blocktrail.com/tBCC',
Address.FMT_LEGACY,
{'tx': 'tx', 'addr': 'address'}),
'system default': ('blockchain:',
Address.FMT_LEGACY,
{'tx': 'tx', 'addr': 'address'}),
}
def BE_info():
if NetworkConstants.TESTNET:
return testnet_block_explorers
return mainnet_block_explorers
def BE_tuple(config):
return BE_info().get(BE_from_config(config))
def BE_from_config(config):
return config.get('block_explorer', 'Blockchair.com')
def BE_URL(config, kind, item):
be_tuple = BE_tuple(config)
if not be_tuple:
return
url_base, addr_fmt, parts = be_tuple
kind_str = parts.get(kind)
if not kind_str:
return
if kind == 'addr':
assert isinstance(item, Address)
item = item.to_string(addr_fmt)
return "/".join([url_base, kind_str, item])
def BE_sorted_list():
return sorted(BE_info())
def create_URI(addr, amount, message):
if not isinstance(addr, Address):
return ""
path = addr.to_ui_string()
query = []
if amount:
query.append('amount=%s'%format_satoshis_plain(amount))
if message:
query.append('message=%s'%urllib.parse.quote(message))
p = urllib.parse.ParseResult(scheme='bitcoincash', netloc='', path=path, params='', query='&'.join(query), fragment='')
return urllib.parse.urlunparse(p)
# URL decode
#_ud = re.compile('%([0-9a-hA-H]{2})', re.MULTILINE)
#urldecode = lambda x: _ud.sub(lambda m: chr(int(m.group(1), 16)), x)
def parse_URI(uri, on_pr=None):
if ':' not in uri:
# Test it's valid
Address.from_string(uri)
return {'address': uri}
u = urllib.parse.urlparse(uri)
if u.scheme != 'bitcoincash':
raise BaseException("Not a bitcoincash URI")
address = u.path
# python for android fails to parse query
if address.find('?') > 0:
address, query = u.path.split('?')
pq = urllib.parse.parse_qs(query)
else:
pq = urllib.parse.parse_qs(u.query)
for k, v in pq.items():
if len(v)!=1:
raise Exception('Duplicate Key', k)
out = {k: v[0] for k, v in pq.items()}
if address:
Address.from_string(address)
out['address'] = address
if 'amount' in out:
am = out['amount']
m = re.match('([0-9\.]+)X([0-9])', am)
if m:
k = int(m.group(2)) - 8
amount = Decimal(m.group(1)) * pow(10, k)
else:
amount = Decimal(am) * bitcoin.COIN
out['amount'] = int(amount)
if 'message' in out:
out['message'] = out['message']
out['memo'] = out['message']
if 'time' in out:
out['time'] = int(out['time'])
if 'exp' in out:
out['exp'] = int(out['exp'])
if 'sig' in out:
out['sig'] = bh2u(bitcoin.base_decode(out['sig'], None, base=58))
r = out.get('r')
sig = out.get('sig')
name = out.get('name')
if on_pr and (r or (name and sig)):
def get_payment_request_thread():
from . import paymentrequest as pr
if name and sig:
s = pr.serialize_request(out).SerializeToString()
request = pr.PaymentRequest(s)
else:
request = pr.get_payment_request(r)
if on_pr:
on_pr(request)
t = threading.Thread(target=get_payment_request_thread)
t.setDaemon(True)
t.start()
return out
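# Illustrative example (address shortened): parse_URI(
#     'bitcoincash:qzabc...?amount=1.5&message=tip')
# returns {'address': 'qzabc...', 'amount': 150000000, 'message': 'tip', 'memo': 'tip'}.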
def check_www_dir(rdir):
if not os.path.exists(rdir):
os.mkdir(rdir)
index = os.path.join(rdir, 'index.html')
if not os.path.exists(index):
print_error("copying index.html")
src = os.path.join(os.path.dirname(__file__), 'www', 'index.html')
shutil.copy(src, index)
files = [
"https://code.jquery.com/jquery-1.9.1.min.js",
"https://raw.githubusercontent.com/davidshimjs/qrcodejs/master/qrcode.js",
"https://code.jquery.com/ui/1.10.3/jquery-ui.js",
"https://code.jquery.com/ui/1.10.3/themes/smoothness/jquery-ui.css"
]
for URL in files:
path = urllib.parse.urlsplit(URL).path
filename = os.path.basename(path)
path = os.path.join(rdir, filename)
if not os.path.exists(path):
print_error("downloading ", URL)
urllib.request.urlretrieve(URL, path)
|
[
"[email protected]"
] | |
99851ffa805c3ed010783c1fa4bcefd5dc0b55af
|
f07a42f652f46106dee4749277d41c302e2b7406
|
/Data Set/bug-fixing-5/f05c02c4b8d4e423e57d453c4bd699dc5ff7eaa7-<test_quote_value>-fix.py
|
b76461e1c58bbd3cbb3da8d06195ffab39dbfdfe
|
[] |
no_license
|
wsgan001/PyFPattern
|
e0fe06341cc5d51b3ad0fe29b84098d140ed54d1
|
cc347e32745f99c0cd95e79a18ddacc4574d7faa
|
refs/heads/main
| 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 433 |
py
|
def test_quote_value(self):
import MySQLdb
editor = connection.schema_editor()
tested_values = [('string', "'string'"), (42, '42'), (1.754, ('1.754e0' if (MySQLdb.version_info >= (1, 3, 14)) else '1.754')), (False, (b'0' if (MySQLdb.version_info >= (1, 4, 0)) else '0'))]
for (value, expected) in tested_values:
with self.subTest(value=value):
self.assertEqual(editor.quote_value(value), expected)
|
[
"[email protected]"
] | |
b6a5ed0416da616a2c7f584e7ad77d7f96bb9d7a
|
65e73c6c4a9e66715be2cbdd93339ebcab93976e
|
/windmill/fundo/migrations/0032_vertice_corretora.py
|
a3eb1329352877a6515e34208cf08afdae56e014
|
[] |
no_license
|
AnimaTakeshi/windmill-django
|
3577f304d5e7f74750c7d95369e87d37209f1ac6
|
78bde49ace1ed215f6238fe94c142eac16e164dc
|
refs/heads/master
| 2022-12-13T11:13:21.859012 | 2019-02-07T20:50:01 | 2019-02-07T20:50:01 | 150,470,109 | 0 | 0 | null | 2022-12-08T01:29:36 | 2018-09-26T18:13:54 |
Python
|
UTF-8
|
Python
| false | false | 493 |
py
|
# Generated by Django 2.0 on 2018-11-29 19:24
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('fundo', '0031_auto_20181129_1104'),
]
operations = [
migrations.AddField(
model_name='vertice',
name='corretora',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='fundo.Corretora'),
),
]
|
[
"[email protected]"
] | |
a164d0312f3a5559e47223aa73fad366d912287a
|
ce6739d9e76f16d1cbdfbb3292cdabac62d23ee1
|
/mod_2/lesson_8/homework_2/main.py
|
388347f374c64f6a6f86d8e8dd8ed5640d330d2c
|
[] |
no_license
|
adrbed/python_poczatek
|
1a863c0dc40c2315ced8985cf0b31d1b84e3f940
|
134626ccbe2255b0a28183e7d5d212db12c7deb4
|
refs/heads/master
| 2023-02-10T15:06:51.888544 | 2021-01-09T18:23:56 | 2021-01-09T18:23:56 | 328,218,686 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,654 |
py
|
# Add discount policy support to the program.
# Implement functions representing the discount policy and pass them to the order constructor.
# If a policy was passed, apply the discount when computing the total order amount.
# If not - compute the total amount as before.
# Implement two discount policies:
# For loyal customers: 5% off every line item
# Holiday discount: 20 PLN off orders with a total amount above 100 PLN
import random
from shop.order_element import OrderElement
from shop.product import Product
from shop.discount_policy import loyal_customer_policy, christmas_policy
from shop.order import Order
def generate_order_elements():
order_elements = []
for product_number in range(5):
product_name = f"Produkt-{product_number}"
category_name = "Inne"
unit_price = random.randint(1, 30)
product = Product(product_name, category_name, unit_price)
quantity = random.randint(1, 10)
order_elements.append(OrderElement(product, quantity))
return order_elements
def run_homework():
first_name = "Mikołaj"
last_name = "Lewandowski"
order_elements = generate_order_elements()
normal_order = Order(first_name, last_name, order_elements)
loyal_customer_order = Order(first_name, last_name, order_elements, discount_policy=loyal_customer_policy)
christmas_order = Order(first_name, last_name, order_elements, discount_policy=christmas_policy)
print(normal_order)
print(loyal_customer_order)
print(christmas_order)
if __name__ == '__main__':
run_homework()
|
[
"[email protected]"
] | |
e9e9799b02db2f0a528239390a1408c0943ce0df
|
c3e776b885ac9552ad4b96b63594ab759f12cc3a
|
/test/test_shape.py
|
c641e71e3eca4afe9db6faef214267b55670c856
|
[] |
no_license
|
wing328/petstore-python
|
e9ff3c2a5ff0fdddb1e6b0f12fc7e1d48f65590a
|
fa65c111bb2a040ebb0ed0db3ff9c52c8821922e
|
refs/heads/master
| 2021-10-20T22:22:33.677235 | 2021-10-17T10:33:24 | 2021-10-17T10:33:24 | 54,948,706 | 1 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 955 |
py
|
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import petstore_api
from petstore_api.model.quadrilateral import Quadrilateral
from petstore_api.model.triangle import Triangle
globals()['Quadrilateral'] = Quadrilateral
globals()['Triangle'] = Triangle
from petstore_api.model.shape import Shape
class TestShape(unittest.TestCase):
"""Shape unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testShape(self):
"""Test Shape"""
# FIXME: construct object with mandatory attributes with example values
# model = Shape() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
6964898e391447b3d1130c2528211f4e8de6b23d
|
cdbf40c2de7b22a10e07a1f612cb49059a3fc858
|
/main/cache.py
|
970f75c7d612d8b1dcad2a10c9422a214c3a4833
|
[
"MIT"
] |
permissive
|
riccitensor/gae-init
|
2e86a77e5a99a8a0ad4889a275974409945a1940
|
d9061cf32a26ebc03873431838ead0b1150374b9
|
refs/heads/master
| 2020-12-31T03:35:49.650513 | 2014-09-15T23:45:57 | 2014-09-15T23:45:57 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 486 |
py
|
# coding: utf-8
from google.appengine.api import memcache
###############################################################################
# Helpers
###############################################################################
def bump_counter(key, time=3600, limit=4):
client = memcache.Client()
for _ in range(limit):
counter = client.gets(key)
if counter is None:
client.set(key, 0, time=time)
counter = 0
if client.cas(key, counter + 1):
break
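# Hypothetical usage sketch (not part of the original module): throttle
# sign-in attempts by bumping a per-user counter and reading it back.
#
#   key = 'auth-attempt-%s' % user_id            # user_id is assumed
#   bump_counter(key, time=3600, limit=4)
#   attempts = memcache.get(key) or 0
#   if attempts > 4:
#       ...  # reject the request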
|
[
"[email protected]"
] | |
32b125c421cc16caea121a8692ce403e096ba7ff
|
3f4b535e537666b669b4cfbfca05c7529d2fb631
|
/Algorithms/generate_palindromes_recursion.py
|
4b79f741f374bae1608a3aa0949e2745f2171c29
|
[] |
no_license
|
iliachigogidze/Python
|
ded0a78a1751a536fcdf1fd864fc296ef52f6164
|
6db759b3ee4f4b866421b2cb3a775b7aec32b0c9
|
refs/heads/master
| 2020-04-09T08:15:07.107069 | 2019-03-11T10:35:23 | 2019-03-11T10:35:23 | 160,187,366 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 796 |
py
|
'''
18. Given a list of distinct letters and a positive number N, generate all possible palindromes
having the length not greater than N. Single word can be used multiple times per palindrome. Example: ['a', 'b'], N=3 a aa aaa b bb bbb aba bab
'''
def main(ls, n):
print('List: ', ls)
word = 'a' * n
return recursion(word, ls)
def recursion(word,ls):
if len(word) == 0: return ['']
elif len(word) == 1: return [c for c in ls]
else:
word = word[1:-1]
word = recursion(word,ls)
palindromes = []
for w in word:
palindromes.append(w)
for c in ls:
                if c == w:
palindromes.append(c+w)
palindromes.append(c+w+c)
return palindromes
print(main(['a','b'], 4))
|
[
"[email protected]"
] | |
85c201f5f5de17ab3e1365558c6188fbb929728d
|
1825283527f5a479204708feeaf55f4ab6d1290b
|
/leetcode/python/841/841.keys-and-rooms.py
|
0656b2ebe9f552d97e1b6e8129c65305972f48bf
|
[] |
no_license
|
frankieliu/problems
|
b82c61d3328ffcc1da2cbc95712563355f5d44b5
|
911c6622448a4be041834bcab25051dd0f9209b2
|
refs/heads/master
| 2023-01-06T14:41:58.044871 | 2019-11-24T03:47:22 | 2019-11-24T03:47:22 | 115,065,956 | 1 | 0 | null | 2023-01-04T07:25:52 | 2017-12-22T02:06:57 |
HTML
|
UTF-8
|
Python
| false | false | 1,525 |
py
|
#
# @lc app=leetcode id=841 lang=python3
#
# [841] Keys and Rooms
#
# https://leetcode.com/problems/keys-and-rooms/description/
#
# algorithms
# Medium (58.74%)
# Total Accepted: 24.4K
# Total Submissions: 41.6K
# Testcase Example: '[[1],[2],[3],[]]'
#
# There are N rooms and you start in room 0. Each room has a distinct number
# in 0, 1, 2, ..., N-1, and each room may have some keys to access the next
# room.
#
# Formally, each room i has a list of keys rooms[i], and each key rooms[i][j]
# is an integer in [0, 1, ..., N-1] where N = rooms.length. A key rooms[i][j]
# = v opens the room with number v.
#
# Initially, all the rooms start locked (except for room 0).
#
# You can walk back and forth between rooms freely.
#
# Return true if and only if you can enter every room.
#
#
#
#
# Example 1:
#
#
# Input: [[1],[2],[3],[]]
# Output: true
# Explanation:
# We start in room 0, and pick up key 1.
# We then go to room 1, and pick up key 2.
# We then go to room 2, and pick up key 3.
# We then go to room 3. Since we were able to go to every room, we return
# true.
#
#
# Example 2:
#
#
# Input: [[1,3],[3,0,1],[2],[0]]
# Output: false
# Explanation: We can't enter the room with number 2.
#
#
# Note:
#
#
# 1 <= rooms.length <= 1000
# 0 <= rooms[i].length <= 1000
# The number of keys in all rooms combined is at most 3000.
#
#
#
class Solution:
    def canVisitAllRooms(self, rooms):
        """
        :type rooms: List[List[int]]
        :rtype: bool
        """
        # Iterative DFS from room 0, collecting keys as rooms are opened.
        seen = set()
        stack = [0]
        while stack:
            room = stack.pop()
            if room in seen:
                continue
            seen.add(room)
            stack.extend(rooms[room])
        return len(seen) == len(rooms)
|
[
"[email protected]"
] | |
0ba08eb41cfb2e76bd9b6aa1e05cd05ecd2ebf2b
|
9047aec2400933376e71fdc24d087d2ad35b4d45
|
/flipAndInvertImage_832.py
|
0145883c6e9c652019e80f3433fce0bc3df0edd1
|
[] |
no_license
|
sasankyadavalli/leetcode
|
a8c3a4b63970cfa67a8bbec5d1fb7cca818f7ea9
|
555931bc5a74e0031726070be90c945da9cb3251
|
refs/heads/master
| 2021-02-07T12:12:06.938562 | 2020-07-28T18:25:10 | 2020-07-28T18:25:10 | 244,024,453 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 252 |
py
|
from typing import List
class Solution:
def flipAndInvertImage(self, A: List[List[int]]) -> List[List[int]]:
for i in range(len(A)):
A[i] = A[i][::-1]
for j in range(len(A[i])):
A[i][j] ^= 1
return A
|
[
"[email protected]"
] | |
43c1a91b4e3f8f288ce744bcc1e36236880f0919
|
9268c07cd58f68fd20daf5e38880ef452eeca331
|
/10219-.py
|
7e62d546d53bceb63e73315a6a81a245e42f14d1
|
[] |
no_license
|
rkdr055/BOJ
|
bbdbdcf8a9df399c3655eea3d9f337275b1ae504
|
d90465cfa80d124aba8f9572b9bd5e4bb2a62031
|
refs/heads/master
| 2021-04-09T14:26:04.811468 | 2018-03-18T10:05:20 | 2018-03-18T10:05:20 | 125,712,186 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 226 |
py
|
case = int(input())
for z in range(case):
    h, w = map(int, input().split())
    # reset per test case so earlier grids are neither reused nor reprinted
    rows = []
    mirrored = []
    for i in range(h):
        rows.append(input())
        mirrored.append(rows[i][::-1])
    for i in range(len(mirrored)):
        print(mirrored[i])
|
[
"[email protected]"
] | |
b109faeccfded125ae603f0cd2ccb693475c2cdf
|
31f9333012fd7dad7b8b12c1568f59f33420b0a5
|
/Alessandria/env/lib/python3.8/site-packages/django/template/utils.py
|
41921a09a2c9c6ce444863547c4c915b4f03271c
|
[] |
no_license
|
jcmloiacono/Django
|
0c69131fae569ef8cb72b135ab81c8e957d2a640
|
20b9a4a1b655ae4b8ff2a66d50314ed9732b5110
|
refs/heads/master
| 2022-11-15T22:18:57.610642 | 2020-07-14T14:43:16 | 2020-07-14T14:43:16 | 255,125,001 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,566 |
py
|
import functools
from collections import Counter
from pathlib import Path
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
class InvalidTemplateEngineError(ImproperlyConfigured):
pass
class EngineHandler:
def __init__(self, templates=None):
"""
templates is an optional list of template engine definitions
(structured like settings.TEMPLATES).
"""
self._templates = templates
self._engines = {}
@cached_property
def templates(self):
if self._templates is None:
self._templates = settings.TEMPLATES
templates = {}
backend_names = []
for tpl in self._templates:
try:
# This will raise an exception if 'BACKEND' doesn't exist or
# isn't a string containing at least one dot.
default_name = tpl['BACKEND'].rsplit('.', 2)[-2]
except Exception:
invalid_backend = tpl.get('BACKEND', '<not defined>')
raise ImproperlyConfigured(
"Invalid BACKEND for a template engine: {}. Check "
"your TEMPLATES setting.".format(invalid_backend))
tpl = {
'NAME': default_name,
'DIRS': [],
'APP_DIRS': False,
'OPTIONS': {},
**tpl,
}
templates[tpl['NAME']] = tpl
backend_names.append(tpl['NAME'])
counts = Counter(backend_names)
duplicates = [alias for alias, count in counts.most_common() if count > 1]
if duplicates:
raise ImproperlyConfigured(
"Template engine aliases aren't unique, duplicates: {}. "
"Set a unique NAME for each engine in settings.TEMPLATES."
.format(", ".join(duplicates)))
return templates
def __getitem__(self, alias):
try:
return self._engines[alias]
except KeyError:
try:
params = self.templates[alias]
except KeyError:
raise InvalidTemplateEngineError(
"Could not find config for '{}' "
"in settings.TEMPLATES".format(alias))
# If importing or initializing the backend raises an exception,
# self._engines[alias] isn't set and this code may get executed
# again, so we must preserve the original params. See #24265.
params = params.copy()
backend = params.pop('BACKEND')
engine_cls = import_string(backend)
engine = engine_cls(params)
self._engines[alias] = engine
return engine
def __iter__(self):
return iter(self.templates)
def all(self):
return [self[alias] for alias in self]
@functools.lru_cache()
def get_app_template_dirs(dirname):
"""
    Return an iterable of paths of directories to load app templates from.
dirname is the name of the subdirectory containing templates inside
installed applications.
"""
template_dirs = [
str(Path(app_config.path) / dirname)
for app_config in apps.get_app_configs()
if app_config.path and (Path(app_config.path) / dirname).is_dir()
]
# Immutable return value because it will be cached and shared by callers.
return tuple(template_dirs)
|
[
"[email protected]"
] | |
b408eb1c942cd29c1efe13d3c57a2c4785cf6995
|
95cdf7753fc4022be239666a902df217a93f7125
|
/dangdang/test_case/test_login_2.py
|
5af74b59fe9373ec7b2a931f7cda17f213c6b17c
|
[] |
no_license
|
he9mei/python_appium
|
ffe1b872d3732b9e8510da0dd24f7a791c534be0
|
9fc5dcb67769d2d103756b9fca82d2cfeae40e72
|
refs/heads/master
| 2022-06-01T03:24:36.821931 | 2022-05-22T07:55:58 | 2022-05-22T07:55:58 | 223,714,095 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,002 |
py
|
import os
import pytest
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
from dangdang.base.base_try_2py import BasePre  # when importing, "from" should go all the way down to the .py module
class TestLogin(BasePre):
def test_01(self):
        # Open the login page
driver=self.driver
# driver.find_element_by_id("com.dangdang.reader:id/tab_personal_iv").click()
        # Try to separate element locators from the code
        # by_el = By.ID("com.dangdang.reader:id/tab_personal_iv")  # wrong usage - that is the Java API
el="com.dangdang.reader:id/tab_personal_iv"
driver.find_element(By.ID,el).click()
driver.find_element_by_id("com.dangdang.reader:id/nickname_tv").click()
@pytest.mark.skip()
def test_02(self):
driver = self.driver
        # If password login is not shown by default, tap the account/password login button first
        try:
            el_pw_login = driver.find_element_by_id("com.dangdang.reader:id/custom_login_tv")
            if el_pw_login.is_displayed():
                print("SMS-code login is the default; found the account/password login button!")
                el_pw_login.click()
        # except Exception as e: # catching bare Exception triggers a "too broad" warning
        except NoSuchElementException:
            print("Account/password login may already be the default!")
            # print(e)
        # switch the input method
        os.system("adb shell ime set io.appium.android.ime/.UnicodeIME")
el_name_input=driver.find_element_by_id("com.dangdang.reader:id/name_edit")
el_name_input.clear()
el_name_input.send_keys("18500000005")
el_pw_input = driver.find_element_by_id("com.dangdang.reader:id/password_et")
el_pw_input.clear()
el_pw_input.send_keys("111111")
        # Problem encountered: after typing, the keyboard is not dismissed and covers the login button
driver.press_keycode(4)
driver.find_element_by_id("com.dangdang.reader:id/login_tv").click()
|
[
"[email protected]"
] | |
0e2e8ebd7cc2d7103bce5e551dfe8fdb46f11f37
|
039f2c747a9524daa1e45501ada5fb19bd5dd28f
|
/ARC070/ARC070e.py
|
180283ee92bad6826c10a5ff60317df1e96f5613
|
[
"Unlicense"
] |
permissive
|
yuto-moriizumi/AtCoder
|
86dbb4f98fea627c68b5391bf0cc25bcce556b88
|
21acb489f1594bbb1cdc64fbf8421d876b5b476d
|
refs/heads/master
| 2023-03-25T08:10:31.738457 | 2021-03-23T08:48:01 | 2021-03-23T08:48:01 | 242,283,632 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 136 |
py
|
#ARC070e
def main():
import sys
input=sys.stdin.readline
sys.setrecursionlimit(10**6)
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
d3794f2b28a9458808b14c3906f02f49f7c018d0
|
622f779c8ed0bf589b947c5550c11f64f438444a
|
/src/108-convert-sorted-array-to-binary-search-tree.py
|
2b05af9f48d9a0ecb0fa80a84c03597351f4918c
|
[
"MIT"
] |
permissive
|
sahilrider/LeetCode-Solutions
|
9290f69096af53e5c7212fe1e3820131a89257d0
|
9cac844c27b5dbf37a70c2981a09cd92457f7ff1
|
refs/heads/master
| 2023-01-03T22:35:28.377721 | 2020-10-25T16:15:49 | 2020-10-25T16:15:49 | 232,368,847 | 2 | 0 |
MIT
| 2020-10-25T16:15:50 | 2020-01-07T16:37:43 |
Python
|
UTF-8
|
Python
| false | false | 806 |
py
|
'''https://leetcode.com/problems/convert-sorted-array-to-binary-search-tree/'''
from typing import List
# Definition for a binary tree node.
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class Solution:
def sortedArrayToBST(self, nums: List[int]) -> TreeNode:
def helper(low, high):
m = (low+high)//2
node = TreeNode(nums[m])
if m-1>=low:
node.left = helper(low, m-1)
if m+1<=high:
node.right = helper(m+1, high)
return node
if nums is None or len(nums)==0:
return None
if len(nums)==1:
return TreeNode(nums[0])
ans = helper( 0, len(nums)-1)
return ans
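# Example: sortedArrayToBST([-10, -3, 0, 5, 9]) roots the tree at the middle
# element 0 and recurses on [-10, -3] and [5, 9], keeping the result
# height-balanced.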
|
[
"[email protected]"
] | |
e7d71e403f48b53d8264ec423ce14f51ac3ee2fa
|
72f6abca19153ebc58921b148f51c09967e131d7
|
/examples/correlated_gaussians/run_test.py
|
83e0aa23cd49c234198a0e984df5977af54c1706
|
[] |
no_license
|
mboudiaf/Mutual-Information-Variational-Bounds
|
3e2693a5b2a06d65988ded931e0a5116456697e6
|
56e8e3f2369ed107cd363d854d9f155fbf242930
|
refs/heads/master
| 2023-04-08T00:03:46.946564 | 2019-10-31T18:48:17 | 2019-10-31T18:48:17 | 216,642,876 | 38 | 5 | null | 2023-03-24T23:13:25 | 2019-10-21T18:55:11 |
Python
|
UTF-8
|
Python
| false | false | 851 |
py
|
#!/usr/bin/env python
"""run_tests.py: Script to execute in order to make sure basic features work properly."""
import os
import itertools
regularizers = ["nce", "mine", "nwj"]
critic_types = ["joint", "separate"]
comb_test_params = itertools.product(regularizers, critic_types)
name_test_params = ['regularizer', 'critic_type']
fixed_params = {"epochs": 1, "rho_points": 2, "data_size": 1000}
for param_comb in comb_test_params:
command_line = "python3 demo_gaussian.py "
for fix_param_name, fix_param_value in fixed_params.items():
command_line += "--{} {} ".format(fix_param_name, fix_param_value)
for i, param in enumerate(param_comb):
command_line += "--{} {} ".format(name_test_params[i], param)
print(command_line)
os.system(command_line)
print("All tests were successfully passed. ")
|
[
"[email protected]"
] | |
34752630bb94a41e0623b3e17a229516d7faaf70
|
fc772efe3eccb65e4e4a8da7f2b2897586b6a0e8
|
/Controller/glance/tests/functional/v1/test_multiprocessing.py
|
bfbb88b6aaa9ce22c2afe7d5cd7e4980f891b31c
|
[] |
no_license
|
iphonestack/Openstack_Kilo
|
9ae12505cf201839631a68c9ab4c041f737c1c19
|
b0ac29ddcf24ea258ee893daf22879cff4d03c1f
|
refs/heads/master
| 2021-06-10T23:16:48.372132 | 2016-04-18T07:25:40 | 2016-04-18T07:25:40 | 56,471,076 | 0 | 2 | null | 2020-07-24T02:17:46 | 2016-04-18T02:32:43 |
Python
|
UTF-8
|
Python
| false | false | 2,497 |
py
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import httplib2
import psutil
from glance.tests import functional
from glance.tests.utils import execute
class TestMultiprocessing(functional.FunctionalTest):
"""Functional tests for the bin/glance CLI tool"""
def setUp(self):
self.workers = 2
super(TestMultiprocessing, self).setUp()
def test_multiprocessing(self):
"""Spin up the api servers with multiprocessing on"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(200, response.status)
self.assertEqual('{"images": []}', content)
self.stop_servers()
def _get_children(self):
api_pid = self.api_server.process_pid
process = psutil.Process(api_pid)
children = process.get_children()
pids = [str(child.pid) for child in children]
return pids
def test_interrupt_avoids_respawn_storm(self):
"""
Ensure an interrupt signal does not cause a respawn storm.
See bug #978130
"""
self.start_servers(**self.__dict__.copy())
children = self._get_children()
cmd = "kill -INT %s" % ' '.join(children)
execute(cmd, raise_error=True)
for _ in range(9):
# Yeah. This totally isn't a race condition. Randomly fails
# set at 0.05. Works most of the time at 0.10
time.sleep(0.10)
# ensure number of children hasn't grown
self.assertTrue(len(children) >= len(self._get_children()))
for child in self._get_children():
# ensure no new children spawned
self.assertTrue(child in children, child)
self.stop_servers()
|
[
"[email protected]"
] | |
582971ce3a9a478be81b73546667dd8843b56322
|
97543ae8e1ad7bf3d17dd87171aaac04f6737b5f
|
/bibliopixel/util/server_cache.py
|
6752a1a164b698f46bdec0887fe6f9a44ffab3a0
|
[
"MIT"
] |
permissive
|
dr-aryone/BiblioPixel
|
a3c630bf1cd5db2b014b86775d283c61565a193e
|
fd97e6c651a4bbcade64733847f4eec8f7704b7c
|
refs/heads/master
| 2020-05-27T16:19:15.043592 | 2019-03-23T08:52:37 | 2019-03-25T11:10:39 | 188,698,414 | 2 | 1 |
MIT
| 2019-05-26T15:12:38 | 2019-05-26T15:12:37 | null |
UTF-8
|
Python
| false | false | 3,322 |
py
|
import errno
from . import log
class StaticCache:
SERVER_CLASS = None
SERVER_KWDS = {}
CACHE = None
@classmethod
def cache(cls):
if not cls.CACHE:
cls.CACHE = ServerCache(cls.SERVER_CLASS, **cls.SERVER_KWDS)
return cls.CACHE
@classmethod
def close_all(cls):
if cls.CACHE:
cls.CACHE.close_all()
cls.CACHE = None
class ServerCache:
"""
A class that caches servers by key so you don't keep closing and re-opening
the same server and interrupting your connection.
The exact nature of the key depends on the sort of server.
For example, for a server socket like SimPixel, it would be just a port
number, whereas for a UDP connection like Art-Net, it would be a
port, ip_address pair.
"""
def __init__(self, constructor, **kwds):
"""
:param constructor: a function which takes a key and some keywords,
and returns a new server
:param kwds: keywords to the ``constructor`` function
"""
self.servers = {}
self.constructor = constructor
self.kwds = kwds
def get_server(self, key, **kwds):
"""
Get a new or existing server for this key.
:param int key: key for the server to use
"""
kwds = dict(self.kwds, **kwds)
server = self.servers.get(key)
if server:
# Make sure it's the right server.
server.check_keywords(self.constructor, kwds)
else:
# Make a new server
server = _CachedServer(self.constructor, key, kwds)
self.servers[key] = server
return server
def close(self, key):
server = self.servers.pop(key, None)
if server:
server.server.close()
return True
def close_all(self):
for key in list(self.servers.keys()):
self.close(key)
class _CachedServer:
def __init__(self, constructor, key, kwds):
try:
self.server = constructor(key, **kwds)
except OSError as e:
if e.errno == errno.EADDRINUSE:
e.strerror += ADDRESS_IN_USE_ERROR.format(key)
e.args = (e.errno, e.strerror)
raise
self.key = key
self.constructor = constructor
self.kwds = kwds
def check_keywords(self, constructor, kwds):
if self.constructor != constructor:
raise ValueError(CACHED_SERVER_ERROR.format(
key=self.key,
new_type=str(constructor),
old_type=str(self.constructor)))
if self.kwds != kwds:
log.warning(CACHED_KWDS_WARNING.format(server=self, kwds=kwds))
def close(self):
pass
def __getattr__(self, key):
# Pass through all other attributes to the server.
return getattr(self.server, key)
ADDRESS_IN_USE_ERROR = """
Cached server {0} on your machine is already in use.
Perhaps BiblioPixel is already running on your machine?
"""
CACHED_SERVER_ERROR = """
Tried to open server of type {new_type} on {key}, but there was already
a server of type {old_type} running there.
"""
CACHED_KWDS_WARNING = """
Cached server for {server.port} had keywords {server.kwds},
but keywords {kwds} were requested.
"""
|
[
"[email protected]"
] | |
131ab62769f61bb7383f1f1debef3512ce034caf
|
b47c136e077f5100478338280495193a8ab81801
|
/Lights/adafruit-circuitpython-bundle-6.x-mpy-20210310/examples/trellism4_neopixel_toggle.py
|
96f58e006b9ea62fbbd51578016af63ab64293da
|
[
"Apache-2.0"
] |
permissive
|
IanSMoyes/SpiderPi
|
22cd8747cc389f674cc8d95f32b4d86f9b7b2d8e
|
cc3469980ae87b92d0dc43c05dbd579f0fa8c4b1
|
refs/heads/master
| 2023-03-20T22:30:23.362137 | 2021-03-12T17:37:33 | 2021-03-12T17:37:33 | 339,555,949 | 16 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,207 |
py
|
# SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
# SPDX-License-Identifier: MIT
import adafruit_trellism4
trellis = adafruit_trellism4.TrellisM4Express()
def wheel(pos):
if pos < 0 or pos > 255:
return 0, 0, 0
if pos < 85:
return int(255 - pos * 3), int(pos * 3), 0
if pos < 170:
pos -= 85
return 0, int(255 - pos * 3), int(pos * 3)
pos -= 170
return int(pos * 3), 0, int(255 - (pos * 3))
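# wheel() maps a 0-255 position onto the RGB colour wheel: 0-84 fades
# red->green, 85-169 fades green->blue, and 170-255 fades blue->red, so
# neighbouring key indices below get smoothly varying colours.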
led_on = []
for x in range(trellis.pixels.width):
led_on.append([])
for y in range(trellis.pixels.height):
led_on[x].append(False)
trellis.pixels.fill((0, 0, 0))
current_press = set()
while True:
pressed = set(trellis.pressed_keys)
for press in pressed - current_press:
x, y = press
if not led_on[x][y]:
print("Turning on:", press)
pixel_index = (x + (y * 8)) * 256 // 32
trellis.pixels[x, y] = wheel(pixel_index & 255)
led_on[x][y] = True
else:
print("Turning off:", press)
trellis.pixels[x, y] = (0, 0, 0)
led_on[x][y] = False
current_press = pressed
|
[
"[email protected]"
] | |
81c3fe0c21b388da7fe81eb05e8d462f98493ed4
|
d081f87beb90e0e3697045b9139bde67253faf4d
|
/capsule_em/cap_datasets/dataset_utils.py
|
e51817113705f395f1898dfceabc979141a42d4c
|
[] |
no_license
|
hannarud/deep_learning
|
db9ed75c9fedd70dc3cb3935bba3947c818095ee
|
b5d1c91e8dcb6252e24f3ab35797ca4130381ec0
|
refs/heads/master
| 2020-03-28T03:21:20.849132 | 2019-05-08T08:43:23 | 2019-05-08T08:43:23 | 147,638,984 | 0 | 0 | null | 2018-09-06T08:02:52 | 2018-09-06T08:02:52 | null |
UTF-8
|
Python
| false | false | 4,679 |
py
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains utilities for downloading and converting datasets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import tarfile
from six.moves import urllib
import tensorflow as tf
LABELS_FILENAME = 'labels.txt'
def int64_feature(values):
"""Returns a TF-Feature of int64s.
Args:
values: A scalar or list of values.
Returns:
A TF-Feature.
"""
if not isinstance(values, (tuple, list)):
values = [values]
return tf.train.Feature(int64_list=tf.train.Int64List(value=values))
def bytes_feature(values):
"""Returns a TF-Feature of bytes.
Args:
values: A string.
Returns:
A TF-Feature.
"""
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[values]))
def float_feature(values):
"""Returns a TF-Feature of floats.
Args:
values: A scalar of list of values.
Returns:
A TF-Feature.
"""
if not isinstance(values, (tuple, list)):
values = [values]
return tf.train.Feature(float_list=tf.train.FloatList(value=values))
def image_to_tfexample(image_data, image_format, height, width, class_id):
return tf.train.Example(features=tf.train.Features(feature={
'image/encoded': bytes_feature(image_data),
'image/format': bytes_feature(image_format),
'image/class/label': int64_feature(class_id),
'image/height': int64_feature(height),
'image/width': int64_feature(width),
}))
def download_and_uncompress_tarball(tarball_url, dataset_dir):
"""Downloads the `tarball_url` and uncompresses it locally.
Args:
tarball_url: The URL of a tarball file.
dataset_dir: The directory where the temporary files are stored.
"""
filename = tarball_url.split('/')[-1]
filepath = os.path.join(dataset_dir, filename)
def _progress(count, block_size, total_size):
sys.stdout.write('\r>> Downloading %s %.1f%%' % (
filename, float(count * block_size) / float(total_size) * 100.0))
sys.stdout.flush()
filepath, _ = urllib.request.urlretrieve(tarball_url, filepath, _progress)
print()
statinfo = os.stat(filepath)
print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
tarfile.open(filepath, 'r:gz').extractall(dataset_dir)
def write_label_file(labels_to_class_names, dataset_dir,
filename=LABELS_FILENAME):
"""Writes a file with the list of class names.
Args:
labels_to_class_names: A map of (integer) labels to class names.
dataset_dir: The directory in which the labels file should be written.
filename: The filename where the class names are written.
"""
labels_filename = os.path.join(dataset_dir, filename)
with tf.gfile.Open(labels_filename, 'w') as f:
for label in labels_to_class_names:
class_name = labels_to_class_names[label]
f.write('%d:%s\n' % (label, class_name))
def has_labels(dataset_dir, filename=LABELS_FILENAME):
"""Specifies whether or not the dataset directory contains a label map file.
Args:
dataset_dir: The directory in which the labels file is found.
filename: The filename where the class names are written.
Returns:
`True` if the labels file exists and `False` otherwise.
"""
return tf.gfile.Exists(os.path.join(dataset_dir, filename))
def read_label_file(dataset_dir, filename=LABELS_FILENAME):
"""Reads the labels file and returns a mapping from ID to class name.
Args:
dataset_dir: The directory in which the labels file is found.
filename: The filename where the class names are written.
Returns:
A map from a label (integer) to class name.
"""
labels_filename = os.path.join(dataset_dir, filename)
with tf.gfile.Open(labels_filename, 'rb') as f:
lines = f.read().decode()
lines = lines.split('\n')
lines = filter(None, lines)
labels_to_class_names = {}
for line in lines:
index = line.index(':')
labels_to_class_names[int(line[:index])] = line[index+1:]
return labels_to_class_names
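# Minimal round-trip sketch (the path and label names are made up for
# illustration):
#
#   labels = {0: 'cat', 1: 'dog'}
#   write_label_file(labels, '/tmp/my_dataset')
#   assert has_labels('/tmp/my_dataset')
#   assert read_label_file('/tmp/my_dataset') == labels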
|
[
"[email protected]"
] | |
9a24299854d18bdf5acbd798a36d3a8c59c91c20
|
8ef8e6818c977c26d937d09b46be0d748022ea09
|
/cv/super_resolution/ttvsr/pytorch/mmedit/datasets/pipelines/__init__.py
|
c13aa348d3e6cc61b5ca58e39f9d48692875f339
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
Deep-Spark/DeepSparkHub
|
eb5996607e63ccd2c706789f64b3cc0070e7f8ef
|
9d643e88946fc4a24f2d4d073c08b05ea693f4c5
|
refs/heads/master
| 2023-09-01T11:26:49.648759 | 2023-08-25T01:50:18 | 2023-08-25T01:50:18 | 534,133,249 | 7 | 6 |
Apache-2.0
| 2023-03-28T02:54:59 | 2022-09-08T09:07:01 |
Python
|
UTF-8
|
Python
| false | false | 2,111 |
py
|
from .augmentation import (BinarizeImage, Flip, GenerateFrameIndices,
GenerateFrameIndiceswithPadding,
GenerateSegmentIndices, MirrorSequence, Pad,
RandomAffine, RandomJitter, RandomMaskDilation,
RandomTransposeHW, Resize, TemporalReverse)
from .compose import Compose
from .crop import (Crop, CropAroundCenter, CropAroundFg, CropAroundUnknown,
CropLike, FixedCrop, ModCrop, PairedRandomCrop)
from .formating import (Collect, FormatTrimap, GetMaskedImage, ImageToTensor,
ToTensor)
from .generate_assistant import GenerateCoordinateAndCell, GenerateHeatmap
from .loading import (GetSpatialDiscountMask, LoadImageFromFile,
LoadImageFromFileList, LoadMask, LoadPairedImageFromFile,
RandomLoadResizeBg)
from .matting_aug import (CompositeFg, GenerateSeg, GenerateSoftSeg,
GenerateTrimap, GenerateTrimapWithDistTransform,
MergeFgAndBg, PerturbBg, TransformTrimap)
from .normalization import Normalize, RescaleToZeroOne
from .random_down_sampling import RandomDownSampling
__all__ = [
'Collect', 'FormatTrimap', 'LoadImageFromFile', 'LoadMask',
'RandomLoadResizeBg', 'Compose', 'ImageToTensor', 'ToTensor',
'GetMaskedImage', 'BinarizeImage', 'Flip', 'Pad', 'RandomAffine',
'RandomJitter', 'RandomMaskDilation', 'RandomTransposeHW', 'Resize',
'Crop', 'CropAroundCenter', 'CropAroundUnknown', 'ModCrop',
'PairedRandomCrop', 'Normalize', 'RescaleToZeroOne', 'GenerateTrimap',
'MergeFgAndBg', 'CompositeFg', 'TemporalReverse', 'LoadImageFromFileList',
'GenerateFrameIndices', 'GenerateFrameIndiceswithPadding', 'FixedCrop',
'LoadPairedImageFromFile', 'GenerateSoftSeg', 'GenerateSeg', 'PerturbBg',
'CropAroundFg', 'GetSpatialDiscountMask', 'RandomDownSampling',
'GenerateTrimapWithDistTransform', 'TransformTrimap',
'GenerateCoordinateAndCell', 'GenerateSegmentIndices', 'MirrorSequence',
'CropLike', 'GenerateHeatmap'
]
|
[
"[email protected]"
] | |
26130706f87e5a118dc66d2a02de049ab432949c
|
5878d1482e9780a2d4cdcaf6ec5a624fa62b0283
|
/cars/urls.py
|
330b556f8d24123d0479fba1b76cd0118021ed6e
|
[] |
no_license
|
solar1um/caarsssss
|
2d5c3f466d681739a26d69f8caba0773b9e3bf06
|
e7d826f8984fa90832c099ce1b4d5efb74578c5e
|
refs/heads/main
| 2023-07-13T03:44:07.805703 | 2021-08-19T13:15:11 | 2021-08-19T13:15:11 | 398,232,307 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 683 |
py
|
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.auth import login
from django.urls import path
from .views import main_page, create_ad, all_cars, car_detail, user_cars, car_update, car_delete, search
urlpatterns = [
path('search/', search, name='search'),
path('car-delete/<int:pk>/', car_delete, name='car_delete'),
path('car-update/<int:pk>/', car_update, name='car_update'),
path('my-cars/', user_cars, name='user_cars'),
path('car/<int:pk>/', car_detail, name='car_detail'),
path('cars', all_cars, name='cars'),
path('create', create_ad, name='create_ad'),
path('', main_page, name='main_page'),
]
|
[
"[email protected]"
] | |
2ce0493d54e9a48fe0d474e508f638583259cd3c
|
fa4e5513fc069f0b7aeed1d86b78081c05253822
|
/jophiel/spiders/scrapy/contrib/memdebug.py
|
d4b1570b0a210936e70355383db0849da8d99b32
|
[] |
no_license
|
dreamfrog/jophiel
|
b0f00c62a8061030cd4a5a075cc8faf9686d44b8
|
1c51a16eca7201c0dec9bb94acd25f5a4038a862
|
refs/heads/master
| 2021-01-01T05:47:43.157268 | 2013-09-23T12:15:18 | 2013-09-23T12:15:18 | 3,557,963 | 0 | 1 | null | 2023-03-20T11:56:29 | 2012-02-27T06:31:50 |
Python
|
UTF-8
|
Python
| false | false | 1,568 |
py
|
"""
MemoryDebugger extension
See documentation in docs/topics/extensions.rst
"""
import gc
from scrapy.xlib.pydispatch import dispatcher
from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.stats import stats
from scrapy.utils.trackref import live_refs
from scrapy.middleware import BaseMiddleware
from scrapy.meta import BooleanField
class MemoryDebugger(BaseMiddleware):
memdebug_enable = BooleanField(default=False)
track_refs = BooleanField(default=False)
def __init__(self, settings):
super(MemoryDebugger, self).__init__(settings)
try:
import libxml2
self.libxml2 = libxml2
except ImportError:
self.libxml2 = None
if not self.memdebug_enable.to_value():
raise NotConfigured
dispatcher.connect(self.engine_started, signals.engine_started)
dispatcher.connect(self.engine_stopped, signals.engine_stopped)
def engine_started(self):
if self.libxml2:
self.libxml2.debugMemory(1)
def engine_stopped(self):
if self.libxml2:
self.libxml2.cleanupParser()
stats.set_value('memdebug/libxml2_leaked_bytes', self.libxml2.debugMemory(1))
gc.collect()
stats.set_value('memdebug/gc_garbage_count', len(gc.garbage))
if self.track_refs.to_value():
for cls, wdict in live_refs.iteritems():
if not wdict:
continue
stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict))
|
[
"[email protected]"
] | |
dcb88fcac088e2f235e2bfafaf887c4e4e052c50
|
d420b5780e26071caa34de8bc0cfcaf431ae4dcd
|
/articles/pd/gencsv.py
|
4de6bf7a26c26b4e6f1566f0ad547fe8d673eb83
|
[
"MIT"
] |
permissive
|
parrt/stratx
|
2a09fdacd6b7a0f9b065feada1ecdfdfa0f0b513
|
97741afcead00c83dbe7fce37a42c4a4d4890a1c
|
refs/heads/master
| 2022-04-30T13:57:50.037114 | 2022-04-24T16:55:58 | 2022-04-24T16:55:58 | 185,261,236 | 61 | 14 |
MIT
| 2019-10-21T21:14:14 | 2019-05-06T19:46:39 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 630 |
py
|
# Code to gen .csv files for use with R
from articles.pd.support import *
X, y = load_bulldozer(n=10_000)
df = pd.concat([X, y], axis=1)
df.to_csv("bulldozer10k.csv", index=False)
X, y = load_bulldozer(n=20_000)
df = pd.concat([X, y], axis=1)
df.to_csv("bulldozer20k.csv", index=False)
X, y = load_rent(n=10_000)
df = pd.concat([X, y], axis=1)
df.to_csv("rent10k.csv", index=False)
df = toy_weather_data()
df.sort_values('state').to_csv("weather.csv", index=False)
X, y, df, eqn = toy_weight_data(2000)
df.to_csv("weight.csv", index=False)
df = synthetic_interaction_data(n=2000)
df.to_csv("interaction.csv", index=False)
|
[
"[email protected]"
] | |
8ebd725ae76370b7b0bf9bffa31bab21e7446f39
|
a2e638cd0c124254e67963bda62c21351881ee75
|
/Extensions/AMWIDealTaker/FPythonCode/FClearingBusinessProcessAdaptation.py
|
15a7a2b48961c3ed020da2732bd32996beaf139c
|
[] |
no_license
|
webclinic017/fa-absa-py3
|
1ffa98f2bd72d541166fdaac421d3c84147a4e01
|
5e7cc7de3495145501ca53deb9efee2233ab7e1c
|
refs/heads/main
| 2023-04-19T10:41:21.273030 | 2021-05-10T08:50:05 | 2021-05-10T08:50:05 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,905 |
py
|
"""------------------------------------------------------------------------
MODULE
FClearingBusinessProcessAdaptation -
DESCRIPTION:
The additionalInfos that need to be set/ trade attributes that need to be set while the business process is associated to a trade can be achieved here.
Please refer to documentation for further details
VERSION: 1.0.30
--------------------------------------------------------------------------"""
def getResultingClearingStatus(acmTrade):
"""the key of this dictionary is trade status and value is
corresponding clearing status that needs to be set for that Trade Status
The user can use the trade object sent into this function for conditional check
and deciding on the instrument type associated to the trade can return dictionary
for setting trade status
"""
tradeNClearingStatus = {'BO Confirmed': 'Agreed'}
return tradeNClearingStatus
def getClearingProcessDictionary():
"""This values in this dictionary for each workflow mentioned if match found, will be used to set the values on trade"""
#The attributes that are supported are: cHouse, acquirer, repository, cBroker, middleware
#The significance of each attribute is as follows:
#cHouse: The Clearing House to which trade would be reported for clearing purposes
#acquirer: The acquirer of this trade (could be specific on the basis of the Business Process being associated on the trade)
#repository: The repository for the trade
#cBroker: The clearing broker for the trade
#middleware: The middleware for the deal
clDict = {
'LCH.Clearnet': {'cHouse':'LCH.Clearnet','acquirer':'Credit Desk', 'middleware':'MarkitWire', 'repository':'DTCC Copper'},
'Bilateral': {'acquirer':'Swap Desk', 'cHouse':'Clearstream', 'cBroker': 'BrokerD'}
}
return clDict
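# Minimal sketch of what the two hooks above return. In production the host
# framework calls them; None is passed to getResultingClearingStatus purely
# for illustration, since the acmTrade argument is not used in its body.
if __name__ == '__main__':
    print(getResultingClearingStatus(None))
    print(getClearingProcessDictionary())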
|
[
"[email protected]"
] | |
5b7fde37c03368ce9cd12fd8b7638884e0891f63
|
987a68b9c196f39ba1810a2261cd4a08c35416a3
|
/DynamicProgramming/343-integer-break.py
|
5e3e2720b0fc1d758103f49e3a9a3171172eee0d
|
[] |
no_license
|
xizhang77/LeetCode
|
c26e4699fbe1f2d2c4706b2e5ee82131be066ee5
|
ce68f5af57f772185211f4e81952d0345a6d23cb
|
refs/heads/master
| 2021-06-05T15:33:22.318833 | 2019-11-19T06:53:24 | 2019-11-19T06:53:24 | 135,076,199 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 990 |
py
|
# -*- coding: utf-8 -*-
'''
Given a positive integer n, break it into the sum of at least two positive integers and maximize the product of those integers. Return the maximum product you can get.
Example 1:
Input: 2
Output: 1
Explanation: 2 = 1 + 1, 1 × 1 = 1.
Example 2:
Input: 10
Output: 36
Explanation: 10 = 3 + 3 + 4, 3 × 3 × 4 = 36.
Note: You may assume that n is not less than 2 and not larger than 58.
'''
# Solution 1: DP
# Time: O(n^2); Space: O(n)
class Solution(object):
def integerBreak(self, n):
"""
:type n: int
:rtype: int
"""
ans = [1, 1]
for num in range(3, n+2):
i, j = 0, len(ans) - 1
temp = num - 1
while i <= j:
temp = max( temp, ans[i]*ans[j], (i+1)*(j+1), (i+1)*ans[j], (j+1)*ans[i] )
i += 1
j -= 1
ans.append(temp)
return ans[n-1]
# Solution 2: Math & Recursive
# Time & Space: at most O(n)
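# Solution 2 is only named above, so here is a minimal sketch of one common
# math-based version (not necessarily the recursion the original author had
# in mind): use as many 3s as possible, preferring 2+2 over 3+1 when the
# remainder is 1. The class name SolutionMath is made up for illustration.
class SolutionMath(object):
    def integerBreak(self, n):
        if n <= 3:
            return n - 1              # 2 -> 1*1, 3 -> 1*2
        q, r = divmod(n, 3)
        if r == 0:
            return 3 ** q
        if r == 1:
            return 3 ** (q - 1) * 4   # replace 3 + 1 with 2 + 2
        return 3 ** q * 2             # remainder 2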
|
[
"[email protected]"
] | |
092024187dc7bb38f56d29f6ee511ec6fdf2b1cc
|
69da803ffcba97b079bce6f8e3a73903b0a7e5b5
|
/jasmine_zinc/__init__.py
|
43c7edec784605cc93d009303383088623dfbd03
|
[
"MIT"
] |
permissive
|
aoirint/jasmine_zinc
|
612c3efcca199ef3e26af0a84af3fdf8744e6479
|
d70fbdbc0273015e73962be4239be0a7cbdd2bd4
|
refs/heads/main
| 2023-07-17T16:59:15.914680 | 2021-09-03T00:55:56 | 2021-09-03T00:55:56 | 402,569,583 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 488 |
py
|
__VERSION__ = '20210903.14'
from .Avatar import (
Avatar,
dict2avatar,
)
from .get_avatars import (
get_avatars,
)
from .Talk import (
Talk,
TalkEffects,
TalkEmotions,
talk2dict,
)
from .talk_on_server import (
talk_on_server,
TalkOnServerResponse,
)
from .record_talk import (
record_talk,
RecordTalkResponse,
)
from .add_talk_arguments import (
add_talk_arguments,
add_talk_arguments_effects,
add_talk_arguments_emotions,
)
|
[
"[email protected]"
] | |
eff8f8643d4b57a7e031e552085f7c818008c9cb
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/benchmark/startQiskit_QC1688.py
|
76f588bacee542205822753e644a580d1e04d3e7
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,768 |
py
|
# qubit number=5
# total number=59
import cirq
import qiskit
from qiskit import IBMQ
from qiskit.providers.ibmq import least_busy
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2,floor, sqrt, pi
import numpy as np
import networkx as nx
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f^\pm
# NOTE: use U1 gate (P gate) with \lambda = 180 ==> CZ gate
# or multi_control_Z_gate (issue #127)
controls = QuantumRegister(n, "ofc")
oracle = QuantumCircuit(controls, name="Zf")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.h(controls[n])
if n >= 2:
oracle.mcu1(pi, controls[1:], controls[0])
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[0]) # number=3
prog.h(input_qubit[1]) # number=4
prog.h(input_qubit[2]) # number=5
prog.h(input_qubit[3]) # number=6
prog.h(input_qubit[4]) # number=21
prog.h(input_qubit[0]) # number=43
prog.cz(input_qubit[4],input_qubit[0]) # number=44
prog.h(input_qubit[0]) # number=45
prog.cx(input_qubit[4],input_qubit[0]) # number=46
prog.z(input_qubit[4]) # number=47
prog.cx(input_qubit[4],input_qubit[0]) # number=48
prog.h(input_qubit[0]) # number=37
prog.cz(input_qubit[4],input_qubit[0]) # number=38
prog.h(input_qubit[0]) # number=39
Zf = build_oracle(n, f)
repeat = floor(sqrt(2 ** n) * pi / 4)
for i in range(repeat):
prog.append(Zf.to_gate(), [input_qubit[i] for i in range(n)])
prog.h(input_qubit[0]) # number=1
prog.rx(-1.0430087609918113,input_qubit[4]) # number=36
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=56
prog.cz(input_qubit[1],input_qubit[0]) # number=57
prog.h(input_qubit[0]) # number=58
prog.cx(input_qubit[1],input_qubit[0]) # number=52
prog.x(input_qubit[0]) # number=53
prog.cx(input_qubit[1],input_qubit[0]) # number=54
prog.h(input_qubit[0]) # number=49
prog.cz(input_qubit[1],input_qubit[0]) # number=50
prog.h(input_qubit[0]) # number=51
prog.x(input_qubit[1]) # number=10
prog.rx(-0.06597344572538572,input_qubit[3]) # number=27
prog.cx(input_qubit[0],input_qubit[2]) # number=22
prog.x(input_qubit[2]) # number=23
prog.h(input_qubit[2]) # number=28
prog.cz(input_qubit[0],input_qubit[2]) # number=29
prog.h(input_qubit[2]) # number=30
prog.x(input_qubit[3]) # number=12
if n>=2:
prog.mcu1(pi,input_qubit[1:],input_qubit[0])
prog.x(input_qubit[0]) # number=13
prog.x(input_qubit[1]) # number=14
prog.x(input_qubit[2]) # number=15
prog.x(input_qubit[3]) # number=16
prog.h(input_qubit[4]) # number=35
prog.h(input_qubit[0]) # number=17
prog.rx(2.4912829742967055,input_qubit[2]) # number=26
prog.h(input_qubit[1]) # number=18
prog.h(input_qubit[2]) # number=19
prog.h(input_qubit[2]) # number=55
prog.h(input_qubit[2]) # number=25
prog.h(input_qubit[3]) # number=20
# circuit end
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
key = "00000"
f = lambda rep: str(int(rep == key))
prog = make_circuit(5,f)
IBMQ.load_account()
provider = IBMQ.get_provider(hub='ibm-q')
provider.backends()
backend = least_busy(provider.backends(filters=lambda x: x.configuration().n_qubits >= 2 and not x.configuration().simulator and x.status().operational == True))
sample_shot =7924
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_QC1688.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.depth(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
|
[
"[email protected]"
] | |
9ec9f95899aec565605da89f2d4e864571d320d9
|
d554b1aa8b70fddf81da8988b4aaa43788fede88
|
/5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/223/users/3113/codes/1679_1100.py
|
a55b3972ba6f87e44e9892302818bd9371515644
|
[] |
no_license
|
JosephLevinthal/Research-projects
|
a3bc3ca3b09faad16f5cce5949a2279cf14742ba
|
60d5fd6eb864a5181f4321e7a992812f3c2139f9
|
refs/heads/master
| 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 416 |
py
|
# Test your code incrementally.
# Do not test everything at the end, because it becomes harder to find errors.
# When testing your solution, do not limit yourself to the example case.
X=int(input(""))
print("Entrada:",X)
if(X==2):
Y="Tartaruga"
elif(X==5):
Y="Garca"
elif(X==10):
Y="Arara"
elif(X==20):
Y="Mico-leao-dourado"
elif(X==50):
Y="Onca-pintada"
elif(X==100):
Y="Garoupa"
else:
Y="Invalido"
print("Animal:",Y)
|
[
"[email protected]"
] | |
b9464a2e0ee57271ea62fdd620998eaf44debd29
|
10d98fecb882d4c84595364f715f4e8b8309a66f
|
/covid_epidemiology/src/models/shared/output_utils.py
|
774adb21cb78d9ee754464ef2a9691cbcce537a1
|
[
"CC-BY-4.0",
"Apache-2.0"
] |
permissive
|
afcarl/google-research
|
51c7b70d176c0d70a5ee31ea1d87590f3d6c6f42
|
320a49f768cea27200044c0d12f394aa6c795feb
|
refs/heads/master
| 2021-12-02T18:36:03.760434 | 2021-09-30T20:59:01 | 2021-09-30T21:07:02 | 156,725,548 | 1 | 0 |
Apache-2.0
| 2018-11-08T15:13:53 | 2018-11-08T15:13:52 | null |
UTF-8
|
Python
| false | false | 4,592 |
py
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for dealing with model output including Tensorboard."""
import json
import logging
import os
import random
import time
from dataclasses import dataclass
from google import api_core
from google.cloud import storage
from covid_epidemiology.src import constants
@dataclass
class TensorboardConfig:
"""Class for holding Tensorboard configuration information."""
mode: str
gcs_directory: str = 'unknown_experiment/unknown_run'
gcs_bucket_name: str = constants.TENSORBOARD_BUCKET
local_directory: str = constants.TENSORBOARD_LOCAL_DIR
log_iterations: int = constants.TENSORBOARD_LOG_ITERATIONS
  # The profiler is only available in TensorFlow >= 2.2
use_profiler: bool = True
profiler_start: int = 10
profiler_end: int = 15
def bucket(self, project=None):
client = storage.Client(project)
return client.bucket(self.gcs_bucket_name)
@property
def log_location(self):
return self.local_directory if self.mode == 'LOCAL' else self.gcs_url
@property
def enabled(self):
return self.mode != constants.TENSORBOARD_OFF
@property
def gcs_url(self):
return os.path.join(
'gs://',
self.gcs_bucket_name,
self.gcs_directory,
)
def upload_local_directory_to_gcs(local_directory, bucket,
gcs_path):
"""Uploads all files in a directory to GCS recursively.
Args:
local_directory: The base directory.
bucket: The GCS bucket object to upload the data
gcs_path: The base "directory" to upload the files to.
"""
for entry in os.scandir(local_directory,):
local_path = entry.path
local_subdir = local_path[1 + len(local_directory):]
if entry.is_file():
remote_path = os.path.join(gcs_path, local_subdir)
blob = bucket.blob(remote_path)
blob.upload_from_filename(local_path)
elif entry.is_dir():
upload_local_directory_to_gcs(local_path, bucket,
os.path.join(gcs_path, local_subdir))
def write_tensorboard_metadata(tensorboard_config,
output_filename):
"""Writes the metadata for tensorboard if it is enabled."""
if tensorboard_config.enabled:
if tensorboard_config.mode == 'LOCAL':
upload_local_directory_to_gcs(
tensorboard_config.local_directory,
tensorboard_config.bucket(),
tensorboard_config.gcs_directory,
)
# This only works with a GCS path. It does not work with a local path.
metadata = {
'outputs': [{
'type': 'tensorboard',
'source': tensorboard_config.gcs_url,
}]
}
with open(output_filename, 'w') as f:
json.dump(metadata, f)
def upload_string_to_gcs(
output_data,
output_bucket,
output_filename,
gcs_client = None,
num_retries = 5,
base_retry_delay = 4.0,
max_retry_delay = 120,
):
"""Uploads a string to GCS with retries.
Retries using exponential backoff with full jitter when a handled exception is
thrown.
Args:
output_data: The string to be uploaded.
output_bucket: The bucket where the string will be uploaded.
output_filename: The file's name.
gcs_client: The storage client for the project.
num_retries: The maximum number of retries
base_retry_delay: The maximum initial time to wait before retrying.
max_retry_delay: The maximum total time to wait before retrying.
"""
if gcs_client is None:
gcs_client = storage.Client(project=constants.PROJECT_ID_MODEL_TRAINING)
for retry in range(num_retries + 1):
try:
output_bucket = gcs_client.get_bucket(output_bucket)
output_blob = output_bucket.blob(output_filename)
output_blob.upload_from_string(output_data)
break
except api_core.exceptions.Forbidden as e:
if retry >= num_retries:
raise
logging.warning('Retrying GCS upload: %s', e)
time.sleep(
random.uniform(0, min(max_retry_delay, base_retry_delay * 2**retry)))
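# Minimal usage sketch for upload_string_to_gcs. The bucket and object names
# below are made up, and a tiny fake client is used so the example never
# touches real GCS; any object exposing get_bucket()/blob()/upload_from_string()
# would work the same way.
if __name__ == '__main__':
  class _FakeBlob:
    def upload_from_string(self, data):
      logging.info('would upload %d bytes', len(data))
  class _FakeBucket:
    def blob(self, name):
      return _FakeBlob()
  class _FakeClient:
    def get_bucket(self, name):
      return _FakeBucket()
  upload_string_to_gcs(
      output_data='{"run_id": 123}',
      output_bucket='my-example-bucket',  # hypothetical bucket name
      output_filename='runs/run_123.json',  # hypothetical object path
      gcs_client=_FakeClient(),
  )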
|
[
"[email protected]"
] | |
41c598c7131b453e6ff11fbb7c4216da4a3bad8b
|
73758dde83d1a1823c103e1a4ba71e7c95168f71
|
/nsd2008/py01/day05/login.py
|
41b2a5b3856835bdf645d86b39c1113f9812d3e2
|
[] |
no_license
|
tonggh220/md_5_nsd_notes
|
07ffdee7c23963a7a461f2a2340143b0e97bd9e1
|
a58a021ad4c7fbdf7df327424dc518f4044c5116
|
refs/heads/master
| 2023-07-02T01:34:38.798929 | 2021-05-12T08:48:40 | 2021-05-12T08:48:40 | 393,885,415 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,094 |
py
|
import getpass
userdb = {}
def register():
    'Used for new user registration'
user = input("username: ").strip()
if user == "":
print("用户名不能为空")
elif user in userdb:
print("用户已存在")
else:
password = input("password: ")
userdb[user] = password
print("注册成功")
def login():
    'Used for logging in'
user = input("username: ").strip()
passwd = getpass.getpass("password: ")
# if (user in userdb) and (userdb[user] == passwd):
if userdb.get(user) == passwd:
print("登陆成功")
else:
print("登陆失败")
def show_menu():
    'Main program body; implements the program logic'
funcs = {'0': register, '1': login}
prompt = """(0) 注册
(1) 登陆
(2) 退出
请选择(0/1/2): """
while 1:
choice = input(prompt).strip()
if choice not in ['0', '1', '2']:
print("无效的输入,请重试。")
continue
if choice == '2':
print('Bye-bye')
break
funcs[choice]()
if __name__ == '__main__':
show_menu()
|
[
"[email protected]"
] | |
597e3fae0245c167c66c0802ad2ab108945c5cad
|
18d55703fe368ddeb568833cecf4138d2dab2b5c
|
/faqs/migrations/0002_auto_20190227_1937.py
|
90b461793cb76881c19cd2530bedda3409bfd6d1
|
[] |
no_license
|
Rood17/CAWebApp
|
b660f5d0c01cb2c37f274b9d310922058150c2ec
|
de5f06c571782d68d8ca9afced8d7c70408ada99
|
refs/heads/master
| 2022-11-24T22:27:14.088809 | 2019-08-07T23:23:25 | 2019-08-07T23:23:25 | 188,956,728 | 0 | 0 | null | 2022-11-22T02:57:00 | 2019-05-28T04:58:37 |
HTML
|
UTF-8
|
Python
| false | false | 1,078 |
py
|
# Generated by Django 2.0.2 on 2019-02-28 01:37
import ckeditor.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('faqs', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='faqs',
fields=[
('id_faqs', models.AutoField(primary_key=True, serialize=False)),
('pregunta', ckeditor.fields.RichTextField(max_length=8000, verbose_name='Información')),
('respuesta', ckeditor.fields.RichTextField(max_length=8000, verbose_name='Información')),
('created', models.DateField(auto_now_add=True, verbose_name='Fecha de Creación')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Fecha de Edición')),
],
options={
'verbose_name': 'faq',
'verbose_name_plural': 'Faqs',
'ordering': ['-created'],
},
),
migrations.DeleteModel(
name='about',
),
]
|
[
"[email protected]"
] | |
c39130c949648637391d0534dc3556ac91430aaf
|
07bf4a6fc924dfc5198ab55c7185bf8f1b36865b
|
/django_crud/articles/migrations/0007_comment_question_question.py
|
2ba202d31838f01f57b7b79874fe93157ef9aa68
|
[] |
no_license
|
tesschung/django
|
c73d29a5cece83f852766ff66c635c82d3337f30
|
231a6131532757a66ac3dd038a2f2f1024faf14f
|
refs/heads/master
| 2020-07-04T14:15:57.246652 | 2020-02-12T09:23:23 | 2020-02-12T09:23:23 | 202,301,189 | 4 | 1 | null | 2019-12-05T00:37:33 | 2019-08-14T07:48:52 |
Python
|
UTF-8
|
Python
| false | false | 940 |
py
|
# Generated by Django 2.2.5 on 2019-09-25 08:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('articles', '0006_school_student'),
]
operations = [
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question', models.CharField(max_length=50)),
],
),
migrations.CreateModel(
name='Comment_question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment_question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comment_questions', to='articles.Question')),
],
),
]
|
[
"[email protected]"
] | |
839d9030156fe6510670b1c7827942fb927c1ebd
|
c47340ae6bcac6002961cc2c6d2fecb353c1e502
|
/controlm_py/models/variables.py
|
c26b170ca12b2bb0246d5a53137761c803f3be1b
|
[
"MIT"
] |
permissive
|
rafaeldelrey/controlm_py
|
6d9f56b8b6e72750f329d85b932ace6c41002cbd
|
ed1eb648d1d23e587321227217cbfcc5065535ab
|
refs/heads/main
| 2023-04-23T09:01:32.024725 | 2021-05-19T00:25:53 | 2021-05-19T00:25:53 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,352 |
py
|
# coding: utf-8
"""
Control-M Services
Provides access to BMC Control-M Services # noqa: E501
OpenAPI spec version: 9.20.115
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class Variables(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'variables': 'list[dict(str, str)]'
}
attribute_map = {
'variables': 'variables'
}
def __init__(self, variables=None): # noqa: E501
"""Variables - a model defined in Swagger""" # noqa: E501
self._variables = None
self.discriminator = None
if variables is not None:
self.variables = variables
@property
def variables(self):
"""Gets the variables of this Variables. # noqa: E501
Key value map where key is pool variables in format %%\\\\PoolName\\AUTOVarInPool. HIDDEN. # noqa: E501
:return: The variables of this Variables. # noqa: E501
:rtype: list[dict(str, str)]
"""
return self._variables
@variables.setter
def variables(self, variables):
"""Sets the variables of this Variables.
Key value map where key is pool variables in format %%\\\\PoolName\\AUTOVarInPool. HIDDEN. # noqa: E501
:param variables: The variables of this Variables. # noqa: E501
:type: list[dict(str, str)]
"""
self._variables = variables
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(Variables, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Variables):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
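# Minimal usage sketch (the variable key below is made up; the docstring on
# the `variables` property describes the real pool-variable key format):
# build the generated model, round-trip it through to_dict(), and compare
# two equal instances.
if __name__ == '__main__':
    v = Variables(variables=[{'my_pool_variable': '42'}])
    print(v.to_dict())  # {'variables': [{'my_pool_variable': '42'}]}
    print(v == Variables(variables=[{'my_pool_variable': '42'}]))  # True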
|
[
"[email protected]"
] | |
666971d8d10dd3a8574ea38e045f5d89f1b53419
|
fee12edf8fed01439d1801ce7aad9ac38d4ec079
|
/files-working2.py
|
9f79888120e60234fbb25c5a9fa5d2c4b4e9826a
|
[] |
no_license
|
frclasso/python_examples_three
|
18fd855f60644a52507fe3cffa3a4a3445435f1a
|
05c013aadd6d953283928f525aff051c6f04f5d6
|
refs/heads/master
| 2020-12-31T06:08:47.156170 | 2017-02-01T16:06:10 | 2017-02-01T16:06:10 | 80,636,736 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 425 |
py
|
#!/usr/bin/env python3
def main():
buffersize = 50000
    infile = open('bigfile.txt', 'r')  # second argument selects the mode: r = read, w = write, a = append
outfile = open('new.txt', 'w')
buffer = infile.read(buffersize)
while len(buffer):
outfile.write(buffer)
print ('.', end='')
buffer = infile.read(buffersize)
print()
print('Done!!')
if __name__=='__main__':main()
|
[
"[email protected]"
] | |
2f8164d7bafe20bd474ad14f2c94e8c354756b2c
|
86fcf88a4a4fe94871bc835580059d140e6d4ce5
|
/1_Prepare_Data/load_redis_q.py
|
294c30bc3dcdba48de6a41da263d5b640e190fc0
|
[
"MIT"
] |
permissive
|
BL-Labs/19Cbooks
|
576fc8eb40f7df5c8284fdb0ff425b7e5d74dddc
|
3166c7adad151d5e5194171cd5b0397bd910b669
|
refs/heads/master
| 2020-07-02T05:09:05.881257 | 2013-09-17T13:18:25 | 2013-09-17T13:18:25 | 12,895,637 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 473 |
py
|
import os, time
from redis import Redis
r = Redis()
root = "md"
q = "q"
LIMIT = 3000
PAUSE = 15
cur = 0.0
st = time.time()
for div in os.listdir(root):
for ident in os.listdir(os.path.join(root, div)):
cur += 1
r.lpush(q, ident)
if not cur % 100:
print("{0} added. {1}/s".format(cur, (time.time()-st) / float(cur) ))
if r.llen(q) > LIMIT:
print("Input Queue over {0}, pausing for {1}s".format(LIMIT, PAUSE))
time.sleep(PAUSE)
|
[
"[email protected]"
] | |
85a9e832946350559a5ea18d4d1eccba4f712d9c
|
6f3b1ed2385f484c39fccb47d9f75bc1179859aa
|
/src/main/resources/terraxld/organizations.py
|
b2ce7cdb9b1aaa6d6b67a54e97e3211c717b1a10
|
[
"MIT"
] |
permissive
|
xebialabs-community/xld-terraform-enterprise-plugin
|
f3302aa0648d4886147248fac4611497a4b508bd
|
d0c478c604fcaca483e68fc9d593b82ed7399eaa
|
refs/heads/master
| 2021-06-24T06:06:49.580042 | 2021-04-28T06:37:28 | 2021-04-28T06:37:28 | 222,692,769 | 1 | 1 | null | 2019-11-19T12:37:54 | 2019-11-19T12:37:53 | null |
UTF-8
|
Python
| false | false | 1,960 |
py
|
#
# Copyright 2020 XEBIALABS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""
Module for Terraform Enterprise API Endpoint: Organizations.
"""
import json
import requests
from .endpoint import TFEEndpoint
class TFEOrganizations(TFEEndpoint):
"""
The Organizations API is used to list, show, create, update, and destroy organizations.
https://www.terraform.io/docs/enterprise/api/organizations.html
"""
def __init__(self, base_url, organization, headers):
super(TFEOrganizations, self).__init__(base_url, organization, headers)
self._org_base_url = "{base_url}/organizations".format(base_url=base_url)
def lst(self):
"""
GET /organizations
"""
return self._ls(self._org_base_url)
def show(self, organization_name):
"""
GET /organizations/:organization_name
"""
url = "{0}/{1}".format(self._org_base_url, organization_name)
return self._show(url)
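# Minimal usage sketch. The token, organization name and base URL below are
# placeholders, and the Authorization/Content-Type headers follow the usual
# Terraform Enterprise v2 API convention (an assumption, not something this
# module enforces).
if __name__ == '__main__':
    headers = {
        'Authorization': 'Bearer <api-token>',
        'Content-Type': 'application/vnd.api+json',
    }
    orgs = TFEOrganizations('https://app.terraform.io/api/v2', 'my-org', headers)
    print(orgs.lst())             # GET /organizations
    print(orgs.show('my-org'))    # GET /organizations/my-org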
|
[
"[email protected]"
] | |
36f370bfc178c0aa306640b63a65da98ab663980
|
3c52eda991b4a37e2b807dd1e05f07139637c758
|
/examples/chat_sock_client.py
|
fa28e7decb30698e7e07a783163e3c8203bfc7ec
|
[
"Apache-2.0"
] |
permissive
|
pgiri/pycos
|
ebea05b045f15f505eff5cf175798c0cf2b4a1db
|
6594c311a02490ae0701fa741b508c335f305816
|
refs/heads/master
| 2022-12-25T21:53:15.091319 | 2022-12-18T17:27:05 | 2022-12-18T17:27:05 | 91,977,091 | 52 | 9 |
NOASSERTION
| 2020-02-19T01:47:09 | 2017-05-21T17:58:23 |
Python
|
UTF-8
|
Python
| false | false | 1,798 |
py
|
#!/usr/bin/env python
# run at least two instances of this program on either same node or multiple
# nodes on local network, along with 'chat_sock_server.py'; text typed in a
# client is sent to all other clients. To use a server on another computer,
# give network IP address as argument (and port as additional argument if
# necessary)
import sys, socket
import pycos
def client_recv(conn, task=None):
task.set_daemon()
while True:
line = yield conn.recv_msg()
if not line:
break
print(line.decode())
def client_send(conn, task=None):
task.set_daemon()
while True:
msg = yield task.recv()
yield conn.send_msg(msg)
if __name__ == '__main__':
# pycos.logger.setLevel(pycos.logger.DEBUG)
# optional arg 1 is host IP address and arg 2 is port to use
host, port = '', 3456
if len(sys.argv) > 1:
host = sys.argv[1]
if len(sys.argv) > 2:
port = int(sys.argv[2])
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((host, port))
sock = pycos.AsyncSocket(sock)
# same connection is used to receive messages in one task and to send
# messages in another task
pycos.Task(client_recv, sock)
sender = pycos.Task(client_send, sock)
if sys.version_info.major > 2:
read_input = input
else:
read_input = raw_input
while True:
try:
line = read_input().strip()
if line.lower() in ('quit', 'exit'):
break
if not line:
continue
except:
break
# use message passing to send message to sender which will use
# connection's asynchronous socket method to transmit it
sender.send(line.encode())
sock.close()
|
[
"[email protected]"
] | |
45dffbeb7c7a300afca0e26cb23b61451df816bf
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2474/60832/271152.py
|
83c2b7b872d226c351dce414d3b13e6efbb81199
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 260 |
py
|
All = int(input())
for q in range(0, All):
s=input()
stack=[]
ans=0
for c in s:
if c=='(':
stack.append(c)
elif c==')':
if len(stack)!=0:
ans+=2
stack.pop()
print(ans)
|
[
"[email protected]"
] | |
eac0725aee74355dd77a533cc1a5991aabb1eff4
|
9c90b9b3c831c57b55e0a545bf047d298118e4bb
|
/test/functional/mempool_limit.py
|
044db3a1f783578e02d47d7757b043de70f0ab09
|
[
"MIT"
] |
permissive
|
wolfoxonly/dk
|
c320958d778cb2831ae6b5f28798238c632218f6
|
090c9862a1a14c187eefcb8285e43601db5ed35b
|
refs/heads/master
| 2021-09-14T01:38:54.560917 | 2018-05-07T09:19:49 | 2018-05-07T09:19:49 | 125,704,169 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,514 |
py
|
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Dealtoken Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool limiting together/eviction with the wallet."""
from test_framework.test_framework import DealtokenTestFramework
from test_framework.util import *
class MempoolLimitTest(DealtokenTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-maxmempool=5", "-spendzeroconfchange=0"]]
def run_test(self):
txouts = gen_return_txouts()
relayfee = self.nodes[0].getnetworkinfo()['relayfee']
        self.log.info('Check that mempoolminfee is minrelaytxfee')
assert_equal(self.nodes[0].getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
assert_equal(self.nodes[0].getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
txids = []
utxos = create_confirmed_utxos(relayfee, self.nodes[0], 91)
self.log.info('Create a mempool tx that will be evicted')
us0 = utxos.pop()
inputs = [{ "txid" : us0["txid"], "vout" : us0["vout"]}]
outputs = {self.nodes[0].getnewaddress() : 0.0001}
tx = self.nodes[0].createrawtransaction(inputs, outputs)
self.nodes[0].settxfee(relayfee) # specifically fund this tx with low fee
txF = self.nodes[0].fundrawtransaction(tx)
self.nodes[0].settxfee(0) # return to automatic fee selection
txFS = self.nodes[0].signrawtransaction(txF['hex'])
txid = self.nodes[0].sendrawtransaction(txFS['hex'])
relayfee = self.nodes[0].getnetworkinfo()['relayfee']
base_fee = relayfee*100
for i in range (3):
txids.append([])
txids[i] = create_lots_of_big_transactions(self.nodes[0], txouts, utxos[30*i:30*i+30], 30, (i+1)*base_fee)
self.log.info('The tx should be evicted by now')
assert(txid not in self.nodes[0].getrawmempool())
txdata = self.nodes[0].gettransaction(txid)
assert(txdata['confirmations'] == 0) #confirmation should still be 0
        self.log.info('Check that mempoolminfee is larger than minrelaytxfee')
assert_equal(self.nodes[0].getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
assert_greater_than(self.nodes[0].getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
if __name__ == '__main__':
MempoolLimitTest().main()
|
[
"[email protected]"
] | |
2b2dce53205515424c5bb11c71552d4553094d37
|
3a298c93b67386392d3dee243671f2c101decf01
|
/leetcode/learn-cards/array-101/12_move_zeros.py
|
b550db407a56d2417d7e7300073945f2bd13d3af
|
[] |
no_license
|
Zahidsqldba07/coding-problems-2
|
ffbc8408e4408fc846c828af2ec50a9d72e799bc
|
020bffbd14ca9993f1e678181ee7df761f1533de
|
refs/heads/master
| 2023-06-26T11:05:34.089697 | 2021-07-21T15:16:10 | 2021-07-21T15:16:10 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 295 |
py
|
class Solution:
def moveZeroes(self, nums):
"""
Do not return anything, modify nums in-place instead.
"""
z = 0
for i in range(len(nums)):
if nums[i] != 0:
nums[i], nums[z] = nums[z], nums[i]
z += 1
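# Minimal usage sketch: moveZeroes mutates the list in place, so the result
# is read from the list itself rather than from a return value.
if __name__ == '__main__':
    nums = [0, 1, 0, 3, 12]
    Solution().moveZeroes(nums)
    print(nums)  # [1, 3, 12, 0, 0]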
|
[
"[email protected]"
] | |
070f9494314e7d8a7ce8283fe45bb2b13ae5e7d8
|
9f9f4280a02f451776ea08365a3f119448025c25
|
/plans/hsppw/lcut_hsp-s_070_pwde_mlpc_hs.py
|
b7b1a1ccf1a62ce508fefc6b8b40da3238c1b831
|
[
"BSD-2-Clause"
] |
permissive
|
dbis-uibk/hit-prediction-code
|
6b7effb2313d2499f49b2b14dd95ae7545299291
|
c95be2cdedfcd5d5c27d0186f4c801d9be475389
|
refs/heads/master
| 2023-02-04T16:07:24.118915 | 2022-09-22T12:49:50 | 2022-09-22T12:49:50 | 226,829,436 | 2 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,159 |
py
|
"""Plan using all features."""
import os.path
from dbispipeline.evaluators import CvEpochEvaluator
from sklearn.neural_network import MLPClassifier
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import MinMaxScaler
import hit_prediction_code.common as common
from hit_prediction_code.dataloaders import ClassLoaderWrapper
from hit_prediction_code.dataloaders import CutLoaderWrapper
from hit_prediction_code.dataloaders import EssentiaLoader
import hit_prediction_code.evaluations as evaluations
from hit_prediction_code.models.pairwise import PairwiseOrdinalModel
from hit_prediction_code.result_handlers import print_results_as_json
from hit_prediction_code.transformers.label import compute_hit_score_on_df
PATH_PREFIX = 'data/hit_song_prediction_msd_bb_lfm_ab/processed'
number_of_classes = 70
dataloader = ClassLoaderWrapper(
wrapped_loader=CutLoaderWrapper(
wrapped_loader=EssentiaLoader(
dataset_path=os.path.join(
PATH_PREFIX,
'hsp-s_acousticbrainz.parquet',
),
features=[
*common.all_no_year_list(),
],
label='yang_hit_score',
nan_value=0,
data_modifier=lambda df: compute_hit_score_on_df(
df,
pc_column='lastfm_playcount',
lc_column='lastfm_listener_count',
hit_score_column='yang_hit_score',
),
),
number_of_bins=number_of_classes,
),
labels=list(range(number_of_classes)),
)
pipeline = Pipeline([
('scale', MinMaxScaler()),
('model',
PairwiseOrdinalModel(
wrapped_model=MLPClassifier(
hidden_layer_sizes=(256, 128, 128, 128, 64),
verbose=True,
),
pairs_factor=3.,
threshold_type='average',
pair_strategy='random',
pair_encoding='delta',
threshold_sample_training=False,
)),
])
evaluator = CvEpochEvaluator(
cv=evaluations.cv(),
scoring=evaluations.metrics.ordinal_classifier_scoring(),
scoring_step_size=1,
)
result_handlers = [
print_results_as_json,
]
|
[
"[email protected]"
] | |
010044719defff9a149b002bb54cdbca81295588
|
929f00c386b8686e1c802aa622875c62d295e216
|
/spikeforest/forestview/recording_views/testplotlyview.py
|
61f739cfd85644eeaf32c4bad8d6f58d58d8258e
|
[
"Apache-2.0"
] |
permissive
|
mhhennig/spikeforest
|
e0d6cbb47d15131e683545c1978abc6f99c51dc5
|
5b4507ead724af3de0be5d48a3b23aaedb0be170
|
refs/heads/master
| 2020-05-31T11:03:58.438693 | 2019-06-04T18:06:37 | 2019-06-04T18:06:37 | 190,254,208 | 0 | 0 |
Apache-2.0
| 2019-06-04T18:05:28 | 2019-06-04T18:05:28 | null |
UTF-8
|
Python
| false | false | 2,824 |
py
|
import vdomr as vd
import time
import multiprocessing
import sys
from .stdoutsender import StdoutSender
import mtlogging
import numpy as np
class TestPlotlyView(vd.Component):
def __init__(self, context):
vd.Component.__init__(self)
self._context = context
self._size = (100, 100)
self._test_plotly_widget = None
self._connection_to_init, connection_to_parent = multiprocessing.Pipe()
self._init_process = multiprocessing.Process(target=_initialize, args=(context, connection_to_parent))
self._init_process.start()
self._init_log_text = ''
vd.set_timeout(self._check_init, 0.5)
def _on_init_completed(self, init):
self._test_plotly_widget = TestPlotlyWidget()
self._test_plotly_widget.setSize(self._size)
self.refresh()
def setSize(self, size):
self._size = size
if self._test_plotly_widget:
self._test_plotly_widget.setSize(size)
def size(self):
return self._size
def tabLabel(self):
return 'Test plotly'
def render(self):
if self._test_plotly_widget:
return vd.div(
self._test_plotly_widget
)
else:
return vd.div(
vd.h3('Initializing...'),
vd.pre(self._init_log_text)
)
def _check_init(self):
if not self._test_plotly_widget:
if self._connection_to_init.poll():
msg = self._connection_to_init.recv()
if msg['name'] == 'log':
self._init_log_text = self._init_log_text + msg['text']
self.refresh()
elif msg['name'] == 'result':
self._on_init_completed(msg['result'])
return
vd.set_timeout(self._check_init, 1)
class TestPlotlyWidget(vd.Component):
def __init__(self):
vd.Component.__init__(self)
self._size = (100, 100)
self._plot = None
self._update_plot()
def setSize(self, size):
self._size = size
self._update_plot()
def _update_plot(self):
xx = np.linspace(0, 1, 10)
yy = np.cos((10 * xx)**2)
self._plot = vd.components.PlotlyPlot(
data=dict(x=xx, y=yy),
layout=dict(margin=dict(t=5)),
config=dict(),
size=self._size
)
self.refresh()
def render(self):
if not self._plot:
return vd.div('no plot.')
return self._plot
# Initialization in a worker process
mtlogging.log(root=True)
def _initialize(context, connection_to_parent):
with StdoutSender(connection=connection_to_parent):
pass
connection_to_parent.send(dict(
name='result',
result=dict()
))
|
[
"[email protected]"
] | |
9a179c09c2ccd31e9d0d55efe8784ca707ccebf0
|
2efa640e2c089a601a3c748d5ec4c80d65cb9695
|
/src/ploomber/dag/dagclients.py
|
45a7f88f2b788e3e2e5c68606b8dd5f15f4a8368
|
[
"Apache-2.0"
] |
permissive
|
BigRLab/ploomber
|
19d35345cc8548b79f73f026674186969f1c3d4e
|
e2732be116507128ec900e4ef6195f529f639358
|
refs/heads/master
| 2023-05-08T03:37:11.384226 | 2021-05-31T20:57:37 | 2021-05-31T20:57:37 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,054 |
py
|
from inspect import isclass
from collections.abc import MutableMapping
from ploomber.tasks.abc import Task
from ploomber.products.Product import Product
from ploomber.validators.string import get_suggestion, str_to_class
class DAGClients(MutableMapping):
"""
A dict-like object with validations
1. __setitem__, __getitem__ work with strings (e.g., clients['SQLScript'])
2. __setitem__ validates the key is a Task or Product subclass
"""
def __init__(self, mapping=None):
self._mapping = mapping or dict()
def __getitem__(self, key):
if isinstance(key, str):
key_obj = str_to_class(key)
else:
key_obj = key
if key_obj is None:
error = repr(key)
suggestion = get_suggestion(key)
if suggestion and str_to_class(suggestion) in self:
error += f'. Did you mean {suggestion!r}?'
raise KeyError(error)
return self._mapping[key_obj]
def __setitem__(self, key, value):
if isinstance(key, str):
key_obj = str_to_class(key)
if key_obj is None:
maybe = get_suggestion(key)
msg = (f'Could not set DAG-level client {value!r}. '
f'{key!r} is not a valid Task or '
'Product class name')
if maybe:
msg += f'. Did you mean {maybe!r}?'
raise ValueError(msg)
else:
key_obj = key
if not isclass(key_obj) or not issubclass(key_obj, (Task, Product)):
raise ValueError('DAG client keys must be Tasks '
f'or Products, value {key_obj!r} is not')
self._mapping[key_obj] = value
def __delitem__(self, key):
del self._mapping[key]
def __iter__(self):
for item in self._mapping:
yield item
def __len__(self):
return len(self._mapping)
def __repr__(self):
return f'{type(self).__name__}({self._mapping!r})'
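# Minimal usage sketch. The import path for SQLScript is an assumption about
# ploomber's public API, and the stored value is a stand-in object rather
# than a real database client; the class docstring above is what promises
# that string keys such as 'SQLScript' resolve to the matching Task class.
if __name__ == '__main__':
    from ploomber.tasks import SQLScript
    clients = DAGClients()
    clients['SQLScript'] = object()  # stand-in for a real db client
    print(clients[SQLScript] is clients['SQLScript'])  # True: same entry
    print(clients)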
|
[
"[email protected]"
] | |
0c08520810a73883b54bd1055179f58e7e018a84
|
cc873161235502933845cfdaa7b2bfd9006b70c8
|
/week7/coffeehouse/menu_api/migrations/0003_special_created_by.py
|
5eba7622b1e13064f614216ac101479e0582a734
|
[] |
no_license
|
taddeimania/class_notes
|
d8a7f72ac9abf927768072a253effd35e521fb6d
|
1cb321782caf9d823eee69fa43175cf31fd6b34f
|
refs/heads/master
| 2020-04-10T03:55:44.311163 | 2016-11-08T14:22:41 | 2016-11-08T14:22:41 | 68,213,614 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 695 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-27 14:02
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('menu_api', '0002_auto_20161026_1447'),
]
operations = [
migrations.AddField(
model_name='special',
name='created_by',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
preserve_default=False,
),
]
|
[
"[email protected]"
] | |
713c2b5154472452b4241477e6f47c0611a1fe82
|
4d5e6e0a7057123ddd7cb97027e667117e1be143
|
/control/type_casting_v2.py
|
f0aefebd5cad5c7423fdfb73587172c2743d6a1d
|
[] |
no_license
|
shubhomedia/Learn_Python
|
cee48990c04521fcbb7dbf5ad120c69170dcd1be
|
01e0a8e3dc2de87b09c963e7cb9fc5e246831ddb
|
refs/heads/master
| 2021-07-01T08:53:51.151326 | 2021-01-02T17:31:36 | 2021-01-02T17:31:36 | 204,191,119 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 142 |
py
|
x = str("s1") # x will be 's1'
y = str(2) # y will be '2'
z = str(3.0) # z will be '3.0'
print(x,y,z)
# all three printed values are of type str
|
[
"[email protected]"
] | |
e5d01453be61f2b329f66383a47ae1bd9104c98e
|
288a00d2ab34cba6c389b8c2444455aee55a8a95
|
/tests/data23/recipe-576938.py
|
252a8acccd4b737098c0a74b541a95691ad026fb
|
[
"BSD-2-Clause"
] |
permissive
|
JohannesBuchner/pystrict3
|
ffd77b7bbc378bd4d8f21b5c6bd69a0d64a52ddb
|
18b0dd369082422f9bf0f89c72e7acb53a49849c
|
refs/heads/master
| 2023-08-14T06:37:37.954880 | 2023-07-13T11:16:38 | 2023-07-13T11:16:38 | 268,571,175 | 1 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,153 |
py
|
# -*- coding: iso-8859-1 -*-
# laplace.py with mpmath
# appropriate for high precision
# Talbot suggested that the Bromwich line be deformed into a contour that begins
# and ends in the left half plane, i.e., z \to \infty at both ends.
# Due to the exponential factor the integrand decays rapidly
# on such a contour. In such situations the trapezoidal rule converges
# extraordinarily rapidly.
# For example here we compute the inverse transform of F(s) = 1/(s+1) at t = 1
#
# >>> error = Talbot(1,24)-exp(-1)
# >>> error
# (3.3306690738754696e-015+0j)
#
# The Talbot method is very powerful: here we see an error of 3.3e-015
# with only 24 function evaluations
#
# Created by Fernando Damian Nieuwveldt
# email:[email protected]
# Date : 25 October 2009
#
# Adapted to mpmath and classes by Dieter Kadelka
# email: [email protected]
# Date : 27 October 2009
#
# Reference
# L.N.Trefethen, J.A.C.Weideman, and T.Schmelzer. Talbot quadratures
# and rational approximations. BIT. Numerical Mathematics,
# 46(3):653-670, 2006.
from mpmath import mpf,mpc,pi,sin,tan,exp
# testfunction: Laplace-transform of exp(-t)
def F(s):
return 1.0/(s+1.0)
class Talbot(object):
def __init__(self,F=F,shift=0.0):
self.F = F
# test = Talbot() or test = Talbot(F) initializes with testfunction F
self.shift = shift
# Shift contour to the right in case there is a pole on the
# positive real axis :
        # Note the contour will not be optimal since it was originally developed
        # for functions with singularities on the negative real axis. For example
# take F(s) = 1/(s-1), it has a pole at s = 1, the contour needs to be
# shifted with one unit, i.e shift = 1.
# But in the test example no shifting is necessary
self.N = 24
        # with double precision this constant N seems to be best for the testfunction
# given. For N = 22 or N = 26 the error is larger (for this special
# testfunction).
# With laplace.py:
# >>> test.N = 500
# >>> print test(1) - exp(-1)
# >>> -2.10032517928e+21
# Huge (rounding?) error!
# with mp_laplace.py
# >>> mp.dps = 100
# >>> test.N = 500
# >>> print test(1) - exp(-1)
# >>> -5.098571435907316903360293189717305540117774982775731009465612344056911792735539092934425236391407436e-64
def __call__(self,t):
if t == 0:
print("ERROR: Inverse transform can not be calculated for t=0")
return ("Error");
# Initiate the stepsize
h = 2*pi/self.N
ans = 0.0
# parameters from
# T. Schmelzer, L.N. Trefethen, SIAM J. Numer. Anal. 45 (2007) 558-571
c1 = mpf('0.5017')
c2 = mpf('0.6407')
c3 = mpf('0.6122')
c4 = mpc('0','0.2645')
# The for loop is evaluating the Laplace inversion at each point theta i
# which is based on the trapezoidal rule
for k in range(self.N):
theta = -pi + (k+0.5)*h
z = self.shift + self.N/t*(c1*theta/tan(c2*theta) - c3 + c4*theta)
dz = self.N/t * (-c1*c2*theta/sin(c2*theta)**2 + c1/tan(c2*theta)+c4)
ans += exp(z*t)*self.F(z)*dz
return ((h/(2j*pi))*ans).real
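# Minimal usage sketch (an assumption about how the class is meant to be
# driven, mirroring the doctest-style comments near the top of the file):
# invert F(s) = 1/(s+1) at t = 1 and compare with the exact value exp(-1).
if __name__ == '__main__':
    from mpmath import mp
    mp.dps = 30                    # working precision in decimal digits
    test = Talbot(F, shift=0.0)    # F is the test transform defined above
    approx = test(1)
    print(approx)
    print(approx - exp(-1))        # inversion error; tiny at this precision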
|
[
"[email protected]"
] | |
5f6f1d4d9488f159cbe77963ab23c55884831ffc
|
181af10fcf40b824fe92d3b8f72fd15d6d1490c2
|
/Contests/101-200/week 200/1536. Minimum Swaps to Arrange a Binary Grid/Minimum Swaps to Arrange a Binary Grid.py
|
3945b9170c8ea867c0294760570f9df5e6239462
|
[] |
no_license
|
wangyendt/LeetCode
|
402c59a0b7b7f5b3a672231ea5dad8056ade36af
|
4a3ba15284c45b2d8bf38306c8c8526ae174615c
|
refs/heads/master
| 2023-08-10T06:27:54.995152 | 2023-08-10T02:22:27 | 2023-08-10T02:22:27 | 176,651,399 | 6 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 919 |
py
|
#!/usr/bin/env python
# encoding: utf-8
"""
@author: Wayne
@contact: [email protected]
@software: PyCharm
@file: Minimum Swaps to Arrange a Binary Grid
@time: 2020/08/03 04:39
"""
from typing import List
class Solution:
    def minSwaps(self, A: List[List[int]]) -> int:
m, n = len(A), len(A[0])
res = [0] * m
for i in range(m):
for j in range(n):
if not A[i][~j]:
res[i] += 1
else:
break
ret = 0
for i, r in enumerate(res):
target = m - 1 - i
if res[i] >= target: continue
for j in range(i + 1, m):
if res[j] >= target:
ret += j - i
res[i + 1:j + 1] = res[i:j]
break
else:
return -1
return ret
so = Solution()
print(so.minSwaps([[0, 0, 1], [1, 1, 0], [1, 0, 0]]))
|
[
"[email protected]"
] | |
aea2948697eef4b3cd89e905116f4a3832e63170
|
38ac429d63369922e12e19cdda042b08b8123027
|
/test/test_json_view.py
|
c5b01994dc9e1d97795a7c689c0a2b5dd2bb5dcb
|
[] |
no_license
|
aviv-julienjehannet/collibra_apiclient
|
0dfebe5df2eb929645b87eba42fab4c06ff0a6be
|
10a89e7acaf56ab8c7417698cd12616107706b6b
|
refs/heads/master
| 2021-09-12T16:52:19.803624 | 2018-04-19T01:35:20 | 2018-04-19T01:35:20 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 911 |
py
|
# coding: utf-8
"""
\"Data Governance Center: REST API v2\"
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.json_view import JsonView # noqa: E501
from swagger_client.rest import ApiException
class TestJsonView(unittest.TestCase):
"""JsonView unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testJsonView(self):
"""Test JsonView"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.json_view.JsonView() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
3b8d5fa6b4cced71be6df8eb6d0a7e4f9cbb5ac9
|
ed1dd7bc3837cf4059a529d71f43b53d7c6a65d8
|
/RosieGUI.py
|
cb83e9b57f6ff93bdfdee5437d9ca7db7b2e8604
|
[] |
no_license
|
amininger/rosiethor
|
dbc290e8684e2b1a73962af0fb84ad6c65956f1e
|
789396f08e10d6e46a684622cd95e7d309d9a246
|
refs/heads/master
| 2021-04-28T14:25:19.807467 | 2019-03-05T16:49:21 | 2019-03-05T16:49:21 | 121,964,339 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,356 |
py
|
from tkinter import *
import tkinter.font
import sys
from rosiethor import *
class RosieGUI(Frame):
def create_widgets(self):
self.grid(row=0, column=0, sticky=N+S+E+W)
self.columnconfigure(0, weight=3, minsize=600)
self.columnconfigure(1, weight=1, minsize=400)
self.columnconfigure(2, weight=1, minsize=100)
self.rowconfigure(0, weight=10, minsize=400)
self.rowconfigure(1, weight=1, minsize=50)
self.messages_list = Listbox(self, font=("Times", "12"))
self.scrollbar = Scrollbar(self.messages_list)
self.messages_list.config(yscrollcommand=self.scrollbar.set)
self.scrollbar.config(command=self.messages_list.yview)
self.messages_list.grid(row=0, column=0, sticky=N+S+E+W)
self.scrollbar.pack(side=RIGHT, fill=Y)
self.script_frame = Frame(self)
self.script_frame.grid(row=0, column=1, sticky=N+S+E+W)
self.chat_entry = Entry(self, font=("Times", "16"))
self.chat_entry.bind('<Return>', lambda key: self.on_submit_click())
self.chat_entry.bind('<Up>', lambda key: self.scroll_history(-1))
self.chat_entry.bind('<Down>', lambda key: self.scroll_history(1))
self.chat_entry.grid(row=1, column=0, sticky=N+S+E+W)
self.submit_button = Button(self, text="Send", font=("Times", "24"))
self.submit_button["command"] = self.on_submit_click
self.submit_button.grid(row=1, column=1, sticky=N+S+E+W)
self.run_button = Button(self, text="Run", font=("Times", "24"))
self.run_button["command"] = self.on_run_click
self.run_button.grid(row=1, column=2, sticky=N+S+E+W)
def init_soar_agent(self, config_file):
self.agent = RosieThorAgent(self.sim, config_filename=config_file)
self.agent.connectors["language"].register_message_callback(self.receive_message)
self.agent.connect()
self.sim.start(self.agent.scene)
def create_script_buttons(self):
self.script = []
if self.agent.messages_file != None:
with open(self.agent.messages_file, 'r') as f:
self.script = [ line.rstrip('\n') for line in f.readlines() if len(line.rstrip('\n')) > 0 and line[0] != '#']
row = 0
for message in self.script:
button = Button(self.script_frame, text=message[:30], font=("Times", "16"))
button["command"] = lambda message=message: self.send_message(message)
button.grid(row=row, column=0, sticky=N+S+E+W)
row += 1
def send_message(self, message):
self.messages_list.insert(END, message)
self.chat_entry.delete(0, END)
if len(self.message_history) == 0 or self.message_history[-1] != message:
self.message_history.append(message)
self.history_index = len(self.message_history)
self.agent.connectors["language"].send_message(message)
def receive_message(self, message):
self.messages_list.insert(END, message)
def on_submit_click(self):
self.send_message(self.chat_entry.get())
def on_run_click(self):
self.agent.start()
def scroll_history(self, delta):
if self.history_index == 0 and delta == -1:
return
if self.history_index == len(self.message_history) and delta == 1:
return
self.history_index += delta
self.chat_entry.delete(0, END)
if self.history_index < len(self.message_history):
self.chat_entry.insert(END, self.message_history[self.history_index])
def on_exit(self):
self.agent.kill()
root.destroy()
def __init__(self, rosie_config, master=None):
Frame.__init__(self, master, width=800, height=600)
master.columnconfigure(0, weight=1)
master.rowconfigure(0, weight=1)
self.message_history = []
self.history_index = 0
self.create_widgets()
self.sim = Ai2ThorSimulator()
self.init_soar_agent(rosie_config)
self.create_script_buttons()
controller_gui = ControllerGUI(self.sim, master=self)
if len(sys.argv) == 1:
print("Need to specify rosie config file as argument")
else:
root = Tk()
rosie_gui = RosieGUI(sys.argv[1], master=root)
root.protocol("WM_DELETE_WINDOW", rosie_gui.on_exit)
root.mainloop()
|
[
"[email protected]"
] | |
4116173f3381c4d0ec24d7a2542a504531fa2eb0
|
60a831fb3c92a9d2a2b52ff7f5a0f665d4692a24
|
/IronPythonStubs/release/stubs.min/System/__init___parts/EntryPointNotFoundException.py
|
cd35b92ca551fc01347a5e98978af60cbbbfdd4f
|
[
"MIT"
] |
permissive
|
shnlmn/Rhino-Grasshopper-Scripts
|
a9411098c5d1bbc55feb782def565d535b27b709
|
0e43c3c1d09fb12cdbd86a3c4e2ba49982e0f823
|
refs/heads/master
| 2020-04-10T18:59:43.518140 | 2020-04-08T02:49:07 | 2020-04-08T02:49:07 | 161,219,695 | 11 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,221 |
py
|
class EntryPointNotFoundException(TypeLoadException,ISerializable,_Exception):
"""
The exception that is thrown when an attempt to load a class fails due to the absence of an entry method.
EntryPointNotFoundException()
EntryPointNotFoundException(message: str)
EntryPointNotFoundException(message: str,inner: Exception)
"""
def add_SerializeObjectState(self,*args):
""" add_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def remove_SerializeObjectState(self,*args):
""" remove_SerializeObjectState(self: Exception,value: EventHandler[SafeSerializationEventArgs]) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,message=None,inner=None):
"""
__new__(cls: type)
__new__(cls: type,message: str)
__new__(cls: type,message: str,inner: Exception)
__new__(cls: type,info: SerializationInfo,context: StreamingContext)
"""
pass
def __reduce_ex__(self,*args):
pass
def __str__(self,*args):
pass
|
[
"[email protected]"
] | |
44b42dfbde5aabbad49f01d0c40eae805b3bd01f
|
0d61f90e3a7877e91d72fed71b0895c7070dc046
|
/final_project/.history/project/account_app/forms_20210104104511.py
|
4936c5cd23264028d4004e9275b0dd27cb819201
|
[] |
no_license
|
lienusrob/final_project
|
44d7d90dc0b7efc0cf55501549a5af0110d09b3b
|
4164769626813f044ec2af3e7842514b5699ef77
|
refs/heads/master
| 2023-02-10T16:36:33.439215 | 2021-01-05T09:34:01 | 2021-01-05T09:34:01 | 325,002,104 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 603 |
py
|
from django.forms import ModelForm, widgets
from django.forms import Textarea
from .models import Contact, AnonymousReview
from django import forms
# class ContactForm(forms.Form):
# subject = forms.CharField(max_length=100)
# message = forms.CharField(widget=forms.Textarea)
# sender = forms.EmailField()
# cc_myself = forms.BooleanField(required=False)
class ReviewsFrom(forms.ModelForm):
name = forms.CharField(max_length= 100)
details = forms.CharField(widget=forms.Textarea)
date = forms.DateTimeField(required=True, input_formats=["%Y-%m-%dT%H:%M"])
|
[
"[email protected]"
] | |
386494348a69dc42f26350743415cea70795bbb9
|
a03a7935a191d63bee76fd3b85a61ee27f98904a
|
/test/tests/databases/bov.py
|
d2400c2618fb6604ee069a960e95c77fb50876f3
|
[] |
no_license
|
cchriste/visit
|
57091c4a512ab87efd17c64c7494aa4cf01b7e53
|
c72c413f571e56b52fb7221955219f11f4ba19e3
|
refs/heads/master
| 2020-04-12T06:25:27.458132 | 2015-10-12T15:41:49 | 2015-10-12T15:41:49 | 10,111,791 | 5 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,436 |
py
|
# ----------------------------------------------------------------------------
# CLASSES: nightly
#
# Test Case: bov.py
#
# Tests: mesh - 3D rectilinear, multiple domain
# plots - Pseudocolor, Subset, Label, Contour
# operators - Slice
#
# Programmer: Brad Whitlock
# Date: Fri Mar 17 14:37:45 PST 2006
#
# Modifications:
# Brad Whitlock, Thu May 4 14:02:29 PST 2006
# Added testing of INT and DOUBLE BOV files.
#
# ----------------------------------------------------------------------------
def SaveTestImage(name):
# Save these images somewhat larger than a regular test case image
# since the images contain a lot of text.
backup = GetSaveWindowAttributes()
swa = SaveWindowAttributes()
swa.width = 500
swa.height = 500
swa.screenCapture = 0
Test(name, swa)
SetSaveWindowAttributes(backup)
def TestBOVDivide(prefix, db, doSubset):
# Take a picture to make sure that the division took. There will be
# a lot of bricks.
OpenDatabase(db)
if doSubset:
AddPlot("Subset", "bricks")
subAtts = SubsetAttributes()
subAtts.legendFlag = 0
SetPlotOptions(subAtts)
else:
AddPlot("Pseudocolor", "myvar")
DrawPlots()
v = View3DAttributes()
v.viewNormal = (0.534598, 0.40012, 0.744385)
v.focus = (15, 15, 15)
v.viewUp = (-0.228183, 0.916444, -0.32873)
v.viewAngle = 30
v.parallelScale = 8.66025
v.nearPlane = -17.3205
v.farPlane = 17.3205
v.imagePan = (0, 0)
v.imageZoom = 1
v.perspective = 1
v.eyeAngle = 2
v.centerOfRotationSet = 0
v.centerOfRotation = (15, 15, 15)
SetView3D(v)
Test(prefix + "00")
# Make sure there are the right number of zones.
Query("NumZones")
TestText(prefix + "01", GetQueryOutputString())
# Let's slice a few times to make sure that crucial areas have the
# right values
AddPlot("Mesh", "mesh")
AddPlot("Label", "myvar")
L = LabelAttributes()
L.textHeight1 = 0.03
L.textHeight2 = 0.03
SetPlotOptions(L)
SetActivePlots((0,1,2))
AddOperator("Slice")
s = SliceAttributes()
s.originType = s.Intercept # Point, Intercept, Percent, Zone, Node
s.originIntercept = 10.001
s.normal = (0, 0, 1)
s.axisType = s.ZAxis # XAxis, YAxis, ZAxis, Arbitrary
s.upAxis = (0, 1, 0)
s.project2d = 1
SetOperatorOptions(s)
DrawPlots()
v2 = GetView2D()
v2.windowCoords = (12.0201, 13.0004, 9.99781, 10.9888)
v2.viewportCoords = (0.2, 0.95, 0.15, 0.95)
v2.fullFrameActivationMode = v2.Auto # On, Off, Auto
v2.fullFrameAutoThreshold = 100
SetView2D(v2)
SaveTestImage(prefix+"02")
# Move to another slice on the far edge that will have the max zone #
s.originIntercept = 19.998
SetOperatorOptions(s)
v3 = View2DAttributes()
v3.windowCoords = (19.2017, 20.0179, 19.1966, 20.0217)
v3.viewportCoords = (0.2, 0.95, 0.15, 0.95)
v3.fullFrameActivationMode = v3.Auto # On, Off, Auto
v3.fullFrameAutoThreshold = 100
SetView2D(v3)
SaveTestImage(prefix+"03")
# Move to another slice in the middle.
s.originIntercept = 15.01
SetOperatorOptions(s)
v4 = View2DAttributes()
v4.windowCoords = (14.6419, 15.361, 15.638, 16.365)
v4.viewportCoords = (0.2, 0.95, 0.15, 0.95)
v4.fullFrameActivationMode = v4.Auto # On, Off, Auto
v4.fullFrameAutoThreshold = 100
SetView2D(v4)
SaveTestImage(prefix+"04")
DeleteAllPlots()
# Test that ghost zones are right.
AddPlot("Pseudocolor", "myvar")
p = PseudocolorAttributes()
p.SetOpacityType(p.Constant)
p.opacity = 0.25
SetPlotOptions(p)
DrawPlots()
v5 = View3DAttributes()
v5.viewNormal = (0.772475, 0.402431, 0.491255)
v5.focus = (15, 15, 15)
v5.viewUp = (-0.355911, 0.915018, -0.18992)
v5.viewAngle = 30
v5.parallelScale = 8.66025
v5.nearPlane = -17.3205
v5.farPlane = 17.3205
v5.imagePan = (-0.0253114, 0.0398304)
v5.imageZoom = 1.20806
v5.perspective = 1
v5.eyeAngle = 2
v5.centerOfRotationSet = 0
v5.centerOfRotation = (15, 15, 15)
SetView3D(v5)
Test(prefix+"05")
# Zoom in on a contour plot to make sure that there are no tears.
# This means that the ghost zones were created properly.
ClearWindow()
p.SetOpacityType(p.FullyOpaque)
SetPlotOptions(p)
AddOperator("Isosurface")
iso = IsosurfaceAttributes()
iso.variable = "radial"
SetOperatorOptions(iso)
DrawPlots()
v6 = View3DAttributes()
v6.viewNormal = (0.373168, 0.412282, 0.831125)
v6.focus = (15, 15, 15)
v6.viewUp = (-0.181836, 0.910964, -0.370244)
v6.viewAngle = 30
v6.parallelScale = 8.66025
v6.nearPlane = -17.3205
v6.farPlane = 17.3205
v6.imagePan = (0.0994254, 0.0810457)
v6.imageZoom = 1.94126
v6.perspective = 1
v6.eyeAngle = 2
v6.centerOfRotationSet = 0
v6.centerOfRotation = (15, 15, 15)
SetView3D(v6)
Test(prefix+"06")
DeleteAllPlots()
CloseDatabase(db)
def TestBOVType(bovtype, prefixes):
# Test the original BOV file without it being divided.
TestSection("Reading BOV file of %s" % bovtype)
TestBOVDivide(prefixes[0], data_path("bov_test_data/%s_indices.bov") % bovtype, 0)
#
# Test 2 BOV files that are being subdivided into smaller bricks
# by the BOV plugin so that there are multiple domains that
# can be processed in parallel.
#
TestSection("Decomposing BOV of %s into smaller bricks" % bovtype)
TestBOVDivide(prefixes[1], data_path("bov_test_data/%s_indices_div.bov") % bovtype, 1)
TestSection("Decomposing BOV of %s with small header into smaller bricks" % bovtype)
TestBOVDivide(prefixes[2], data_path("bov_test_data/%s_indices_div_with_header.bov") % bovtype, 1)
def main():
# Define some expressions
DefineScalarExpression("x", "coord(mesh)[0]")
DefineScalarExpression("y", "coord(mesh)[1]")
DefineScalarExpression("z", "coord(mesh)[2]")
DefineScalarExpression("dx", "x - 15.")
DefineScalarExpression("dy", "y - 15.")
DefineScalarExpression("dz", "z - 15.")
DefineScalarExpression("radial", "sqrt(dx*dx + dy*dy + dz*dz)")
TestBOVType("FLOAT", ("bov_0_", "bov_1_", "bov_2_"))
TestBOVType("DOUBLE", ("bov_3_", "bov_4_", "bov_5_"))
TestBOVType("INT", ("bov_6_", "bov_7_", "bov_8_"))
Exit()
main()
|
[
"bonnell@18c085ea-50e0-402c-830e-de6fd14e8384"
] |
bonnell@18c085ea-50e0-402c-830e-de6fd14e8384
|
44a9e486f57f5d09e2a7f03dcd8cab278f223b96
|
8f9f6a5348b832e9f12ef6baf6bcdd8842ff1c83
|
/core/migrations/0002_profile.py
|
987bf0b7af1dcd2c764ae64e1df702d401441278
|
[] |
no_license
|
jbrit/raffle
|
20b48d016ac50082733c7c34f3891beb268c4eb9
|
2ee83ffe564f59bc7afd6b12740ea3a98c42986e
|
refs/heads/main
| 2023-06-04T14:22:09.808595 | 2021-03-19T14:13:38 | 2021-03-19T14:13:38 | 343,774,020 | 1 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 702 |
py
|
# Generated by Django 3.1.7 on 2021-03-02 23:09
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email_confirmed', models.BooleanField(default=False)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"[email protected]"
] | |
f3197d14dbee34f7d0ebfe6c8268d9e4f61c5fde
|
00ccdc877771cb0cf493526d1e201e0f625bf5e7
|
/dohq_teamcity/api/test_api.py
|
1335616d3fe97e39870d49b309ae190556a049db
|
[
"MIT"
] |
permissive
|
expobrain/teamcity
|
a52928045166bb5d34f4a0396cb840bfee8f43d5
|
9f04c0692a2c5b277a608c2f11cc1fb48e0c87e2
|
refs/heads/master
| 2020-04-13T13:11:07.270515 | 2018-10-18T01:40:06 | 2018-10-18T01:40:06 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,708 |
py
|
# coding: utf-8
"""
TeamCity REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 10.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
from dohq_teamcity.custom.base_model import TeamCityObject
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dohq_teamcity.models.test import Test # noqa: F401,E501
from dohq_teamcity.models.tests import Tests # noqa: F401,E501
class TestApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
base_name = 'Test'
def __init__(self, api_client=None):
self.api_client = api_client
def get_tests(self, **kwargs): # noqa: E501
"""get_tests # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tests(async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str locator:
:param str fields:
:return: Tests
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__get_tests_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.__get_tests_with_http_info(**kwargs) # noqa: E501
return data
def serve_instance(self, test_locator, **kwargs): # noqa: E501
"""serve_instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.serve_instance(test_locator, async_req=True)
>>> result = thread.get()
:param async_req: bool
:param str test_locator: (required)
:param str fields:
:return: Test
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.__serve_instance_with_http_info(test_locator, **kwargs) # noqa: E501
else:
(data) = self.__serve_instance_with_http_info(test_locator, **kwargs) # noqa: E501
return data
def __get_tests_with_http_info(self, **kwargs): # noqa: E501
"""get_tests # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__get_tests_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str locator:
:param str fields:
:return: Tests
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['locator', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tests" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'locator' in params:
query_params.append(('locator', params['locator'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/tests', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Tests', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def __serve_instance_with_http_info(self, test_locator, **kwargs): # noqa: E501
"""serve_instance # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.__serve_instance_with_http_info(test_locator, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str test_locator: (required)
:param str fields:
:return: Test
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['test_locator', 'fields'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method serve_instance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'test_locator' is set
if ('test_locator' not in params or
params['test_locator'] is None):
raise ValueError("Missing the required parameter `test_locator` when calling `serve_instance`") # noqa: E501
collection_formats = {}
path_params = {}
if 'test_locator' in params:
if isinstance(params['test_locator'], TeamCityObject):
path_params['testLocator'] = params['test_locator'].locator_id
else:
path_params['testLocator'] = params['test_locator'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/app/rest/tests/{testLocator}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Test', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
|
[
"[email protected]"
] | |
b15b14e0a3c393b327f48b7c2211d0d7ea88c5fa
|
cd2d3b6be41eb9b96ecc3a22dc730325c21f22e6
|
/charalog/log/qaswsq19.cgi
|
e5245d1c362c4cf17fe5f3a09188c1705fb8ddce
|
[] |
no_license
|
cappuu/TC
|
c61f235349e9a68d472fa85bbea1adbef3ea154a
|
def08d09219e11bee2135f6b796569b769ee21c1
|
refs/heads/master
| 2021-09-10T19:37:33.847161 | 2018-03-31T22:56:05 | 2018-03-31T22:56:05 | 124,523,296 | 0 | 0 | null | null | null | null |
UHC
|
Python
| false | false | 1,944 |
cgi
|
11월 : 남피의 기술을 <font color=red>+9</font> 개발했습니다.(15일23시38분)
10월 : 남피의 기술을 <font color=red>+7</font> 개발했습니다.(15일22시38분)
9월 : 남피의 기술을 <font color=red>+6</font> 개발했습니다.(15일21시38분)
8월 : 현재 기운이 충만한 상태입니다.(15일20시38분)
7월 : 남피의 기술을 <font color=red>+7</font> 개발했습니다.(15일19시39분)
7월 : 수확으로 <font color=red>2834</font>의 식량을 수확했습니다. [봉토추가봉록:34](15일19시39분)
7월 : [<font color=red>레벨업</font>] Lv.13이 되었다! 봉록이 <font color=red> 2950 </font>로 늘어났다!(15일19시39분)
7월 : [<font color=red>레벨업</font>] 무력이 1포인트 올랐습니다!(15일19시39분)
6월 : 남피의 기술을 <font color=red>+8</font> 개발했습니다.(15일18시38분)
5월 : 남피의 기술을 <font color=red>+9</font> 개발했습니다.(15일17시38분)
4월 : 남피의 기술을 <font color=red>+6</font> 개발했습니다.(15일16시38분)
3월 : 남피의 기술을 <font color=red>+6</font> 개발했습니다.(15일15시38분)
2월 : <font color=red>[상승] </font>:진등의 지력이 1포인트 올랐다.(15일14시40분)
2월 : 남피의 기술을 <font color=red>+8</font> 개발했습니다.(15일14시40분)
2월 : 기술치부대는 대장의 명령에 의해 남피성에 집결했습니다.(15일14시8분)
1월 : 북평의 기술을 <font color=red>+6</font> 개발했습니다.(15일13시40분)
1월 : 세금으로 <font color=red>3300</font>의 돈을 징수했습니다. [관직추가봉록:200] [봉토추가봉록:300](15일13시40분)
12월 : 북평의 기술을 <font color=red>+6</font> 개발했습니다.(15일12시39분)
11월 : 북평의 기술을 <font color=red>+9</font> 개발했습니다.(15일11시39분)
10월 : 북평의 기술을 <font color=red>+9</font> 개발했습니다.(15일10시38분)
|
[
"[email protected]"
] | |
8782c7c8bb56b0118c1f5cd84e030e48d23be1d5
|
6a0abe2f4172f680415d83f1946baaf85e5711b7
|
/aliyun-python-sdk-slb/aliyunsdkslb/request/v20140515/DescribeAccessControlListsRequest.py
|
d63e244ab470b758431d5842a789854ee82f0557
|
[
"Apache-2.0"
] |
permissive
|
brw123/aliyun-openapi-python-sdk
|
905556b268cbe4398f0f57b48422b713d9e89a51
|
8c77db6fd6503343cffa3c86fcb9d11770a64ca2
|
refs/heads/master
| 2020-05-01T16:26:49.291948 | 2019-03-21T09:11:55 | 2019-03-21T09:11:55 | 177,572,187 | 1 | 0 | null | 2019-03-25T11:21:59 | 2019-03-25T11:21:59 | null |
UTF-8
|
Python
| false | false | 2,788 |
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeAccessControlListsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Slb', '2014-05-15', 'DescribeAccessControlLists','slb')
def get_access_key_id(self):
return self.get_query_params().get('access_key_id')
def set_access_key_id(self,access_key_id):
self.add_query_param('access_key_id',access_key_id)
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_AclName(self):
return self.get_query_params().get('AclName')
def set_AclName(self,AclName):
self.add_query_param('AclName',AclName)
def get_ResourceOwnerAccount(self):
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
def get_OwnerAccount(self):
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self,OwnerAccount):
self.add_query_param('OwnerAccount',OwnerAccount)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_AddressIPVersion(self):
return self.get_query_params().get('AddressIPVersion')
def set_AddressIPVersion(self,AddressIPVersion):
self.add_query_param('AddressIPVersion',AddressIPVersion)
def get_PageNumber(self):
return self.get_query_params().get('PageNumber')
def set_PageNumber(self,PageNumber):
self.add_query_param('PageNumber',PageNumber)
def get_Tags(self):
return self.get_query_params().get('Tags')
def set_Tags(self,Tags):
self.add_query_param('Tags',Tags)
def get_PageSize(self):
return self.get_query_params().get('PageSize')
def set_PageSize(self,PageSize):
self.add_query_param('PageSize',PageSize)
|
[
"[email protected]"
] | |
660fb803c45459f81ec12dded0ca2bcdc1611bde
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_016/ch31_2020_03_30_19_43_47_008102.py
|
d7542220ac3217770604bf2f6d5102dfb4b58bc3
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 149 |
py
|
def eh_primo(x):
r = x%2
return r
exercicio = True
while exercicio:
if r == 0:
print('True')
else:
exercicio = False
|
[
"[email protected]"
] | |
cc14b6eaaffadd74381b2c758e2324c947a2db53
|
3a6cbe6940b657ac6b608ce93d8d41ffeb6b9e65
|
/rocon_python_comms/src/rocon_python_comms/subscriber_proxy.py
|
307b3a19e2da6b0f0d8015cf88ad17466399e082
|
[] |
no_license
|
robotics-in-concert/rocon_tools
|
cdfc4ccfc04b79262fb151640966a33bd0b5f498
|
1f182537b26e8622eefaf6737d3b3d18b1741ca6
|
refs/heads/devel
| 2021-01-17T01:58:12.163878 | 2018-02-06T15:20:29 | 2018-02-06T15:20:29 | 15,774,638 | 7 | 22 | null | 2017-08-16T06:39:47 | 2014-01-09T18:02:42 |
Python
|
UTF-8
|
Python
| false | false | 4,285 |
py
|
#
# License: BSD
# https://raw.github.com/robotics-in-concert/rocon_tools/license/LICENSE
#
##############################################################################
# Description
##############################################################################
"""
.. module:: subscriber_proxy
:platform: Unix
:synopsis: Request-response style communication with a latched publisher.
This module provides a means of interacting with a ros latched publisher
in the same style as you would a ros service (request-response).
----
"""
##############################################################################
# Imports
##############################################################################
import time
import rospy
import threading
##############################################################################
# Subscriber Proxy
##############################################################################
class SubscriberProxy():
'''
Works like a service proxy, but using a latched subscriber instead (regular
subscribers will also work, but this is especially useful for latched
subscribers since they typically always provide data).
If no timeout is specified when calling, it blocks indefinitely on a
100ms loop until a message arrives. Alternatively it will return with None
if a specified timeout is reached.
**Usage:**
.. code-block:: python
from rocon_python_comms import SubscriberProxy
try:
gateway_info = SubscriberProxy('gateway_info', gateway_msgs.GatewayInfo)(rospy.Duration(0.5))
if gateway_info is not None:
# do something
except rospy.exceptions.ROSInterruptException: # make sure to handle a Ros shutdown
# react something
:todo: upgrade to make use of python events instead of manual loops
'''
def __init__(self, topic, msg_type):
'''
:param str topic: the topic name to subscriber to
:param str msg_type: any ros message type (e.g. std_msgs/String)
'''
self._data = None
self._lock = threading.Lock()
self._subscriber = rospy.Subscriber(topic, msg_type, self._callback)
def __call__(self, timeout=None):
'''
Returns immediately with the latest data or blocks to a timeout/indefinitely
until the next data arrives.
:param rospy.Duration timeout: time to wait for data, polling at 10Hz (None = /infty)
:returns: msg type data or None
:rtype: same as the msg type specified in the arg or None
:returns: latest data or None
'''
if timeout is not None:
# everything in floating point calculations
timeout_time = time.time() + timeout.to_sec()
with self._lock:
data = self._data
while not rospy.is_shutdown() and data is None:
rospy.rostime.wallsleep(0.1)
if timeout is not None:
if time.time() > timeout_time:
return None
# check to see if there is new data
with self._lock:
data = self._data
return data
def wait_for_next(self, timeout=None):
'''
Makes sure any current data is cleared and waits for new data.
:param rospy.Duration timeout: time to wait for data, polling at 10Hz.
:returns: latest data or None
'''
self._data = None
return self.__call__(timeout)
def wait_for_publishers(self):
'''
Blocks until publishers are seen.
:raises: rospy.ROSInterruptException if we are in shutdown.
'''
r = rospy.Rate(10)
while not rospy.is_shutdown():
if self._subscriber.get_num_connections() != 0:
return
else:
r.sleep()
# we are shutting down
raise rospy.exceptions.ROSInterruptException
def _callback(self, data):
with self._lock:
self._data = data
def unregister(self):
'''
Unregister the subscriber so future instantiations of this class can pull a
fresh subscriber (important if the data is latched).
'''
self._subscriber.unregister()
|
[
"[email protected]"
] | |
816f7c9573750b418355b515fae6a322cac506a0
|
d842a95213e48e30139b9a8227fb7e757f834784
|
/gcloud/google-cloud-sdk/lib/surface/spanner/databases/create.py
|
85d2e132651f4d9072e81b0f73b363d8ae6a24bf
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
bopopescu/JobSniperRails
|
f37a15edb89f54916cc272884b36dcd83cdc868a
|
39e7f871887176770de0f4fc6789e9ddc7f32b1f
|
refs/heads/master
| 2022-11-22T18:12:37.972441 | 2019-09-20T22:43:14 | 2019-09-20T22:43:14 | 282,293,504 | 0 | 0 |
MIT
| 2020-07-24T18:47:35 | 2020-07-24T18:47:34 | null |
UTF-8
|
Python
| false | false | 2,348 |
py
|
# -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for spanner databases create."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.spanner import database_operations
from googlecloudsdk.api_lib.spanner import databases
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.spanner import flags
from googlecloudsdk.command_lib.spanner import resource_args
class Create(base.CreateCommand):
"""Create a Cloud Spanner database."""
@staticmethod
def Args(parser):
"""See base class."""
resource_args.AddDatabaseResourceArg(parser, 'to create')
flags.Ddl(help_text='Semi-colon separated DDL (data definition language) '
'statements to run inside the '
'newly created database. If there is an error in any statement, '
'the database is not created. Full DDL specification is at '
'https://cloud.google.com/spanner/docs/data-definition-language'
).AddToParser(parser)
base.ASYNC_FLAG.AddToParser(parser)
parser.display_info.AddCacheUpdater(flags.DatabaseCompleter)
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
Returns:
Some value that we want to have printed later.
"""
database_ref = args.CONCEPTS.database.Parse()
instance_ref = database_ref.Parent()
op = databases.Create(instance_ref, args.database,
flags.SplitDdlIntoStatements(args.ddl or []))
if args.async:
return op
return database_operations.Await(op, 'Creating database')
|
[
"[email protected]"
] | |
47d4c7cb7eb9933b0369b1e45e59ea87c9b72b5f
|
f1d01d9074ace289e7e43530079b0c34ccdde4c4
|
/ontology/creation/data/patents/concatenate_features.py
|
5d008e8f60662834f64b614c5873f3ab50efcfec
|
[] |
no_license
|
techknowledgist/techknowledgist
|
4889300a92aad8fa940d1246ddd75036d90a6563
|
7d422ac38a9212670d0ce6e26e1446fb46740837
|
refs/heads/master
| 2021-04-26T22:35:50.390037 | 2015-11-23T19:52:26 | 2015-11-23T19:52:26 | 124,117,298 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 731 |
py
|
"""
Takes a corpus and concatenates all phr_feats files into a single file
Usage:
$ python concatenate_features.py CORPUS OUTFILE
TODO: this script, and others, should probably live in another directory
"""
import os, sys, codecs, glob, gzip
corpus = sys.argv[1]
outfile = sys.argv[2]
fh_out = codecs.open(outfile, 'w', encoding='utf-8')
feats_dir = os.path.join(corpus, 'data', 'd3_phr_feats', '01', 'files')
regexp = "%s/WoS.out.*/*.xml.gz" % feats_dir
fnames = glob.glob(regexp)
count = 0
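# Stream each gzipped phr_feats file, decode it as UTF-8, and append its
# lines to the single concatenated output file.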
for fname in fnames:
count += 1
print "%05d %s" % (count, fname)
gzipfile = gzip.open(fname, 'rb')
reader = codecs.getreader('utf-8')
fh = reader(gzipfile)
for line in fh:
fh_out.write(line)
|
[
"[email protected]"
] | |
1b82e86c123a177a8f2ea505f676028ecdf4d35f
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/otherforms/_caretakers.py
|
24865dfee38f564211700a1151c979c6df6fd6e9
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 234 |
py
|
#calss header
class _CARETAKERS():
def __init__(self,):
self.name = "CARETAKERS"
self.definitions = caretaker
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['caretaker']
|
[
"[email protected]"
] | |
10f3f0ca057e6383b33ac0929fc7f212d2521e61
|
b9e4bf5c00ac0d6c1a6e6038e8dc18041819ff99
|
/Python3/0224_Basic_Calculator.py
|
8dc3ecb6218b2b585509e0dca74cd886059e4f2d
|
[] |
no_license
|
kiranani/playground
|
98fdb70a3ca651436cc1eede0d2ba1b1ea9aba1d
|
12f62a218e827e6be2578b206dee9ce256da8d3d
|
refs/heads/master
| 2021-06-03T12:43:29.388589 | 2020-06-12T15:43:45 | 2020-06-12T15:43:45 | 149,614,802 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,125 |
py
|
from operator import add, sub
class Solution:
def calculate(self, s: str) -> int:
ops = {"+": add, "-": sub}
i, n = 0, len(s)
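        # helper(i) scans s from index i, recursing on '(' and stopping at the
        # matching ')' or the end of the string; it returns (index where the
        # scan stopped, value of the parsed sub-expression). values starts with
        # a 0 so a leading '-' is handled as subtraction from zero.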
def helper(i):
values, opers = [0], []
while i < n:
c = s[i]
if c.isdigit():
j = i
while j + 1 < n and s[j + 1].isdigit():
j += 1
values.append(int(s[i:j + 1]))
i = j
elif c in ops:
while opers and len(values) > 1:
r = values.pop()
values.append(ops[opers.pop()](values.pop(), r))
opers.append(c)
elif c == "(":
i, value = helper(i + 1)
values.append(value)
elif c == ")":
break
i += 1
while opers and len(values) > 1:
r = values.pop()
values.append(ops[opers.pop()](values.pop(), r))
return i, values[-1]
return helper(0)[1]
|
[
"[email protected]"
] | |
4bd3fea9df0d339760410abcb7bc705831ee1022
|
b40e5c6c1787dd222702b3dc817537c059e3abf7
|
/interlink/migrations/0004_auto__add_field_incomingmail_owner.py
|
bca828735df8a4344e1f4d980a41e9d1b5389875
|
[
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] |
permissive
|
tevatia/nadine
|
2e1d4908cab37c2f893f68ce59d939b5621c14a0
|
e1db914ae70d3f7f3de719c8fb54b0e2198e2b56
|
refs/heads/master
| 2021-01-18T11:15:48.083446 | 2014-04-18T22:39:42 | 2014-04-18T22:39:42 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,572 |
py
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'IncomingMail.owner'
db.add_column('interlink_incomingmail', 'owner', self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['auth.User'], null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'IncomingMail.owner'
db.delete_column('interlink_incomingmail', 'owner_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'interlink.incomingmail': {
'Meta': {'object_name': 'IncomingMail'},
'body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mailing_list': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'incoming_mails'", 'to': "orm['interlink.MailingList']"}),
'origin_address': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'sent_time': ('django.db.models.fields.DateTimeField', [], {}),
'state': ('django.db.models.fields.CharField', [], {'default': "'raw'", 'max_length': '10'}),
'subject': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'interlink.mailinglist': {
'Meta': {'object_name': 'MailingList'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_opt_out': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderators': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'moderated_mailing_lists'", 'blank': 'True', 'to': "orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'pop_host': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'pop_port': ('django.db.models.fields.IntegerField', [], {'default': '995'}),
'smtp_host': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'smtp_port': ('django.db.models.fields.IntegerField', [], {'default': '587'}),
'subject_prefix': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'subscribers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'subscribed_mailing_lists'", 'blank': 'True', 'to': "orm['auth.User']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '1024'})
},
'interlink.outgoingmail': {
'Meta': {'ordering': "['-created']", 'object_name': 'OutgoingMail'},
'attempts': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_attempt': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'mailing_list': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'outgoing_mails'", 'to': "orm['interlink.MailingList']"}),
'moderators_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'original_mail': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['interlink.IncomingMail']", 'blank': 'True'}),
'sent': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'subject': ('django.db.models.fields.TextField', [], {'blank': 'True'})
}
}
complete_apps = ['interlink']
|
[
"[email protected]"
] | |
b3c615e65519ecbd43687d83e8216c9f86283458
|
4bf7abfb2d02be6cce23828d0554ccb575b91c3a
|
/step02/bookings.py
|
bc2ee7c8312d9977f6c5975d40040a7a883b6119
|
[] |
no_license
|
OneScreenfulOfPython/booking-system
|
5c6471ec1e4c0103bd871d1e7a45a474a30f7e91
|
e4daf5d3eb6e8620acb164f35ad09cb9081612aa
|
refs/heads/master
| 2021-06-01T23:14:25.649607 | 2014-09-24T08:01:32 | 2014-09-24T08:01:32 | 24,347,319 | 10 | 14 | null | 2020-10-01T16:53:56 | 2014-09-22T22:04:53 |
Python
|
UTF-8
|
Python
| false | false | 3,003 |
py
|
#!python3
import os, sys
import sqlite3
#
# Ensure we're using the same database filename throughout.
# It doesn't matter what this is called or where it is:
# sqlite3 will just accept anything.
#
DATABASE_FILEPATH = "bookings.db"
def create_database():
"""Connect to the database, read the CREATE statements and split
them at the semicolon into individual statements. Once each
statement has been executed, close the connection.
"""
#
# Since we might be re-running this, delete the file and rebuild
# it if necessary.
#
if os.path.exists(DATABASE_FILEPATH):
os.remove(DATABASE_FILEPATH)
#
    # A database cursor is the Python mechanism for running something
# against any database. You create a cursor and then .execute
# SQL statements through it.
#
db = sqlite3.connect(DATABASE_FILEPATH)
q = db.cursor()
#
# Read all the contents of create.sql in one gulp
#
sql = open("create.sql").read()
#
# Split it into individual statements, breaking on the semicolon
#
statements = sql.split(";")
#
# Execute each of the individual statements against the database
#
for statement in statements:
q.execute(statement)
#
# Close everything
#
q.close()
db.commit()
db.close()
def populate_database():
"""Populate the database with some valid test data
"""
db = sqlite3.connect(DATABASE_FILEPATH)
q = db.cursor()
sql = "INSERT INTO users(id, name, email_address) VALUES(?, ?, ?)"
q.execute(sql, [1, "Mickey Mouse", "[email protected]"])
q.execute(sql, [2, "Donald Duck", "[email protected]"])
q.execute(sql, [3, "Kermit the Frog", None])
sql = "INSERT INTO rooms(id, name, location) VALUES(?, ?, ?)"
q.execute(sql, [1, "Room A", "Next to the stairway"])
q.execute(sql, [2, "Room B", "On the Second Floor"])
q.execute(sql, [3, "Main Hall", None])
#
# Triple-quoted strings can cross lines
# NB the column order doesn't matter if you specify it
#
sql = """
INSERT INTO
bookings
(
room_id, user_id, booked_on, booked_from, booked_to
)
VALUES(
?, ?, ?, ?, ?
)"""
q.execute(sql, [1, 1, '2014-09-25', '09:00', '10:00']) # Room A (1) booked by Mickey (1) from 9am to 10am on 25th Sep 2014
q.execute(sql, [3, 1, '2015-09-25', None, None]) # Main Hall (3) booked by Mickey (1) from all day on 25th Sep 2014
q.execute(sql, [2, 3, '2014-09-22', '12:00', None]) # Room B (2) booked by Kermit (3) from midday onwards on 22nd Sep 2014
q.execute(sql, [1, 2, '2015-02-14', '09:30', '10:00']) # Room A (1) booked by Donald (2) from 9.30am to 10am on 15th Feb 2014
q.close()
db.commit()
db.close()
if __name__ == '__main__':
print("About to create database %s" % DATABASE_FILEPATH)
create_database()
print("About to populate database %s" % DATABASE_FILEPATH)
populate_database()
print("Finished")
|
[
"[email protected]"
] | |
6689bda17969ce7d5c74c76e31523e8c509ba831
|
ebd24e400986c57b4bb1b9578ebd8807a6db62e8
|
/InstaGrade-FormBuilder/xlsxwriter/test/worksheet/test_extract_filter_tokens.py
|
f42d6ade4765867a416adf7c334a45d6f2c4965c
|
[] |
no_license
|
nate-parrott/ig
|
6abed952bf32119a536a524422037ede9b431926
|
6e0b6ac0fb4b59846680567150ce69a620e7f15d
|
refs/heads/master
| 2021-01-12T10:15:15.825004 | 2016-12-13T21:23:17 | 2016-12-13T21:23:17 | 76,399,529 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,714 |
py
|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2014, John McNamara, [email protected]
#
import unittest
from ...compatibility import StringIO
from ...worksheet import Worksheet
class TestExtractFilterTokens(unittest.TestCase):
"""
Test the Worksheet _extract_filter_tokens() method.
"""
def setUp(self):
self.fh = StringIO()
self.worksheet = Worksheet()
self.worksheet._set_filehandle(self.fh)
def test_extract_filter_tokens(self):
"""Test the _extract_filter_tokens() method"""
testcases = [
[
None,
[],
],
[
'',
[],
],
[
'0 < 2001',
['0', '<', '2001'],
],
[
'x < 2000',
['x', '<', '2000'],
],
[
'x > 2000',
['x', '>', '2000'],
],
[
'x == 2000',
['x', '==', '2000'],
],
[
'x > 2000 and x < 5000',
['x', '>', '2000', 'and', 'x', '<', '5000'],
],
[
'x = "goo"',
['x', '=', 'goo'],
],
[
'x = moo',
['x', '=', 'moo'],
],
[
'x = "foo baz"',
['x', '=', 'foo baz'],
],
[
'x = "moo "" bar"',
['x', '=', 'moo " bar'],
],
[
'x = "foo bar" or x = "bar foo"',
['x', '=', 'foo bar', 'or', 'x', '=', 'bar foo'],
],
[
'x = "foo "" bar" or x = "bar "" foo"',
['x', '=', 'foo " bar', 'or', 'x', '=', 'bar " foo'],
],
[
'x = """"""""',
['x', '=', '"""'],
],
[
'x = Blanks',
['x', '=', 'Blanks'],
],
[
'x = NonBlanks',
['x', '=', 'NonBlanks'],
],
[
'top 10 %',
['top', '10', '%'],
],
[
'top 10 items',
['top', '10', 'items'],
],
]
for testcase in testcases:
expression = testcase[0]
exp = testcase[1]
got = self.worksheet._extract_filter_tokens(expression)
self.assertEqual(got, exp)
|
[
"[email protected]"
] | |
186d6c74a11f3723f7afa9fee9fabb8293b12090
|
a7122df9b74c12a5ef23af3cd38550e03a23461d
|
/Elementary/Popular Words.py
|
b195f9aa52413e4b553b3d220df0730845fc443b
|
[] |
no_license
|
CompetitiveCode/py.checkIO.org
|
c41f6901c576c614c4c77ad5c4162448828c3902
|
e34648dcec54364a7006e4d78313e9a6ec6c498b
|
refs/heads/master
| 2022-01-09T05:48:02.493606 | 2019-05-27T20:29:24 | 2019-05-27T20:29:24 | 168,180,493 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 846 |
py
|
#Answer to Popular Words - https://py.checkio.org/en/mission/popular-words/
import re
def popular_words(text: str, words: list) -> dict:
dictionary = {}
for i in words:
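        # Count whole-word, case-insensitive occurrences of i; re.escape guards
        # against regex metacharacters in the search word.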
count = sum(1 for _ in re.finditer(r'\b%s\b' %re.escape(i), text, re.I))
dictionary[i] = count
return dictionary
if __name__ == '__main__':
print("Example:")
print(popular_words('''
When I was One
I had just begun
When I was Two
I was nearly new
''', ['i', 'was', 'three', 'near']))
# These "asserts" are used for self-checking and not for an auto-testing
assert popular_words('''
When I was One
I had just begun
When I was Two
I was nearly new
''', ['i', 'was', 'three', 'near']) == {
'i': 4,
'was': 3,
'three': 0,
'near': 0
}
print("Coding complete? Click 'Check' to earn cool rewards!")
|
[
"[email protected]"
] | |
67bed8ee2cf8654efc0ac980515699f555298c21
|
bcb178eabc006d2d3dcff3d35bb3a47dc1573841
|
/tools/nicythize
|
01143856430ef42cc335ccecd619ec488c98f407
|
[
"BSD-3-Clause"
] |
permissive
|
poldrack/nipy
|
dc34e341ce07ecd6a1240b84582186073a1348a7
|
cffbb76c86e648a647327e654f0d33220035c6ec
|
refs/heads/master
| 2022-04-24T00:23:12.054157 | 2020-04-23T15:29:34 | 2020-04-23T15:29:34 | 258,041,266 | 0 | 0 |
NOASSERTION
| 2020-04-22T23:12:22 | 2020-04-22T23:12:21 | null |
UTF-8
|
Python
| false | false | 5,989 |
#!/usr/bin/env python
""" nicythize
Cythonize pyx files into C files as needed.
Usage: nicythize [root_dir]
Default [root_dir] is 'nipy'.
Checks pyx files to see if they have been changed relative to their
corresponding C files. If they have, then runs cython on these files to
recreate the C files.
The script thinks that the pyx files have changed relative to the C files if:
* The pyx file is modified compared to the version checked into git, and the C
file is not
* The pyx file was committed to the repository more recently than the C file.
Simple script to invoke Cython (and Tempita) on all .pyx (.pyx.in)
files; while waiting for a proper build system. Uses file hashes to
figure out if rebuild is needed (using dates seem to fail with
frequent change of git branch).
For now, this script should be run by developers when changing Cython files
only, and the resulting C files checked in, so that end-users (and Python-only
developers) do not get the Cython/Tempita dependencies.
Originally written by Dag Sverre Seljebotn, and copied here from:
https://raw.github.com/dagss/private-scipy-refactor/cythonize/cythonize.py
Note: this script does not check any of the dependent C libraries; it only
operates on the Cython .pyx files.
"""
import os
from os.path import join as pjoin, abspath
import sys
import hashlib
import pickle
from subprocess import Popen, PIPE
from distutils.version import LooseVersion
try:
import Cython
except ImportError:
raise OSError('We need cython for this script')
from Cython.Compiler.Version import version as cython_version
HAVE_CYTHON_0p14 = LooseVersion(cython_version) >= LooseVersion('0.14')
HASH_FILE = 'cythonize.dat'
DEFAULT_ROOT = 'nipy'
EXTRA_FLAGS = '-I {}'.format(
abspath(pjoin('lib', 'fff_python_wrapper')))
#
# Rules
#
def process_pyx(fromfile, tofile):
if HAVE_CYTHON_0p14:
opt_str = '--fast-fail'
else:
opt_str = ''
if os.system('cython %s %s -o "%s" "%s"' % (
opt_str, EXTRA_FLAGS, tofile, fromfile)) != 0:
raise Exception('Cython failed')
def process_tempita_pyx(fromfile, tofile):
import tempita
with open(fromfile, 'rt') as f:
tmpl = f.read()
pyxcontent = tempita.sub(tmpl)
assert fromfile.endswith('.pyx.in')
pyxfile = fromfile[:-len('.pyx.in')] + '.pyx'
with open(pyxfile, 'w') as f:
f.write(pyxcontent)
process_pyx(pyxfile, tofile)
rules = {
# fromext : (toext, function)
'.pyx' : ('.c', process_pyx),
'.pyx.in' : ('.c', process_tempita_pyx)
}
#
# Hash db
#
def load_hashes(filename):
# Return { filename : (sha1 of input, sha1 of output) }
if os.path.isfile(filename):
with open(filename, 'rb') as f:
hashes = pickle.load(f)
else:
hashes = {}
return hashes
def save_hashes(hash_db, filename):
with open(filename, 'wb') as f:
pickle.dump(hash_db, f)
def sha1_of_file(filename):
h = hashlib.sha1()
with open(filename, 'rb') as f:
h.update(f.read())
return h.hexdigest()
#
# interface with git
#
def execproc(cmd):
assert isinstance(cmd, (list, tuple))
pp = Popen(cmd, stdout=PIPE, stderr=PIPE)
result = pp.stdout.read().strip()
err = pp.stderr.read()
retcode = pp.wait()
if retcode != 0:
return None
else:
return result
def git_last_commit_to(filename):
out = execproc(['git', 'log', '-1', '--format=format:%H', filename])
if out == '':
out = None
return out
def git_is_dirty(filename):
out = execproc(['git', 'status', '--porcelain', filename])
assert out is not None
return (out != '')
def git_is_child(parent_sha, child_sha):
out = execproc(['git', 'rev-list', child_sha, '^%s^' % parent_sha])
assert out is not None
for line in out.split('\n'):
if line == parent_sha:
return True
return False
#
# Main program
#
def get_hash(frompath, topath):
from_hash = sha1_of_file(frompath)
to_hash = sha1_of_file(topath) if os.path.exists(topath) else None
return (from_hash, to_hash)
def process(path, fromfile, tofile, processor_function, hash_db):
fullfrompath = os.path.join(path, fromfile)
fulltopath = os.path.join(path, tofile)
current_hash = get_hash(fullfrompath, fulltopath)
if current_hash == hash_db.get(fullfrompath, None):
print('%s has not changed' % fullfrompath)
return
from_sha = git_last_commit_to(fullfrompath)
to_sha = git_last_commit_to(fulltopath)
if (from_sha is not None and to_sha is not None and
not git_is_dirty(fullfrompath)):
# Both source and target is under revision control;
# check with revision control system whether we need to
# update
if git_is_child(from_sha, to_sha):
hash_db[fullfrompath] = current_hash
print('%s is up to date (according to git)' % fullfrompath)
return
orig_cwd = os.getcwd()
try:
os.chdir(path)
print('Processing %s' % fullfrompath)
processor_function(fromfile, tofile)
finally:
os.chdir(orig_cwd)
# changed target file, recompute hash
current_hash = get_hash(fullfrompath, fulltopath)
# store hash in db
hash_db[fullfrompath] = current_hash
def find_process_files(root_dir):
hash_db = load_hashes(HASH_FILE)
for cur_dir, dirs, files in os.walk(root_dir):
for filename in files:
for fromext, rule in rules.items():
if filename.endswith(fromext):
toext, function = rule
fromfile = filename
tofile = filename[:-len(fromext)] + toext
process(cur_dir, fromfile, tofile, function, hash_db)
save_hashes(hash_db, HASH_FILE)
def main():
try:
root_dir = sys.argv[1]
except IndexError:
root_dir = DEFAULT_ROOT
find_process_files(root_dir)
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | ||
4fb63461d25aae70e78349f22ea21fa4ac0d836a
|
1b0a729f6e20c542a6370785a49c181c0675e334
|
/mint/_versions/20151206231927 router/mint/host.py
|
2ead8cb0368f35e377b1077b2efe0c88d58c32b6
|
[] |
no_license
|
fans656/mint-dev
|
68125c4b41ab64b20d54a2b19e8bf0179dc4636b
|
408f6f055670b15a3f3ee9c9ec086b1090cce372
|
refs/heads/master
| 2021-05-04T11:43:44.740116 | 2016-09-07T13:43:44 | 2016-09-07T13:43:44 | 45,515,119 | 3 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,711 |
py
|
import mint
from mint.interface import Interface
from mint.routetable import RouteTable
from mint.core import EntityWithNIC
from mint.libsocket import LibSocket
from mint.pdus import (
Frame, Packet,
IP,
Discomposer,
)
class Host(EntityWithNIC):
def __init__(self, **kwargs):
super(Host, self).__init__(n_interfaces=1)
self.libsocket = LibSocket(self)
ip = kwargs.get('ip', '192.168.0.1{:02}'.format(self.index))
mask = kwargs.get('mask', '255.255.255.0')
mac = kwargs.get('mac', None)
self.interfaces = [Interface(self, nic, ip, mask, mac)
for nic in self.nics]
self.interface = self.interfaces[0]
self.interface.report.connect(self.report)
self.interface.on_ipv4.connect(self.on_ipv4)
self.routes = RouteTable(self)
def send(self, data, ip, protocol=Packet.Protocol.Raw):
dst_ip = IP(ip)
_, _, gateway, interface = self.routes.find(dst_ip)
if gateway:
self.report('{} is non-local, beg {} to deliver', dst_ip, gateway)
else:
self.report('{} is local, send directly', dst_ip)
packet = Packet(
src_ip=interface.ip,
dst_ip=dst_ip,
protocol=protocol,
payload=data,
)
interface.send_packet(
packet.raw,
gateway if gateway else dst_ip,
Frame.EtherType.IPv4,
)
@property
def ip(self):
return self.interface.ip
@ip.setter
def ip(self, val):
self.interface.ip = val
@property
def mask(self):
return self.interface.mask
@mask.setter
def mask(self, val):
self.interface.mask = val
@property
def mac(self):
return self.interface.mac
@mac.setter
def mac(self, val):
self.interface.mac = val
@property
def default_gateway(self):
return self._default_gateway
@default_gateway.setter
def default_gateway(self, val):
gateway = IP(val)
self._default_gateway = gateway
self.routes[-1].gateway = gateway
def on_ipv4(self, frame, **_):
packet = Packet(frame.payload)
self.send('wooo..', packet.src_ip)
def run(self):
while True:
self.interface.do_send()
self.interface.do_recv()
mint.elapse(1)
@property
def status(self):
nic_title = 'IP:{}/{} MAC:{}'.format(
self.ip, self.mask.net_len, self.mac)
return [
(nic_title, self.nic),
('O', Discomposer(lambda: self.nic.odata)),
('I', Discomposer(lambda: self.nic.idata)),
]
|
[
"[email protected]"
] | |
cfa734dc45fd018c8e7f698ec5da162694aa8ce6
|
dcce56815dca2b18039e392053376636505ce672
|
/dumpscripts/collections_deque_rotate.py
|
bcc8cb420d285ce582d140a4f19d8855221efbf4
|
[] |
no_license
|
robertopauletto/PyMOTW-it_3.0
|
28ff05d8aeccd61ade7d4107a971d9d2576fb579
|
c725df4a2aa2e799a969e90c64898f08b7eaad7d
|
refs/heads/master
| 2021-01-20T18:51:30.512327 | 2020-01-09T19:30:14 | 2020-01-09T19:30:14 | 63,536,756 | 4 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 267 |
py
|
# collections_deque_rotate.py
import collections
d = collections.deque(range(10))
print('Normale :', d)
d = collections.deque(range(10))
d.rotate(2)
print('Rotazione destra:', d)
d = collections.deque(range(10))
d.rotate(-2)
print('Rotazione sinistra:', d)
|
[
"[email protected]"
] | |
244c6f3bb61e659a079e842e9b933fbcbc9b1d94
|
644b13f90d43e9eb2fae0d2dc580c7484b4c931b
|
/programmers/level1/최소공배수, 최대공약수.py
|
9ae0e0c7b50f0b452035698c8c30943f5df4074d
|
[] |
no_license
|
yeonnseok/ps-algorithm
|
c79a41f132c8016655719f74e9e224c0870a8f75
|
fc9d52b42385916344bdd923a7eb3839a3233f18
|
refs/heads/master
| 2020-07-09T11:53:55.786001 | 2020-01-26T02:27:09 | 2020-01-26T02:27:09 | 203,962,358 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 216 |
py
|
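# Euclidean algorithm: the loop replaces (a, b) with (b, a % b) until b divides
# a, so b ends up as the GCD and n * m // b is the LCM. When n == m that value
# is returned directly instead of the [GCD, LCM] pair.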
def solution(n, m):
answer = []
a, b = max(n, m), min(n, m)
if a == b:
return a
while a % b != 0:
a, b = b, a % b
answer.append(b)
answer.append(n * m // b)
return answer
|
[
"[email protected]"
] | |
f92f1dc064ee15db14477b6d7d43f81fc28eb2ce
|
36ac195ecceb868e78372bc8e976066cc9ff0fae
|
/torch_glow/tests/nodes/bitwise_not_test.py
|
638abae4da1c69f85aee374d8a864635a2f08b75
|
[
"Apache-2.0"
] |
permissive
|
jeff60907/glow
|
d283d65bc67e0cc9836854fa7e4e270b77023fff
|
34214caa999e4428edbd08783243d29a4454133f
|
refs/heads/master
| 2021-09-23T07:30:29.459957 | 2021-09-14T01:47:06 | 2021-09-14T01:48:00 | 216,199,454 | 0 | 0 |
Apache-2.0
| 2019-10-19T12:00:31 | 2019-10-19T12:00:31 | null |
UTF-8
|
Python
| false | false | 1,295 |
py
|
from __future__ import absolute_import, division, print_function, unicode_literals
import unittest
import torch
from parameterized import parameterized
from tests import utils
class SimpleBitwiseNotModule(torch.nn.Module):
def __init__(self):
super(SimpleBitwiseNotModule, self).__init__()
def forward(self, a):
b = torch.bitwise_not(a)
return torch.bitwise_not(b)
class TestBitwiseNot(utils.TorchGlowTestCase):
@utils.deterministic_expand(
[
lambda: ("basic", torch.tensor([-1, -2, 3], dtype=torch.int32)),
lambda: ("basic_int64", torch.tensor([-1, -2, 3], dtype=torch.int64)),
lambda: (
"rand_int",
torch.randint(-1000000000, 1000000000, (2, 3), dtype=torch.int64),
),
lambda: ("bool_ts", torch.zeros((2, 2, 3), dtype=torch.bool)),
lambda: ("bool_fs", torch.ones((2, 2, 3), dtype=torch.bool)),
lambda: ("bool_tf", torch.tensor([False, True], dtype=torch.bool)),
]
)
def test_bitwise_not(self, _, x):
"""Tests of the PyTorch Bitwise Not Node on Glow."""
utils.compare_tracing_methods(
SimpleBitwiseNotModule(),
x,
fusible_ops={"aten::bitwise_not"},
)
|
[
"[email protected]"
] | |
11ee931e40d0bb015d6763cf38814d5e8f796363
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/85/usersdata/216/58702/submittedfiles/funcoes1.py
|
e3b878f3470c294280f93e1db209d477acd78ca7
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,314 |
py
|
# -*- coding: utf-8 -*-
def crescente (lista):
for i in range(0,len(lista)-1,1):
if lista[i]<lista[i+1]:
return True
return False
def decrescente(lista):
for i in range(0,len(lista)-1,1):
if lista[i]>lista[i+1]:
return True
return False
def numiguais(lista):
for i in range(0,len(lista)-1,1):
if lista[i]==lista[i+1]:
return True
break
return False
n=int(input('Digite a quantidade de elementos:'))
a=[]
b=[]
c=[]
for i in range(0,n,1):
x=float(input('Digite os elementos:'))
a.append(x)
for i in range(0,n,1):
y=float(input('Digite os elementos:'))
b.append(y)
for i in range(0,n,1):
w=float(input('Digite os elementos:'))
c.append(w)
if crescente (a):
print('S')
else:
print('N')
if decrescente (a):
print('S')
else:
print('N')
if numiguais (a):
print('S')
else:
print('N')
if crescente (b):
print('S')
else:
print('N')
if decrescente (b):
print('S')
else:
print('N')
if numiguais (b):
print('S')
else:
print('N')
if crescente (c):
print('S')
else:
print('N')
if decrescente (c):
print('S')
else:
print('N')
if numiguais (c):
print('S')
else:
print('N')
|
[
"[email protected]"
] |