blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6fe7640c64822df4cca889a856f9099d33231595
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02860/s554783475.py
|
ba781c1a512917a311a200fc59b2e495d4dab5c5
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 201 |
py
|
# Read a length n and a string, then report whether the string consists of
# the same half written twice ("Yes") or not ("No").
n = int(input())
s = input()
if n % 2 == 1:
    # An odd length can never split into two equal halves.
    print("No")
else:
    half = n // 2
    # Collect mismatching positions between the two halves; every index is
    # scanned, exactly like the original flag-based loop.
    mismatches = [i for i in range(half) if s[i] != s[i + half]]
    print("Yes" if not mismatches else "No")
|
[
"[email protected]"
] | |
866fcd777ed57198ecc587fa85d3a71e6974ea99
|
9d1491368c5e87760131ba27d252ee2d10620433
|
/gammapy/spectrum/powerlaw.py
|
39edaeca1329962422682f6d153c6cf79d653ff1
|
[
"BSD-3-Clause"
] |
permissive
|
cnachi/gammapy
|
f9295306a8e81d0b7f4d2111b3fa3679a78da3f7
|
3d3fc38c111d2f490d984082750f8003580fe06c
|
refs/heads/master
| 2021-01-20T23:37:59.409914 | 2016-06-09T08:36:33 | 2016-06-09T08:36:33 | 60,764,807 | 0 | 0 | null | 2016-06-09T09:55:54 | 2016-06-09T09:55:54 | null |
UTF-8
|
Python
| false | false | 6,540 |
py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Power law spectrum helper functions.
Convert differential and integral fluxes with error propagation.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
__all__ = [
'power_law_evaluate',
'power_law_pivot_energy',
'df_over_f',
'power_law_flux',
'power_law_integral_flux',
'g_from_f',
'g_from_points',
'I_from_points',
'f_from_points',
'f_with_err',
'I_with_err',
'compatibility',
]
E_INF = 1e10 # practically infinitely high flux
g_DEFAULT = 2
def power_law_evaluate(energy, norm, gamma, energy_ref):
    r"""Differential flux of a power law at a given energy.

    .. math:: f(E) = N (E / E_0) ^ {- \Gamma}

    Parameters
    ----------
    energy : array_like
        Energy at which to compute the differential flux
    norm : array_like
        Normalisation ``N`` (flux at the reference energy)
    gamma : array_like
        Power law spectral index :math:`\Gamma`
    energy_ref : array_like
        Reference energy ``E0``
    """
    scaled_energy = energy / energy_ref
    return norm * scaled_energy ** (-gamma)
def power_law_pivot_energy(energy_ref, f0, d_gamma, cov):
    """Compute pivot (a.k.a. decorrelation) energy.

    Defined as the energy where the relative flux error df / f is smallest.

    Reference: http://arxiv.org/pdf/0910.4881
    """
    exponent = cov / (f0 * d_gamma ** 2)
    return energy_ref * np.exp(exponent)
def df_over_f(e, e0, f0, df0, dg, cov):
    """Relative flux error at any given energy.

    Used to draw butterflies.

    Reference: http://arxiv.org/pdf/0910.4881 Equation (1)
    """
    log_ratio = np.log(e / e0)
    norm_term = (df0 / f0) ** 2
    cov_term = 2 * cov / f0 * log_ratio
    index_term = (dg * log_ratio) ** 2
    return np.sqrt(norm_term - cov_term + index_term)
def _conversion_factor(g, e, e1, e2):
"""Conversion factor between differential and integral flux."""
# In gamma-ray astronomy only falling power-laws are used.
# Here we force this, i.e. give "correct" input even if the
# user gives a spectral index with an incorrect sign.
g = np.abs(g)
term1 = e / (-g + 1)
term2 = (e2 / e) ** (-g + 1) - (e1 / e) ** (-g + 1)
return term1 * term2
def power_law_flux(I=1, g=g_DEFAULT, e=1, e1=1, e2=E_INF):
    """Compute differential flux for a given integral flux.

    Parameters
    ----------
    I : array_like
        Integral flux in ``energy_min``, ``energy_max`` band
    g : array_like
        Power law spectral index
    e : array_like
        Energy at which to compute the differential flux
    e1 : array_like
        Energy band minimum
    e2 : array_like
        Energy band maximum

    Returns
    -------
    flux : `numpy.array`
        Differential flux at ``energy``.
    """
    factor = _conversion_factor(g, e, e1, e2)
    return I / factor
def power_law_integral_flux(f=1, g=g_DEFAULT, e=1, e1=1, e2=E_INF):
    """Compute integral flux for a given differential flux.

    Parameters
    ----------
    f : array_like
        Differential flux at ``energy``
    g : array_like
        Power law spectral index
    e : array_like
        Energy at which the differential flux is given
    e1 : array_like
        Energy band minimum
    e2 : array_like
        Energy band maximum

    Returns
    -------
    flux : `numpy.array`
        Integral flux in ``energy_min``, ``energy_max`` band
    """
    factor = _conversion_factor(g, e, e1, e2)
    return f * factor
def g_from_f(e, f, de=1):
    """Spectral index at a given energy e for a given function f(e)"""
    # Finite-difference estimate over the interval [e, e + de].
    lo = e
    hi = e + de
    return g_from_points(lo, hi, f(lo), f(hi))
def g_from_points(e1, e2, f1, f2):
    """Spectral index for two given differential flux points"""
    flux_ratio = f2 / f1
    energy_ratio = e2 / e1
    return -np.log(flux_ratio) / np.log(energy_ratio)
def I_from_points(e1, e2, f1, f2):
    """Integral flux in energy bin for power law"""
    g = g_from_points(e1, e2, f1, f2)
    exponent = -g + 1
    # Analytic integral of f1 * (e / e1) ** -g over [e1, e2].
    return f1 * e1 / exponent * ((e2 / e1) ** exponent - 1)
def f_from_points(e1, e2, f1, f2, e):
    """Interpolate between two flux points (linear in log-log space)."""
    e1 = np.asarray(e1, float)
    e2 = np.asarray(e2, float)
    f1 = np.asarray(f1, float)
    f2 = np.asarray(f2, float)
    e = np.asarray(e, float)
    # Slope of the straight line joining the two points in log-log space.
    slope = np.log(f2 / f1) / np.log(e2 / e1)
    log_flux = np.log(f1) + np.log(e / e1) * slope
    return np.exp(log_flux)
def f_with_err(I_val=1, I_err=0, g_val=g_DEFAULT, g_err=0,
               e=1, e1=1, e2=E_INF):
    """Wrapper for f so the user doesn't have to know about
    the uncertainties module"""
    from uncertainties import unumpy
    flux = power_law_flux(unumpy.uarray(I_val, I_err),
                          unumpy.uarray(g_val, g_err),
                          e, e1, e2)
    return unumpy.nominal_values(flux), unumpy.std_devs(flux)
def I_with_err(f_val=1, f_err=0, g_val=g_DEFAULT, g_err=0,
               e=1, e1=1, e2=E_INF):
    """Wrapper for f so the user doesn't have to know about
    the uncertainties module"""
    from uncertainties import unumpy
    integral = power_law_integral_flux(unumpy.uarray(f_val, f_err),
                                       unumpy.uarray(g_val, g_err),
                                       e, e1, e2)
    return unumpy.nominal_values(integral), unumpy.std_devs(integral)
def compatibility(par_low, par_high):
    """Quantify spectral compatibility of power-law
    measurements in two energy bands.

    Reference: 2008ApJ...679.1299F Equation (2)

    Compute spectral compatibility parameters for the
    situation where two power laws were measured in a low
    and a high spectral energy band.
    par_low and par_high are the measured parameters,
    which must be lists in the following order:
    e, f, f_err, g, g_err
    where e is the pivot energy, f is the flux density
    and g the spectral index
    """
    # Unpack power-law parameters for each band.
    e_low, f_low, f_err_low, g_low, g_err_low = par_low
    e_high, f_high, f_err_high, g_high, g_err_high = par_high

    # g_match is the index obtained by connecting the two points with a
    # power law, i.e. a straight line in the (log e, log f) plane.
    log_delta_e = np.log10(e_high) - np.log10(e_low)
    log_delta_f = np.log10(f_high) - np.log10(f_low)
    g_match = -log_delta_f / log_delta_e

    # Number of standard deviations between the matching index and the
    # measured index in each band (Funk et al. 2008ApJ...679.1299F, eqn. 2).
    sigma_low = (g_match - g_low) / g_err_low
    sigma_high = (g_match - g_high) / g_err_high
    sigma_comb = np.sqrt(sigma_low ** 2 + sigma_high ** 2)

    return g_match, sigma_low, sigma_high, sigma_comb
|
[
"[email protected]"
] | |
ddcaf6e28b533963df17ac8f9f13f4ce3c77631f
|
1581f1d66d6835b2c271295e3251c2dde239fec8
|
/payment_gateway/pg_utils.py
|
6036c701e7036016bef878326b20e168433fab8a
|
[] |
no_license
|
abinash-kumar/pythod
|
527659e3bdd161f9abcaaa9182dfe58044b3ff66
|
1469dc0cd9d6d72b2fe2e69f99542e470bea807b
|
refs/heads/master
| 2023-01-30T02:54:10.729606 | 2020-02-24T07:18:51 | 2020-02-24T07:18:51 | 242,670,715 | 0 | 0 | null | 2023-01-25T13:57:52 | 2020-02-24T07:16:02 |
Python
|
UTF-8
|
Python
| false | false | 2,318 |
py
|
from motor_product import prod_utils as mpu
from health_product import prod_utils as hpu
# Maps the full insurer slug stored on health products to the short slug
# used by this payment gateway (see get_insurer_slug below).
HEALTH_INSURER_SLUG = {
    'the-oriental-insurance-company-ltd': 'oriental'
}
def resolve_utils(transaction):
    """Return the prod_utils module matching the transaction's product type.

    Returns the motor or health helper module, or None for any other
    product type.
    """
    product = transaction.product_type
    if product == 'motor':
        return mpu
    if product == 'health':
        return hpu
    return None
def process_payment_response(request, response, transaction):
    """Dispatch gateway-response processing to the product-specific helper.

    Motor and health products use different helper signatures; any other
    product type yields None.
    """
    product = transaction.product_type
    if product == 'motor':
        vehicle_slug = mpu.VEHICLE_TYPE_SLUG[transaction.vehicle_type]
        return mpu.process_payment_response(
            request,
            vehicle_slug,
            get_insurer_slug(transaction),
            response,
            transaction.transaction_id
        )
    if product == 'health':
        insurer_id = transaction.slab.health_product.insurer.id
        return hpu.process_payment_response(insurer_id, response, transaction)
    return None
def get_insurer_slug(transaction):
    """Return the insurer slug for a transaction (None for unknown products).

    Health products translate the stored slug through HEALTH_INSURER_SLUG;
    motor products use the insurer's slug directly.
    """
    product = transaction.product_type
    if product == 'motor':
        return transaction.insurer.slug
    if product == 'health':
        full_slug = transaction.slab.health_product.insurer.slug
        return HEALTH_INSURER_SLUG[full_slug]
    return None
def get_error_url(transaction):
    """Build the failure-redirect URL for a transaction's product type.

    Returns None when the product type is neither motor nor health.
    """
    product = transaction.product_type
    if product == 'motor':
        vehicle_type = mpu.VEHICLE_TYPE_SLUG[transaction.vehicle_type]
        return '/motor/' + vehicle_type + '/product/failure/'
    if product == 'health':
        return '/health-plan/payment/transaction/%s/failure/' % transaction.transaction_id
    return None
def todict(obj, classkey=None):
    """Recursively convert an object graph into plain dicts/lists/scalars.

    Dicts are converted value-by-value, objects exposing ``_ast()`` are
    expanded first, iterables become lists, and objects with a ``__dict__``
    become dicts of their public, non-callable attributes. When ``classkey``
    is given, each converted object's dict also records its class name under
    that key. Anything else is returned unchanged.

    Fixes: the original called ``obj.__dict__.iteritems()`` (Python 2 only,
    AttributeError on Python 3) and let str/bytes fall into the ``__iter__``
    branch on Python 3 (infinite character recursion); ``.items()`` works on
    both versions and strings are now returned as-is, matching the original
    Python 2 behavior.
    """
    if isinstance(obj, dict):
        return {k: todict(v, classkey) for k, v in obj.items()}
    if hasattr(obj, "_ast"):
        # AST-like objects expose their fields through _ast().
        return todict(obj._ast())
    if isinstance(obj, (str, bytes)):
        # Strings are iterable but must be treated as scalars.
        return obj
    if hasattr(obj, "__iter__"):
        return [todict(v, classkey) for v in obj]
    if hasattr(obj, "__dict__"):
        data = {
            key: todict(value, classkey)
            for key, value in obj.__dict__.items()
            if not callable(value) and not key.startswith('_')
        }
        if classkey is not None and hasattr(obj, "__class__"):
            data[classkey] = obj.__class__.__name__
        return data
    return obj
|
[
"[email protected]"
] | |
1d6007a5ebcba5fca71c8d3808860c34ac1f9ede
|
0f0f8b3b027f412930ca1890b0666538358a2807
|
/dotop/addons/base/ir/ir_filters.py
|
7e792068539ec5262791dfa23e1034b0a6500c7e
|
[] |
no_license
|
konsoar/dotop_pos_v11
|
741bd5ca944dfd52eb886cab6f4b17b6d646e131
|
576c860917edd25661a72726d0729c769977f39a
|
refs/heads/master
| 2021-09-06T13:25:34.783729 | 2018-02-07T02:11:12 | 2018-02-07T02:11:12 | 111,168,355 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,584 |
py
|
# -*- coding: utf-8 -*-
# Part of dotop. See LICENSE file for full copyright and licensing details.
import ast
from dotop import api, fields, models, _
from dotop.exceptions import UserError
class IrFilters(models.Model):
    """Persisted search filters (``ir.filters``).

    Stores user-defined search domains/contexts/sort orders so they can be
    re-applied later. A filter is private when ``user_id`` is set and shared
    when it is empty; it can be scoped to one menu action via ``action_id``
    or apply globally when that field is empty.
    """
    _name = 'ir.filters'
    _description = 'Filters'
    _order = 'model_id, name, id desc'
    name = fields.Char(string='Filter Name', translate=True, required=True)
    # Empty user_id means the filter is shared with every user.
    user_id = fields.Many2one('res.users', string='User', ondelete='cascade', default=lambda self: self._uid,
                              help="The user this filter is private to. When left empty the filter is public "
                                   "and available to all users.")
    # domain/context/sort are stored as Python-literal strings and evaluated
    # with ast.literal_eval when needed (see _get_eval_domain).
    domain = fields.Text(default='[]', required=True)
    context = fields.Text(default='{}', required=True)
    sort = fields.Text(default='[]', required=True)
    model_id = fields.Selection(selection='_list_all_models', string='Model', required=True)
    is_default = fields.Boolean(string='Default filter')
    action_id = fields.Many2one('ir.actions.actions', string='Action', ondelete='cascade',
                                help="The menu action this filter applies to. "
                                     "When left empty the filter applies to all menus "
                                     "for this model.")
    active = fields.Boolean(default=True)

    @api.model
    def _list_all_models(self):
        """Selection provider for ``model_id``: every installed model."""
        self._cr.execute("SELECT model, name FROM ir_model ORDER BY name")
        return self._cr.fetchall()

    @api.multi
    def copy(self, default=None):
        """Copy the filter, suffixing the name to avoid the unique index."""
        self.ensure_one()
        default = dict(default or {}, name=_('%s (copy)') % self.name)
        return super(IrFilters, self).copy(default)

    @api.multi
    def _get_eval_domain(self):
        """Return the stored domain string evaluated to a Python value."""
        self.ensure_one()
        return ast.literal_eval(self.domain)

    @api.model
    def _get_action_domain(self, action_id=None):
        """Return a domain component for matching filters that are visible in the
        same context (menu/view) as the given action."""
        if action_id:
            # filters specific to this menu + global ones
            return [('action_id', 'in', [action_id, False])]
        # only global ones
        return [('action_id', '=', False)]

    @api.model
    def get_filters(self, model, action_id=None):
        """Obtain the list of filters available for the user on the given model.

        :param action_id: optional ID of action to restrict filters to this action
            plus global filters. If missing only global filters are returned.
            The action does not have to correspond to the model, it may only be
            a contextual action.
        :return: list of :meth:`~osv.read`-like dicts containing the
            ``name``, ``is_default``, ``domain``, ``user_id`` (m2o tuple),
            ``action_id`` (m2o tuple) and ``context`` of the matching ``ir.filters``.
        """
        # available filters: private filters (user_id=uid) and public filters (uid=NULL),
        # and filters for the action (action_id=action_id) or global (action_id=NULL)
        action_domain = self._get_action_domain(action_id)
        filters = self.search(action_domain + [('model_id', '=', model), ('user_id', 'in', [self._uid, False])])
        # Read with the user's context so translatable fields come back in
        # the user's language.
        user_context = self.env.user.context_get()
        return filters.with_context(user_context).read(['name', 'is_default', 'domain', 'context', 'user_id', 'sort'])

    @api.model
    def _check_global_default(self, vals, matching_filters):
        """ _check_global_default(dict, list(dict), dict) -> None

        Checks if there is a global default for the model_id requested.

        If there is, and the default is different than the record being written
        (-> we're not updating the current global default), raise an error
        to avoid users unknowingly overwriting existing global defaults (they
        have to explicitly remove the current default before setting a new one)

        This method should only be called if ``vals`` is trying to set
        ``is_default``

        :raises dotop.exceptions.UserError: if there is an existing default and
            we're not updating it
        """
        domain = self._get_action_domain(vals.get('action_id'))
        defaults = self.search(domain + [
            ('model_id', '=', vals['model_id']),
            ('user_id', '=', False),
            ('is_default', '=', True),
        ])
        if not defaults:
            return
        if matching_filters and (matching_filters[0]['id'] == defaults.id):
            # The record being written IS the current global default.
            return
        raise UserError(_("There is already a shared filter set as default for %(model)s, delete or change it before setting a new default") % {'model': vals.get('model_id')})

    @api.model
    @api.returns('self', lambda value: value.id)
    def create_or_replace(self, vals):
        """Create a filter, or overwrite the one with the same (name, model,
        user) triple if it already exists; also maintains default-filter
        uniqueness per user / globally."""
        action_id = vals.get('action_id')
        current_filters = self.get_filters(vals['model_id'], action_id)
        matching_filters = [f for f in current_filters
                            if f['name'].lower() == vals['name'].lower()
                            # next line looks for matching user_ids (specific or global), i.e.
                            # f.user_id is False and vals.user_id is False or missing,
                            # or f.user_id.id == vals.user_id
                            if (f['user_id'] and f['user_id'][0]) == vals.get('user_id')]
        if vals.get('is_default'):
            if vals.get('user_id'):
                # Setting new default: any other default that belongs to the user
                # should be turned off
                domain = self._get_action_domain(action_id)
                defaults = self.search(domain + [
                    ('model_id', '=', vals['model_id']),
                    ('user_id', '=', vals['user_id']),
                    ('is_default', '=', True),
                ])
                if defaults:
                    defaults.write({'is_default': False})
            else:
                self._check_global_default(vals, matching_filters)
        # When a filter exists for the same (name, model, user) triple, we simply
        # replace its definition (considering action_id irrelevant here)
        if matching_filters:
            matching_filter = self.browse(matching_filters[0]['id'])
            matching_filter.write(vals)
            return matching_filter
        return self.create(vals)

    _sql_constraints = [
        # Partial constraint, complemented by unique index (see below). Still
        # useful to keep because it provides a proper error message when a
        # violation occurs, as it shares the same prefix as the unique index.
        ('name_model_uid_unique', 'unique (name, model_id, user_id, action_id)', 'Filter names must be unique'),
    ]

    @api.model_cr_context
    def _auto_init(self):
        """Create the case-insensitive unique index after normal init."""
        result = super(IrFilters, self)._auto_init()
        # Use unique index to implement unique constraint on the lowercase name (not possible using a constraint)
        self._cr.execute("DROP INDEX IF EXISTS ir_filters_name_model_uid_unique_index")  # drop old index w/o action
        self._cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = 'ir_filters_name_model_uid_unique_action_index'")
        if not self._cr.fetchone():
            # COALESCE(..., -1) makes NULL user/action participate in uniqueness.
            self._cr.execute("""CREATE UNIQUE INDEX "ir_filters_name_model_uid_unique_action_index" ON ir_filters
                                (lower(name), model_id, COALESCE(user_id,-1), COALESCE(action_id,-1))""")
        return result
|
[
"Administrator@20nuo003-PC"
] |
Administrator@20nuo003-PC
|
e5131ff29aa41698036707a61a86466d77e7d3b9
|
6c50bced6fb4474e4eb2e4f3c27a5ce38b0e6048
|
/manage.py
|
e1fbda688388d8db4449c6abeb1423356d40d79b
|
[] |
no_license
|
NMShihab/WebChatApp
|
0d5651fe38baccfee186e59e32c2c79de2bb39a4
|
2dda4e750c370e74bbfbc42dce02432268194d46
|
refs/heads/master
| 2023-02-01T22:57:53.738222 | 2020-12-15T17:09:14 | 2020-12-15T17:09:14 | 319,082,634 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 663 |
py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks."""
    # Point Django at this project's settings module unless the caller
    # already configured one in the environment.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ChatApi.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint; the original ImportError is
        # preserved as __cause__ for debugging.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Forward the full CLI argument vector to Django's command dispatcher.
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
|
[
"[email protected]"
] | |
f0558330618b47efd52ea7dae4624354fe0c32ac
|
89b45e528f3d495f1dd6f5bcdd1a38ff96870e25
|
/pyneng/exercises/09_functions/task_9_2.py
|
e2a25f74f4ea48dd6a5f51879221d1048f8a5c94
|
[] |
no_license
|
imatyukin/python
|
2ec6e712d4d988335fc815c7f8da049968cc1161
|
58e72e43c835fa96fb2e8e800fe1a370c7328a39
|
refs/heads/master
| 2023-07-21T13:00:31.433336 | 2022-08-24T13:34:32 | 2022-08-24T13:34:32 | 98,356,174 | 2 | 0 | null | 2023-07-16T02:31:48 | 2017-07-25T22:45:29 |
Python
|
UTF-8
|
Python
| false | false | 2,935 |
py
|
# -*- coding: utf-8 -*-
"""
Task 9.2

Create a function generate_trunk_config that generates the configuration
for trunk ports.

The function must have these parameters:
- intf_vlan_mapping: expects as an argument a dictionary mapping interfaces
  to VLANs, of this form:
    {'FastEthernet0/1': [10, 20],
     'FastEthernet0/2': [11, 30],
     'FastEthernet0/4': [17]}
- trunk_template: expects as an argument a trunk-port configuration template
  as a list of commands (the trunk_mode_template list)

The function must return a list of commands with the configuration based on
the given ports and the trunk_mode_template template. The strings in the
list must not end with a newline character.

Check the function's operation using the trunk_config dictionary and the
trunk_mode_template command list.

If the previous check passed, check the function once more on the
trunk_config_2 dictionary and make sure the resulting list contains the
correct interface and VLAN numbers.

Example of the resulting list (line breaks after each element are added
for readability):
[
'interface FastEthernet0/1',
'switchport mode trunk',
'switchport trunk native vlan 999',
'switchport trunk allowed vlan 10,20,30',
'interface FastEthernet0/2',
'switchport mode trunk',
'switchport trunk native vlan 999',
'switchport trunk allowed vlan 11,30',
...]

Restriction: all tasks must be completed using only the topics covered so far.
"""
from pprint import pprint

# Commands applied to every trunk interface; the last command has the
# interface's VLAN list appended by generate_trunk_config.
trunk_mode_template = [
    "switchport mode trunk",
    "switchport trunk native vlan 999",
    "switchport trunk allowed vlan",
]

# Test data: interface name -> list of allowed VLANs.
trunk_config = {
    "FastEthernet0/1": [10, 20, 30],
    "FastEthernet0/2": [11, 30],
    "FastEthernet0/4": [17],
}

# Second test data set with different interface and VLAN numbers.
trunk_config_2 = {
    "FastEthernet0/11": [120, 131],
    "FastEthernet0/15": [111, 130],
    "FastEthernet0/14": [117],
}
def generate_trunk_config(intf_vlan_mapping, trunk_template):
    """Generate trunk-port configuration commands.

    For every interface in ``intf_vlan_mapping`` emit an ``interface`` line
    followed by the template commands; the command ending in
    ``allowed vlan`` gets the interface's comma-separated VLAN list
    appended. Returns a flat list of command strings without newlines.
    """
    config = []
    for interface, vlans in intf_vlan_mapping.items():
        config.append("interface " + interface)
        for command in trunk_template:
            if command.endswith('allowed vlan'):
                # "[10, 20]" -> "10,20": strip brackets and spaces.
                vlan_csv = str(vlans)[1:-1].replace(" ", "")
                config.append(command + ' ' + vlan_csv)
            else:
                config.append(command)
    return config
# Self-check: generate and display configurations for both test dictionaries.
pprint(generate_trunk_config(trunk_config, trunk_mode_template))
pprint(generate_trunk_config(trunk_config_2, trunk_mode_template))
|
[
"[email protected]"
] | |
17fa82a9093701e46b8648bd51b5684c11c5f8c9
|
5d6365f4cc81272f8c481ee31f1111e8eca6dca5
|
/alipay/aop/api/domain/BizActionLogDTO.py
|
bdaee8dcf4791f2ea8f5f6ac64c0cb3184f154de
|
[
"Apache-2.0"
] |
permissive
|
barrybbb/alipay-sdk-python-all
|
9e99b56138e6ca9c0b236707c79899d396ac6f88
|
1b63620431d982d30d39ee0adc4b92463cbcee3c
|
refs/heads/master
| 2023-08-22T20:16:17.242701 | 2021-10-11T08:22:44 | 2021-10-11T08:22:44 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,378 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class BizActionLogDTO(object):
    """Value object for one business-action log entry in the Alipay API.

    The public interface is unchanged from the original: each field is a
    plain read/write attribute (``amount``, ``biz_budget_apply_code``, ...),
    plus ``to_alipay_dict()`` / ``BizActionLogDTO.from_alipay_dict(d)``.

    The original hand-wrote a private field, a property getter and a setter
    per attribute, and repeated the identical serialization logic ten times;
    this version drives everything from a single field list, removing ~140
    lines of duplication while preserving behavior.
    """

    # Attribute names; the wire-format dict uses the same keys.
    _FIELDS = (
        'amount',
        'biz_budget_apply_code',
        'biz_budget_id',
        'biz_name',
        'biz_type',
        'biz_uk_id',
        'gmt_create',
        'gmt_modified',
        'id',
        'modify_type',
    )

    def __init__(self):
        # Every field starts unset (None); falsy fields are skipped when
        # serializing, matching the original per-field truthiness checks.
        for name in self._FIELDS:
            setattr(self, name, None)

    def to_alipay_dict(self):
        """Serialize to a plain dict.

        Falsy fields are omitted; values that are themselves Alipay DTOs
        (i.e. expose ``to_alipay_dict``) are serialized recursively —
        exactly the behavior of the original per-field blocks.
        """
        params = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if not value:
                continue
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[name] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a BizActionLogDTO from a response dict.

        Returns None when ``d`` is empty or None; only keys present in the
        dict are assigned.
        """
        if not d:
            return None
        o = BizActionLogDTO()
        for name in BizActionLogDTO._FIELDS:
            if name in d:
                setattr(o, name, d[name])
        return o
|
[
"[email protected]"
] | |
93a759dd1d4ce068810fd67a473fd7f242615fd5
|
f2fcf807b441aabca1ad220b66770bb6a018b4ae
|
/coderbyte/StringMerge.py
|
aee27511c52f7fc9c13b05cde0262bec9a847235
|
[] |
no_license
|
gokou00/python_programming_challenges
|
22d1c53ccccf1f438754edad07b1d7ed77574c2c
|
0214d60074a3b57ff2c6c71a780ce5f9a480e78c
|
refs/heads/master
| 2020-05-17T15:41:07.759580 | 2019-04-27T16:36:56 | 2019-04-27T16:36:56 | 183,797,459 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 290 |
py
|
def StringMerge(string):
    """Interleave the two '*'-separated parts of *string* character by
    character: first char of the left part, first char of the right part,
    and so on, for as many characters as the left part has."""
    parts = string.split("*")
    left = parts[0]
    right = parts[1]
    merged = []
    for index, ch in enumerate(left):
        merged.append(ch)
        merged.append(right[index])
    return "".join(merged)
# Demo run with the sample input from the challenge.
print(StringMerge("123hg*aaabb"))
|
[
"[email protected]"
] | |
b69ca6b786925c7020c263729f5d7bd1e74e3d05
|
35cf6fc79b8d6c335add8e55e0f4dca6f2816d1d
|
/Python_Study/第七模块学习/Day04/EdmureBlog/web/forms/base.py
|
ab198421829eb1b2c3ebc96a9c1743d571cc884e
|
[] |
no_license
|
KongChan1988/51CTO-Treasure
|
08b4ca412ad8a09d67c1ea79c7149f8573309ca4
|
edb2e4bd11d39ac24cd240f3e815a88361867621
|
refs/heads/master
| 2021-07-04T15:57:56.164446 | 2019-07-24T15:28:36 | 2019-07-24T15:28:36 | 97,453,749 | 5 | 8 | null | 2019-10-30T22:05:12 | 2017-07-17T08:34:59 |
Python
|
UTF-8
|
Python
| false | false | 208 |
py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
class BaseForm(object):
    """Form base/mixin that captures the current HTTP request.

    Stores ``request`` on the instance and forwards the remaining arguments
    to the next class in the MRO via cooperative ``super().__init__``.
    NOTE(review): presumably mixed in ahead of a Django form class so form
    methods can read ``self.request`` — confirm at call sites.
    """

    def __init__(self, request, *args, **kwargs):
        self.request = request
        super(BaseForm, self).__init__(*args, **kwargs)
|
[
"[email protected]"
] | |
8d16a7b317c421b41cb6db551f09e5d6d244cff9
|
3d8d874ebba15fd065c0a9e74c05e8cd2a24dbe8
|
/Week 6 - Joining Data with pandas/19-Concatenate and merge to find common songs.py
|
9ad795f5e20ab5a06eff3519aec9c340843f3813
|
[] |
no_license
|
RomuloMileris/UCD_Professional_Certificate_in_Data_Analytics
|
db3e583a6e607e74f3d26b65ba0de59cff64e5a3
|
a4a77df69a2440132cfa3e89c4a1674e3e02d086
|
refs/heads/master
| 2023-02-22T12:48:50.039440 | 2021-01-15T17:06:07 | 2021-01-15T17:06:07 | 319,717,851 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 720 |
py
|
# Concatenate the classic tables vertically
# (the original script repeated both concat statements twice verbatim; the
# redundant second copies recomputed identical frames and were removed)
classic_18_19 = pd.concat([classic_18, classic_19], ignore_index=True)

# Concatenate the pop tables vertically
pop_18_19 = pd.concat([pop_18, pop_19], ignore_index=True)

# Merge classic_18_19 with pop_18_19: an inner join on 'tid' keeps only
# tracks that appear in both charts
classic_pop = classic_18_19.merge(pop_18_19, on='tid')

# Using .isin(), filter classic_18_19 rows where tid is in classic_pop
popular_classic = classic_18_19[classic_18_19['tid'].isin(classic_pop['tid'])]

# Print popular chart
print(popular_classic)
|
[
"[email protected]"
] | |
ba8d9485f114b77345b5bdc786cacf2516b8dba0
|
b29dcbf879166592b59e34f0e2bc4918c3ac94a0
|
/cart/views.py
|
4dfc522e62c9c9e4cc9b815d50b1184bbe3d6954
|
[] |
no_license
|
samdasoxide/myshop
|
ce6d4553af04f1ddf5de1cbfa38ef2ff33ac6b11
|
21115de7748862c8a44ef4dc5a61511ad67746dd
|
refs/heads/master
| 2022-12-14T07:39:13.803686 | 2017-06-20T11:42:30 | 2017-06-20T11:42:30 | 92,954,076 | 0 | 0 | null | 2022-12-07T23:58:40 | 2017-05-31T14:23:18 |
JavaScript
|
UTF-8
|
Python
| false | false | 1,067 |
py
|
from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.http import require_POST
from shop.models import Product
from .cart import Cart
from .forms import CartAddProductFrom
@require_POST
def cart_add(request, product_id):
    """Add a product to the session cart (POST only), then show the cart.

    Quantity and the update/increment flag are validated through
    CartAddProductFrom; invalid submissions simply redirect without
    touching the cart.
    """
    cart = Cart(request)
    product = get_object_or_404(Product, id=product_id)
    form = CartAddProductFrom(request.POST)
    if form.is_valid():
        data = form.cleaned_data
        cart.add(
            product=product,
            quantity=data['quantity'],
            update_quantity=data['update'],
        )
    return redirect('cart:cart_detail')
def cart_remove(request, product_id):
    """Remove a product from the session cart and return to the cart page."""
    cart = Cart(request)
    target = get_object_or_404(Product, id=product_id)
    cart.remove(target)
    return redirect('cart:cart_detail')
def cart_detail(request):
    """Render the cart page, attaching a pre-filled update form to each item."""
    cart = Cart(request)
    for entry in cart:
        initial = {'quantity': entry['quantity'], 'update': True}
        entry['update_quantity_form'] = CartAddProductFrom(initial=initial)
    return render(request, 'cart/detail.html', {'cart': cart})
|
[
"[email protected]"
] | |
b3743862fc7b8de3b6dca5344e37f61f50a634eb
|
b97a608517f024b81db0bdc4094d143ba87c8af4
|
/src/oceandata/export_production/mouw.py
|
5922a9fe193338af1b8d507473dce963eb6aaa90
|
[
"MIT"
] |
permissive
|
brorfred/oceandata
|
ff008042cc993a07d9db1de3fa72e70f70d44219
|
831e0691223da1aa6a6e97175e8c2d7874bf60cd
|
refs/heads/master
| 2022-02-14T11:48:13.401206 | 2022-01-27T17:01:56 | 2022-01-27T17:01:56 | 175,451,337 | 1 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,519 |
py
|
"""
Global ocean particulate organic carbon flux.
Ref: https://doi.org/10.1594/PANGAEA.855594,
"""
import os, pathlib
import warnings
import pandas as pd
import numpy as np
import requests
DATADIR = pathlib.PurePath(pathlib.Path.home(), ".oceandata")
pathlib.Path(DATADIR).mkdir(parents=True, exist_ok=True)
DATAURL = "https://doi.pangaea.de/10.1594/PANGAEA.855594"
"""
def load():
df = pd.read_hdf("h5files/ep_mouw_with_sat.h5")
df["Zeu"] = 4.6/df.kd490
df["ep_obs"] = df.POC_flux
df["chl"] = df["chl"] * df["Zeu"]
#lh = ecoregions.Longhurst()
#longh = lh.match("regions", lonvec=dfm.lon, latvec=dfm.lat, jdvec=dfm.lat*0)
#dfm["longhurst"] = longh
return df
"""
def load(datadir=DATADIR, filename="GO_flux.tab", with_std=False):
    """Load tab file and fix some columns.

    Downloads the PANGAEA tab file into ``datadir`` on first use, skips the
    metadata header (terminated by a line containing "*/"), optionally drops
    the std-dev columns, renames the verbose PANGAEA column names to short
    identifiers, and indexes the resulting DataFrame by trap retrieval time
    (``end_time``).
    """
    fn = os.path.join(datadir, filename)
    if not os.path.isfile(fn):
        download(datadir=datadir, filename=filename)
    with open(fn ,"r") as fH:
        # Skip the PANGAEA metadata header; the data table starts after the
        # line containing "*/".
        # NOTE(review): if "*/" is absent this loops forever at EOF
        # (readline() returns "" repeatedly) — confirm the file format
        # guarantees the marker.
        while 1:
            line = fH.readline()
            if "*/" in line:
                break
        df = pd.read_csv(fH, sep="\t", parse_dates=[1,])
    if not with_std:
        # Drop uncertainty columns (and the Reference column) by default.
        df.drop(columns=['Flux std dev [±]', 'C flux [mg/m**2/day]',
                         'C flux std dev [±]', 'POC flux std dev [±]',
                         'PIC flux std dev [±]', 'PON flux std dev [±]',
                         'POP flux std dev [±]', 'PSi flux std dev [±]',
                         'PAl std dev [±]', 'CaCO3 flux std dev [±]',
                         'Reference'], inplace=True)
    # Map verbose PANGAEA column headers to short snake_case names.
    df.rename(columns={'ID (Reference identifier)':"ref_ID",
                       'ID (Unique location identifier)':"UUID",
                       'Type (Data type)':"sampling_type",
                       'Latitude':"lat",
                       'Longitude':"lon",
                       'Flux tot [mg/m**2/day]':"tot_flux",
                       'POC flux [mg/m**2/day]':"POC_flux",
                       'PIC flux [mg/m**2/day]':"PIC_flux",
                       'PON flux [mg/m**2/day]':"PON_flux",
                       'POP flux [mg/m**2/day]':"POP_flux",
                       'PSi flux [mg/m**2/day]':"PSi_flux",
                       'PSiO2 flux [mg/m**2/day]':"PSiO2_flux",
                       'PSi(OH)4 flux [mg/m**2/day]':"PSiOH4_flux",
                       'PAl [mg/m**2/day]':"PAl_flux",
                       'Chl flux [mg/m**2/day]':"Chl_flux",
                       'Pheop flux [µg/m**2/day]':"Pheop_flux",
                       'CaCO3 flux [mg/m**2/day]':"CaCO3_flux",
                       'Fe flux [mg/m**2/day]':"Fe_flux",
                       'Mn flux [µg/m**2/day]':"Mn_flux",
                       'Ba flux [µg/m**2/day]':"Ba_flux",
                       'Detrital flux [mg/m**2/day]':"Detr_flux",
                       'Ti flux [µg/m**2/day]':"Ti_flux",
                       'Bathy depth [m] (ETOPO1 bathymetry)':"bathy",
                       'Depth water [m] (Sediment trap deployment depth)':"depth",
                       'Area [m**2]':"area",
                       'Duration [days]':"duration",
                       'Date/Time (Deployed)':"start_time",
                       'Date/time end (Retrieved)':"end_time",
                       'Area [m**2] (Surface area of trap)':"trap_area",
                       },
              inplace=True)
    # Remaining verbose columns with no short-name mapping.
    df.drop(columns=['Type (Sediment trap type)',
                     'Elevation [m a.s.l.] (Total water depth)'],
            inplace=True)
    # Normalize deployment/retrieval timestamps and index by retrieval time.
    df["start_time"] = pd.DatetimeIndex(df["start_time"])
    df["end_time"] = pd.DatetimeIndex(df["end_time"])
    df.set_index("end_time", inplace=True)
    return df
def download(datadir=DATADIR, filename="GO_flux.tab"):
    """Download the GO-flux tab file from the PANGAEA server.

    Any pre-existing local copy is removed first.

    Returns
    -------
    bool or str
        True after a successful download to disk, False when the
        connection times out.  (The dead `local_filename is None`
        branch would return the response text; kept for compatibility.)

    Raises
    ------
    IOError
        If the server responds with a non-OK status code.
    """
    local_filename = os.path.join(datadir, filename)
    try:
        os.unlink(local_filename)
    except FileNotFoundError:
        pass
    try:
        r = requests.get(DATAURL, stream=True, timeout=6,
                         params={"format":"textfile"})
    except requests.ReadTimeout:
        warnings.warn("Connection to server timed out.")
        return False
    if not r.ok:
        raise IOError(f"Could not download file from server, Error {r.status_code}")
    if local_filename is None:
        return r.text
    with open(local_filename, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    # BUGFIX: the original returned None on this (success) path while
    # returning False on timeout, so `if download(...)` was misleading.
    return True
|
[
"[email protected]"
] | |
07e30b5ca44e0780d580e0e6e6bb3d6b3d5b027e
|
031b1c5b0c404f23ccd61a08845695bd4c3827f2
|
/python/pyfiles/算术运算符.py
|
39efec4aa582072f142c44bd1bc23d687686d1e0
|
[] |
no_license
|
AndyFlower/zixin
|
c8d957fd8b1e6ca0e1ae63389bc8151ab93dbb55
|
647705e5f14fae96f82d334ba1eb8a534735bfd9
|
refs/heads/master
| 2022-12-23T21:10:44.872371 | 2021-02-10T07:15:21 | 2021-02-10T07:15:21 | 232,578,547 | 1 | 0 | null | 2022-12-16T15:41:14 | 2020-01-08T14:13:25 |
Java
|
UTF-8
|
Python
| false | false | 795 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 1 22:57:02 2020
@author: sanglp

Demonstrates Python's arithmetic operators on numbers, strings and
collections.  (Comments translated to English.)
"""
# The + operator: addition and sequence concatenation
print(3+5)
print(3.4+4.5)
print((3+4j)+(4+5j))
print('abc'+'def')
print([1,2]+[3,4])
print((1,2)+(3,))
# The - operator: subtraction, negation, set difference
print(7.9 -4.5) # floating point arithmetic is inexact
print(5-3)
num = 3
print(-num)
print(--num)  # double negation: prints 3
print({1,2,3}-{3,4,5}) # set difference
# The * operator: multiplication and sequence repetition
print(3333*5555)
print((3+4j)*(5+6j))
print('重要的事情说3遍'*3)
print([0]*5)
print((0,)*3)
# The / and // operators: true division vs floor division
print(17 / 4)
print(17 // 4) #4
print((-17) / 4)
print((-17) // 4) #-5 — floor division rounds toward negative infinity
# The % operator: modulo, and %-style string formatting
print(365 %7)
print(365 %2)
print('%c,%c,%c' %(65,97,48)) # format integers as characters: A,a,0
# The ** operator: exponentiation (right-associative)
print(2 ** 4)
print(3 ** 3 ** 3)  # right-associative: same as 3 ** (3**3)
print(3 ** (3**3))
print((3**3)**3)
print(9**0.5)
print((-1)**0.5) # NOTE: yields a complex result (≈1j), not a negative number
[
"[email protected]"
] | |
0ef417ef2ea2ab51e1240c4fc86e2f26be2e0302
|
509d717f18caad77e00c3261dcf1934f7e5bd95d
|
/venv/css_selectors/sports_bet_page_locators.py
|
65e85e02577a73a8730a604977beb681bc7cbdcc
|
[] |
no_license
|
Swingyboy/pronet_design_testing
|
8aee2f42e2452ca178fbe34e7a51ce7377156e08
|
ad3dc5a58983ed6d6c9cef91a40ea8160f699dd0
|
refs/heads/master
| 2023-05-06T05:34:47.438023 | 2020-09-15T09:17:36 | 2020-09-15T09:17:36 | 281,055,876 | 1 | 1 | null | 2021-06-02T02:56:51 | 2020-07-20T08:12:21 |
Python
|
UTF-8
|
Python
| false | false | 403 |
py
|
from selenium.webdriver.common.by import By
class SportsBetPageLocators():
    """CSS-selector locators for section headers on the sports-bet page.

    Each constant is a (By.CSS_SELECTOR, selector) tuple, ready to pass
    to selenium's find_element(*locator).
    """
    UPCOMING_EVENTS_BAR =(By.CSS_SELECTOR, 'upcoming-events > div > div.modul-header')
    LIVE_BET_BAR = (By.CSS_SELECTOR, 'live-at-now > div > div.modul-header')
    ESPORTS_BAR = (By.CSS_SELECTOR, 'app-esports > div > div.modul-header')
    TODAY_EVENT_BAR = (By.CSS_SELECTOR, 'todays-sport-types > div > div.modul-header')
|
[
"[email protected]"
] | |
b0af71064e926490ac415e9930d72e7cccec1d8c
|
7464f15c33c74454f2a98dceb7f603919abba4d1
|
/happy.py
|
01383a2a50c7506bb341600a3deaf9076a692953
|
[] |
no_license
|
willingc/my-bit
|
374bece797c59956e500504cd62940a2c1718013
|
535768dcb09297f1028e0e111fd062b91e8032c6
|
refs/heads/master
| 2016-08-08T21:26:22.119643 | 2015-11-30T03:23:59 | 2015-11-30T03:23:59 | 47,053,915 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 935 |
py
|
"""
happy.py
by Carol Willing
November 28, 2015
Public Domain
Use this to display a 'Happy Face' image on micro:bit's 5x5 pixel grid of LEDs.
Remember... Writing a program is similar to planning a birthday party.
Program Birthday party
------- --------------
'Prepare' Prepare the room with balloons; order food; pick up a cake.
'Do' Do things during the party -- sing, dance, play videogames.
'Clean' Clean the table. Tidy up after the party. Take out the rubbish.
"""
from microbit import *
# Prepare. Put the preinstalled images into user friendly variables
my_happy_face = Image.HAPPY
my_sad_face = Image.SAD
# Do things! ----> Show the images on the display.
display.show(my_happy_face)
sleep(8000)
display.show(my_sad_face)
sleep(8000)
display.show(my_happy_face)
sleep(4000)
# Clean up stuff. Display 'BYE' and clear display. (Clean your room too.)
display.scroll("BYE")
display.clear()
|
[
"[email protected]"
] | |
f8f8a93e2b53a4b74d0c41930fd04e417f2189c8
|
2f418a0f2fcca40f84ec0863b31ff974b574350c
|
/scripts/addons_extern/cut_mesh-master/op_slice/slice_datastructure.py
|
6c86f20d47db1178d36c9ecde0f011a0e1296f6c
|
[] |
no_license
|
JT-a/blenderpython279
|
57a81b55564218f3b1417c2ffa97f5161897ec79
|
04846c82f794c22f87d677d9eb8cec1d05c48cda
|
refs/heads/master
| 2021-06-25T06:58:07.670613 | 2017-09-11T11:14:36 | 2017-09-11T11:14:36 | 103,723,697 | 4 | 2 | null | 2017-09-16T04:09:31 | 2017-09-16T04:09:31 | null |
UTF-8
|
Python
| false | false | 7,750 |
py
|
'''
Created on Oct 8, 2015
@author: Patrick
'''
import time
import bpy
import bmesh
from mathutils import Vector, Matrix, kdtree
from mathutils.bvhtree import BVHTree
from mathutils.geometry import intersect_point_line, intersect_line_plane
from bpy_extras import view3d_utils
from ..bmesh_fns import grow_selection_to_find_face, flood_selection_faces, edge_loops_from_bmedges
from ..cut_algorithms import cross_section_2seeds_ver1, path_between_2_points
from ..geodesic import geodesic_walk, continue_geodesic_walk, gradient_descent
from .. import common_drawing
class Slice(object):
    '''
    A class which manages user placed points on an object to create a
    piecewise path of geodesics, adapted to the objects surface.
    '''
    def __init__(self,context, cut_object):
        # Snapshot the object's mesh into a private BMesh and build a BVH
        # tree over it for fast ray casting.  `context` is currently unused.
        self.cut_ob = cut_object
        self.bme = bmesh.new()
        self.bme.from_mesh(cut_object.data)
        self.bme.verts.ensure_lookup_table()
        self.bme.edges.ensure_lookup_table()
        self.bme.faces.ensure_lookup_table()
        # Dead code: an earlier triangulation pass, kept for reference.
        #non_tris = [f for f in self.bme.faces if len(f.verts) > 3]
        #bmesh.ops.triangulate(self.bme, faces = non_tris, quad_method = 0, ngon_method = 0)
        #non_tris = [f for f in self.bme.faces if len(f.verts) > 3]
        #if len(non_tris):
        #geom = bmesh.ops.connect_verts_concave(self.bme, non_tris)
        self.bme.verts.ensure_lookup_table()
        self.bme.edges.ensure_lookup_table()
        self.bme.faces.ensure_lookup_table()
        self.bvh = BVHTree.FromBMesh(self.bme)
        # Interaction state: seed/target faces, their local-space hit
        # locations, and the computed cut path (list of points).
        self.seed = None
        self.seed_loc = None
        self.target = None
        self.target_loc = None
        self.path = []
    def reset_vars(self):
        '''
        Clear all interaction state (seed, target, cached geodesic data, path).
        '''
        self.seed = None
        self.seed_loc = None
        self.target = None
        self.target_loc = None
        self.geo_data = [dict(), set(), set(), set()] #geos, fixed, close, far
        self.path = []
    def grab_initiate(self):
        # Begin dragging the target point; stash undo state first.
        # Returns False when there is no target to grab.
        if self.target != None :
            self.grab_undo_loc = self.target_loc
            self.target_undo = self.target
            self.path_undo = self.path
            return True
        else:
            return False
    def grab_mouse_move(self,context,x,y):
        # Ray-cast the mouse position onto the object; if it hits, move
        # the target there and recompute the seed->target path.
        region = context.region
        rv3d = context.region_data
        coord = x, y
        view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
        ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)
        ray_target = ray_origin + (view_vector * 1000)
        mx = self.cut_ob.matrix_world
        imx = mx.inverted()
        # The two branches handle the ray_cast API change in Blender 2.77.
        # NOTE(review): bversion is not defined in this module's visible
        # imports — presumably imported elsewhere; confirm (NameError risk).
        if bversion() < '002.077.000':
            loc, no, face_ind = self.cut_ob.ray_cast(imx * ray_origin, imx * ray_target)
        else:
            res, loc, no, face_ind = self.cut_ob.ray_cast(imx * ray_origin, imx * ray_target - imx * ray_origin)
        loc2, no2, face_ind2, d = self.bvh.ray_cast(imx * ray_origin, view_vector)
        if loc != None and loc2 != None:
            # Debug print: distance between the object and BVH hit points.
            print((loc - loc2).length)
        if face_ind == -1:
            self.grab_cancel()
            return
        self.target = self.bme.faces[face_ind]
        self.target_loc = loc
        vrts, eds, ed_cross, f_cross, error = path_between_2_points(self.bme, self.bvh, mx,mx* self.seed_loc,mx*self.target_loc,
                                            max_tests = 10000, debug = True,
                                            prev_face = None, use_limit = True)
        if not error:
            self.path = vrts
        #else:
        #self.path = []
    def grab_cancel(self):
        # Restore the pre-grab target and path.
        self.target_loc = self.grab_undo_loc
        self.target = self.target_undo
        self.path = self.path_undo
        return
    def grab_confirm(self):
        # Commit the grab by discarding the undo state.
        self.grab_undo_loc = None
        self.target_undo = None
        self.path_undo = []
        return
    def click_add_seed(self,context,x,y):
        '''
        x,y = event.mouse_region_x, event.mouse_region_y
        Ray-cast the click onto the object and set the seed (start) point
        of the cut; resets the cached geodesic data.
        '''
        region = context.region
        rv3d = context.region_data
        coord = x, y
        view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
        ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)
        ray_target = ray_origin + (view_vector * 1000)
        mx = self.cut_ob.matrix_world
        imx = mx.inverted()
        if bversion() < '002.077.000':
            loc, no, face_ind = self.cut_ob.ray_cast(imx * ray_origin, imx * ray_target)
        else:
            res, loc, no, face_ind = self.cut_ob.ray_cast(imx * ray_origin, imx * ray_target - imx * ray_origin)
        if face_ind == -1:
            self.selected = -1
            return
        self.seed = self.bme.faces[face_ind]
        self.seed_loc = loc
        self.geo_data = [dict(), set(), set(), set()]
    def click_add_target(self, context, x, y):
        # Ray-cast the click to set the end (target) point and compute the
        # path between seed and target; clears the path on failure.
        region = context.region
        rv3d = context.region_data
        coord = x, y
        view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)
        ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)
        ray_target = ray_origin + (view_vector * 1000)
        mx = self.cut_ob.matrix_world
        imx = mx.inverted()
        if bversion() < '002.077.000':
            loc, no, face_ind = self.cut_ob.ray_cast(imx * ray_origin, imx * ray_target)
        else:
            res, loc, no, face_ind = self.cut_ob.ray_cast(imx * ray_origin, imx * ray_target - imx * ray_origin)
        if face_ind == -1: return
        self.target = self.bme.faces[face_ind]
        self.target_loc = loc
        vrts, eds, ed_cross, f_cross, error = path_between_2_points(self.bme, self.bvh, mx,mx* self.seed_loc,mx*self.target_loc,
                                            max_tests = 10000, debug = True,
                                            prev_face = None, use_limit = True)
        if not error:
            self.path = vrts
        else:
            self.path = []
        return
    def draw(self,context):
        # Draw the current path (blue polyline), seed (red dot) and target
        # (green dot) in world space.
        if len(self.path):
            mx = self.cut_ob.matrix_world
            pts = [mx * v for v in self.path]
            common_drawing.draw_polyline_from_3dpoints(context, pts, (.2,.1,.8,1), 3, 'GL_LINE')
        if self.seed_loc != None:
            mx = self.cut_ob.matrix_world
            common_drawing.draw_3d_points(context, [mx * self.seed_loc], 8, color = (1,0,0,1))
        if self.target_loc != None:
            mx = self.cut_ob.matrix_world
            common_drawing.draw_3d_points(context, [mx * self.target_loc], 8, color = (0,1,0,1))
class PolyCutPoint(object):
    """A user-placed cut point lying on the surface of a mesh."""

    def __init__(self, co):
        self.co = co              # 3d location of the point
        self.no = None            # surface normal, filled in later
        self.face = None          # face the point lies on, filled in later
        self.face_region = set()  # faces in the neighborhood of self.face

    def find_closest_non_manifold(self):
        """Placeholder: locate the nearest non-manifold edge (not implemented)."""
        return None
class NonManifoldEndpoint(object):
    """An endpoint anchored on a non-manifold (border) edge.

    Parameters
    ----------
    co : location of the endpoint
    ed : edge object; must be a border edge with exactly one linked face

    Raises
    ------
    ValueError
        If *ed* does not have exactly one linked face.  (The original
        code did a bare ``return None`` in __init__, which does NOT stop
        construction — it produced an instance with no co/ed/face
        attributes, failing later with AttributeError.)
    """
    def __init__(self, co, ed):
        if len(ed.link_faces) != 1:
            raise ValueError("NonManifoldEndpoint requires a border edge "
                             "with exactly one linked face")
        self.co = co
        self.ed = ed
        self.face = ed.link_faces[0]
|
[
"[email protected]"
] | |
33a16862ec2f40db072c68c1e4c243096bce805a
|
abb614790bdf41c7db9d09dfdea4385f78c2be52
|
/rtk-RQA/rtk/hardware/component/connection/Socket.py
|
c1454c5a9c43e324ac69b5e3c374fd2decff5864
|
[
"BSD-3-Clause"
] |
permissive
|
codacy-badger/rtk
|
f981bb75aadef6aaeb5a6fa427d0a3a158626a2a
|
bdb9392164b0b32b0da53f8632cbe6e3be808b12
|
refs/heads/master
| 2020-03-19T02:46:10.320241 | 2017-10-26T20:08:12 | 2017-10-26T20:08:12 | 135,659,105 | 0 | 0 | null | 2018-06-01T02:43:23 | 2018-06-01T02:43:23 | null |
UTF-8
|
Python
| false | false | 5,321 |
py
|
#!/usr/bin/env python
"""
######################################################
Hardware.Component.Connection Package IC Socket Module
######################################################
"""
# -*- coding: utf-8 -*-
#
# rtk.hardware.component.connection.Socket.py is part of the RTK
# Project
#
# All rights reserved.
import gettext
import locale
# Support both in-tree execution (flat imports) and installed-package
# execution (rtk.* imports).
try:
    import Configuration
    import Utilities
    from hardware.component.connection.Connection import Model as Connection
except ImportError:                         # pragma: no cover
    import rtk.Configuration as Configuration
    import rtk.Utilities as Utilities
    from rtk.hardware.component.connection.Connection import Model as \
        Connection
__author__ = 'Andrew Rowland'
__email__ = '[email protected]'
__organization__ = 'ReliaQual Associates, LLC'
__copyright__ = 'Copyright 2007 - 2015 Andrew "weibullguy" Rowland'
# Add localization support; fall back to the system default locale.
try:
    locale.setlocale(locale.LC_ALL, Configuration.LOCALE)
except locale.Error:                        # pragma: no cover
    locale.setlocale(locale.LC_ALL, '')
_ = gettext.gettext
class Socket(Connection):
    """
    The Socket connection data model contains the attributes and methods of an
    IC socket connection component. The attributes of an IC socket connection
    are:
    :cvar int subcategory: the Connection subcategory.
    :ivar float base_hr: the MIL-HDBK-217FN2 base/generic hazard rate.
    :ivar str reason: the reason(s) the Connection is overstressed.
    :ivar float piE: the MIL-HDBK-217FN2 operating environment factor.
    Hazard Rate Models:
        # MIL-HDBK-217FN2, section 15.3.
    """
    # MIL-HDBK-217FN2 hazard rate calculation variables.
    # ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
    # Quality factors (indexed by quality - 1).
    _piQ = [1.0, 2.0]
    # Environment factors (indexed by environment_active - 1).
    _piE = [1.0, 3.0, 14.0, 6.0, 18.0, 8.0, 12.0, 11.0, 13.0, 25.0, 0.5, 14.0,
            36.0, 650.0]
    # Parts-count base hazard rates per environment.
    _lambdab_count = [0.0019, 0.0058, 0.027, 0.012, 0.035, 0.015, 0.023, 0.021,
                      0.025, 0.048, 0.00097, 0.027, 0.070, 1.3]
    # ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
    subcategory = 74                        # Subcategory ID in the common DB.
    def __init__(self):
        """
        Method to initialize a IC Socket connection data model instance.
        """
        super(Socket, self).__init__()
        # Define private dictionary attributes.
        # Define private list attributes.
        # Define private scalar attributes.
        # Define public dictionary attributes.
        # Define public list attributes.
        # Define public scalar attributes.
        self.n_active_contacts = 0      # number of active contacts (pins)
        self.piP = 0.0                  # active-pins correction factor
        self.base_hr = 0.00042          # base hazard rate (MIL-HDBK-217FN2)
    def set_attributes(self, values):
        """
        Method to set the Multi-Pin Connection data model attributes.
        :param tuple values: tuple of values to assign to the instance
                             attributes.
        :return: (_code, _msg); the error code and error message.
        :rtype: tuple
        """
        _code = 0
        _msg = ''
        # The first 133 values belong to the parent Connection model.
        (_code, _msg) = Connection.set_attributes(self, values[:133])
        try:
            self.base_hr = 0.00042
            self.piP = float(values[133])
            self.n_active_contacts = int(values[134])
        except IndexError as _err:
            _code = Utilities.error_handler(_err.args)
            _msg = "ERROR: Insufficient input values."
        except(TypeError, ValueError) as _err:
            _code = Utilities.error_handler(_err.args)
            _msg = "ERROR: Converting one or more inputs to correct data type."
        return(_code, _msg)
    def get_attributes(self):
        """
        Method to retrieve the current values of the Multi-Pin Connection data
        model attributes.
        :return: (n_active_contacts, piP)
        :rtype: tuple
        """
        # Parent attributes first, then the socket-specific ones.
        _values = Connection.get_attributes(self)
        _values = _values + (self.piP, self.n_active_contacts)
        return _values
    def calculate_part(self):
        """
        Method to calculate the hazard rate for the Multi-Pin Connection data
        model.
        :return: False if successful or True if an error is encountered.
        :rtype: bool
        """
        from math import exp
        self.hazard_rate_model = {}
        if self.hazard_rate_type == 1:
            # Parts-count prediction.
            self.hazard_rate_model['equation'] = 'lambdab * piQ'
            # Quality factor.
            self.piQ = self._piQ[self.quality - 1]
        elif self.hazard_rate_type == 2:
            # Parts-stress prediction (MIL-HDBK-217FN2, section 15.3).
            self.hazard_rate_model['equation'] = 'lambdab * piE * piP'
            # Active pins correction factor.
            if self.n_active_contacts >= 2:
                self.piP = exp(((self.n_active_contacts - 1) / 10.0)**0.51064)
            else:
                self.piP = 0.0
            self.hazard_rate_model['piP'] = self.piP
            # Environmental correction factor.
            self.piE = self._piE[self.environment_active - 1]
        return Connection.calculate_part(self)
|
[
"[email protected]"
] | |
4c54b23822c77598fc8746f24f4c1bf18cdad087
|
d9fb6c246965cbf290186268298859ddb913ee6e
|
/190813/03_mod.py
|
3a21a5da1950eb762f029d3aa591e49c9be98f49
|
[] |
no_license
|
91hongppie/algorithm
|
1ca6d54de6eab252c708bf83835ace8a109d73fc
|
4c2fa8178e0ef7afbf0b736387f05cbada72f95d
|
refs/heads/master
| 2020-07-20T22:17:40.700366 | 2020-06-29T00:06:11 | 2020-06-29T00:06:11 | 206,717,677 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 375 |
py
|
import sys
# Redirect stdin so the interactive-judge-style input() calls read from
# the bundled sample file instead of the keyboard.
sys.stdin = open('sample_input_03.txt', 'r')
N = int(input())
for i in range(1, N+1):
    # play[0] is the board size; read an NxN grid of characters.
    play = list(map(int, input().split()))
    test_words = [[] for i in range(play[0])]
    for j in range(play[0]):
        test_words[j] = list(map(str, input()))
    for m in range(play[0]):
        for n in range(play[0]):
            # NOTE(review): mo_list is computed but never used and nothing
            # is printed — this looks like an unfinished work-in-progress.
            mo_list = test_words[m][n:play[0]:]
|
[
"[email protected]"
] | |
cd4f12206ec91523ba27cb33a771f3673c839cd1
|
cc129db64fc64d1cb9a99526583771c10e245deb
|
/tests/test_det_next_michigan_development_corporation.py
|
da9a98ab1e31ab67be68a83440ae713aa016e955
|
[
"MIT"
] |
permissive
|
avelosa/city-scrapers-det
|
a42df36b7d2e98f7be68ae17e22c03af7a20280c
|
964b941b67fb5113cda5e2bebd2ba288ac1422d7
|
refs/heads/main
| 2023-02-02T01:19:07.396737 | 2020-09-29T16:52:11 | 2020-09-29T16:52:11 | 300,441,174 | 1 | 0 |
MIT
| 2020-10-01T22:30:23 | 2020-10-01T22:30:22 | null |
UTF-8
|
Python
| false | false | 4,826 |
py
|
from datetime import datetime
from os.path import dirname, join
import pytest
import scrapy
from city_scrapers_core.constants import BOARD, PASSED, TENTATIVE
from city_scrapers_core.utils import file_response
from freezegun import freeze_time
from scrapy.settings import Settings
from city_scrapers.spiders.det_next_michigan_development_corporation import (
DetNextMichiganDevelopmentCorporationSpider,
)
# Expected constants shared by the assertions below.
LOCATION = {
    "name": "DEGC, Guardian Building",
    "address": "500 Griswold St, Suite 2200, Detroit, MI 48226",
}
TITLE = "Board of Directors"
# Module-level fixtures: parse the cached listing page once, with time
# frozen so meeting-status computation is deterministic.
test_response = file_response(
    join(dirname(__file__), "files", "det_next_michigan_development_corporation.html"),
    url="http://www.degc.org/public-authorities/d-nmdc/",
)
freezer = freeze_time("2018-07-26")
spider = DetNextMichiganDevelopmentCorporationSpider()
spider.settings = Settings(values={"CITY_SCRAPERS_ARCHIVE": False})
freezer.start()
parsed_items = [item for item in spider._next_meetings(test_response)]
freezer.stop()
def test_initial_request_count():
    # parse() should yield the upcoming meeting plus follow-up requests
    # for the two prior-fiscal-year listing pages.
    freezer.start()
    items = list(spider.parse(test_response))
    freezer.stop()
    assert len(items) == 3
    urls = {r.url for r in items if isinstance(r, scrapy.Request)}
    assert urls == {
        "http://www.degc.org/public-authorities/d-nmdc/fy-2017-2018-meetings/",
        "http://www.degc.org/public-authorities/d-nmdc/dnmdc-fy-2016-2017-meetings/",
    }
# current meeting http://www.degc.org/public-authorities/ldfa/
# --- assertions about the upcoming meeting (parsed_items[0]) ---
def test_title():
    assert parsed_items[0]["title"] == TITLE
def test_description():
    assert parsed_items[0]["description"] == ""
def test_start():
    assert parsed_items[0]["start"] == datetime(2018, 9, 11, 9)
def test_end():
    assert parsed_items[0]["end"] is None
def test_id():
    assert (
        parsed_items[0]["id"]
        == "det_next_michigan_development_corporation/201809110900/x/board_of_directors"
    )
def test_status():
    assert parsed_items[0]["status"] == TENTATIVE
def test_location():
    assert parsed_items[0]["location"] == LOCATION
def test_sources():
    assert parsed_items[0]["source"] == "http://www.degc.org/public-authorities/d-nmdc/"
def test_links():
    assert parsed_items[0]["links"] == []
@pytest.mark.parametrize("item", parsed_items)
def test_all_day(item):
    assert item["all_day"] is False
@pytest.mark.parametrize("item", parsed_items)
def test_classification(item):
    assert item["classification"] == BOARD
# previous meetings e.g.
# http://www.degc.org/public-authorities/ldfa/fy-2017-2018-meetings/
test_prev_response = file_response(
    join(
        dirname(__file__),
        "files",
        "det_next_michigan_development_corporation_prev.html",
    ),
    url="http://www.degc.org/public-authorities/d-nmdc/dnmdc-fy-2016-2017-meetings",
)
freezer.start()
parsed_prev_items = [item for item in spider._parse_prev_meetings(test_prev_response)]
# Sort newest-first so index 0 is the most recent past meeting.
parsed_prev_items = sorted(parsed_prev_items, key=lambda x: x["start"], reverse=True)
freezer.stop()
def test_prev_request_count():
    # The previous-meetings pass should request both prior-year pages.
    freezer.start()
    items = list(spider._prev_meetings(test_response))
    freezer.stop()
    urls = {r.url for r in items if isinstance(r, scrapy.Request)}
    assert len(urls) == 2
    assert urls == {
        "http://www.degc.org/public-authorities/d-nmdc/fy-2017-2018-meetings/",
        "http://www.degc.org/public-authorities/d-nmdc/dnmdc-fy-2016-2017-meetings/",
    }
# --- assertions about the most recent past meeting (parsed_prev_items[0]) ---
def test_prev_meeting_count():
    assert len(parsed_prev_items) == 1
def test_prev_title():
    assert parsed_prev_items[0]["title"] == TITLE
def test_prev_description():
    assert parsed_prev_items[0]["description"] == ""
def test_prev_start():
    assert parsed_prev_items[0]["start"] == datetime(2017, 8, 8, 9)
def test_prev_end():
    assert parsed_prev_items[0]["end"] is None
def test_prev_id():
    assert (
        parsed_prev_items[0]["id"]
        == "det_next_michigan_development_corporation/201708080900/x/board_of_directors"
    )
def test_prev_status():
    assert parsed_prev_items[0]["status"] == PASSED
def test_prev_location():
    assert parsed_prev_items[0]["location"] == LOCATION
def test_prev_source():
    assert (
        parsed_prev_items[0]["source"]
        == "http://www.degc.org/public-authorities/d-nmdc/dnmdc-fy-2016-2017-meetings"
    )
def test_prev_links():
    assert parsed_prev_items[0]["links"] == [
        {
            "href": "http://www.degc.org/wp-content/uploads/2016-08-09-DNMDC-Special-Board-Meeting-Agenda-4-1.pdf", # noqa
            "title": "D-NMDC Agenda",
        },
    ]
@pytest.mark.parametrize("item", parsed_prev_items)
def test_prev_all_day(item):
    assert item["all_day"] is False
@pytest.mark.parametrize("item", parsed_prev_items)
def test_prev_classification(item):
    assert item["classification"] == BOARD
|
[
"[email protected]"
] | |
4212426d83cef5a31b6993b1859aa096f5a86957
|
c7bb490ef96fda51a946478a4f584814e1665a6a
|
/backend/urls.py
|
06c33f1ea3c2e43ed3c886400d353b67ec87d687
|
[] |
no_license
|
pawanpaudel93/motion-planning-dashboard
|
e70acc9737cdedf0fd0beac0a0700cc88f9c2559
|
642f5955d518747dfc14f1f22a93ef20784329d8
|
refs/heads/master
| 2023-03-11T14:33:31.643898 | 2021-02-28T11:26:16 | 2021-02-28T11:26:16 | 340,398,506 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 528 |
py
|
"""MPD URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
"""
from django.contrib import admin
from django.urls import path, include, re_path
from rest_framework import routers
from .api import urls as api_urls
from .api.views import index_view
router = routers.DefaultRouter()
urlpatterns = [
path('api/v1/', include(api_urls)),
path('admin/', admin.site.urls),
re_path(r'^.*$', index_view, name='index')
]
|
[
"[email protected]"
] | |
0bc44e39ed3c0411a6484900df8dc4ccda28fa3a
|
67b0379a12a60e9f26232b81047de3470c4a9ff9
|
/profile/migrations/0042_auto_20170225_1639.py
|
6f002bfd9f51f8ca97ff8153953db520d0afe6e9
|
[] |
no_license
|
vintkor/whitemandarin
|
8ea9022b889fac718e0858873a07c586cf8da729
|
5afcfc5eef1bb1cc2febf519b04a4819a7b9648f
|
refs/heads/master
| 2021-05-06T03:35:09.367375 | 2017-12-20T15:43:08 | 2017-12-20T15:43:08 | 114,904,110 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 565 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-02-25 14:39
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    dependencies = [
        ('profile', '0041_auto_20170217_1405'),
    ]
    operations = [
        migrations.AlterField(
            model_name='user',
            name='date_of_birth',
            # NOTE(review): this auto-generated default is the fixed moment
            # makemigrations ran (2017-02-25), not "now" at save time —
            # typical of migrations created from a callable-less default.
            field=models.DateField(default=datetime.datetime(2017, 2, 25, 14, 39, 18, 342403, tzinfo=utc)),
        ),
    ]
|
[
"[email protected]"
] | |
6eb0d30982c51c95fe8b185a70ce7a5e912cdd20
|
2da72c9f9bbb0b5db33710cddbdee28503e5a606
|
/UCI/pyQT-matplot-example 2.py
|
0228e2bce7c9d982c2ca7970f732c4860c0e6cc5
|
[] |
no_license
|
gddickinson/python_code
|
2e71fb22b929cb26c2a1456b11dc515af048c441
|
dbb20e171fb556e122350fb40e12cc76adbb9a66
|
refs/heads/master
| 2022-10-26T15:20:40.709820 | 2022-10-11T16:06:27 | 2022-10-11T16:06:27 | 44,060,963 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,689 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 23 16:50:19 2015
@author: George
"""
import sys
from PyQt4 import QtGui
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
# BUGFIX: NavigationToolbar2QTAgg lives in the qt4agg backend module,
# not the top-level matplotlib package — the original
# `from matplotlib import NavigationToolbar2QTAgg` raised ImportError.
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QTAgg as NavigationToolbar
import matplotlib.pyplot as plt
import random
class Window(QtGui.QDialog):
    """A dialog holding a matplotlib canvas, its toolbar and a Plot button."""
    def __init__(self, parent=None):
        super(Window, self).__init__(parent)
        # a figure instance to plot on
        self.figure = plt.figure()
        # this is the Canvas Widget that displays the `figure`
        # it takes the `figure` instance as a parameter to __init__
        self.canvas = FigureCanvas(self.figure)
        # this is the Navigation widget
        # it takes the Canvas widget and a parent
        self.toolbar = NavigationToolbar(self.canvas, self)
        # Just some button connected to `plot` method
        self.button = QtGui.QPushButton('Plot')
        self.button.clicked.connect(self.plot)
        # set the layout
        layout = QtGui.QVBoxLayout()
        layout.addWidget(self.toolbar)
        layout.addWidget(self.canvas)
        layout.addWidget(self.button)
        self.setLayout(layout)
    def plot(self):
        ''' plot some random stuff '''
        # random data
        data = [random.random() for i in range(10)]
        # create an axis
        ax = self.figure.add_subplot(111)
        # discards the old graph
        # NOTE(review): Axes.hold() was deprecated and later removed in
        # matplotlib 2.x/3.x — this only works with the old matplotlib
        # this 2015-era script targets.
        ax.hold(False)
        # plot data
        ax.plot(data, '*-')
        # refresh canvas
        self.canvas.draw()
if __name__ == '__main__':
    # Standard Qt bootstrap: create the app, show the dialog, enter the
    # event loop, and exit with the loop's return code.
    app = QtGui.QApplication(sys.argv)
    main = Window()
    main.show()
    sys.exit(app.exec_())
|
[
"[email protected]"
] | |
bd8527aee37e224f869349bec2f6fb2bdadc1d5b
|
a140fe192fd643ce556fa34bf2f84ddbdb97f091
|
/.history/예외처리_20200709144804.py
|
9b8a16ecb397905296a8e33b88abcd084eadb309
|
[] |
no_license
|
sangha0719/py-practice
|
826f13cb422ef43992a69f822b9f04c2cb6d4815
|
6d71ce64bf91cc3bccee81378577d84ba9d9c121
|
refs/heads/master
| 2023-03-13T04:40:55.883279 | 2021-02-25T12:02:04 | 2021-02-25T12:02:04 | 342,230,484 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 223 |
py
|
# Division-only calculator (exception-handling exercise).
# BUGFIX: the original snippet was an unfinished editor-history snapshot —
# the try block had no except clause (SyntaxError) and the final print
# referenced an undefined name `n`.  Completed minimally below.
try:
    print("나누기 전용 계산기입니다.")
    num1 = int(input("첫 번째 숫자를 입력하세요 : "))
    num2 = int(input("두 번째 숫자를 입력하세요 : "))
    print("{0} / {1} = {2}".format(num1, num2, int(num1 / num2)))
except ValueError:
    # Non-numeric input.
    print("에러! 잘못된 값을 입력하였습니다.")
except ZeroDivisionError as err:
    # Division by zero.
    print(err)
|
[
"[email protected]"
] | |
cc878c320008f8db66aa030c2f2f6bc3e205a9cc
|
6d1728bf105a7d6481d0bbca2b88f4478e0632d9
|
/study/ch1/area.py
|
1a498690da37f4f891110371603717db2e529035
|
[] |
no_license
|
Phantomn/Python
|
00c63aceb2d4aa0db71fe5e33fe8b5159b41aadd
|
12808adf4b52c60cfe94befb6daa1e8187224beb
|
refs/heads/Python
| 2022-11-09T16:49:49.165884 | 2019-08-05T07:30:07 | 2019-08-05T07:30:07 | 44,149,995 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 191 |
py
|
# Read a rectangle's side lengths from stdin and print its area.
# (Removed the dead `horizon=0` / `vertical=0` pre-initializations:
# both names are unconditionally rebound from input() below.)
print("Input horizon length : ", end="")
horizon = int(input())
print("Input vertical length : ", end="")
vertical = int(input())
print("rectangle is %d." % (horizon * vertical))
|
[
"[email protected]"
] | |
0baadeafe82ed3f2330579af9aeb7806db738dc3
|
7f8c24fe161fee3f32e206e013ea89fc8eb9a50a
|
/example_api/urls.py
|
4c07dd5d1421c42a6038b536a60b6f7e7826f9cc
|
[] |
no_license
|
vnitikesh/rest-registration
|
a04f4cf643766d3844e7a63e0616157d1c1f1e9a
|
0578589f6cb9b9138fa5915395bf616de57eaf0b
|
refs/heads/main
| 2023-02-18T12:32:40.392439 | 2021-01-21T23:55:23 | 2021-01-21T23:55:23 | 331,453,447 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 575 |
py
|
from django.urls import path
from . import views
from rest_framework.routers import DefaultRouter
# NOTE(review): DefaultRouter is imported but no router is created or
# registered here — confirm whether viewset routes were meant to be added.
urlpatterns = [
    path('category/', views.CategoryListView.as_view(), name = 'category-list'),
    path('category/<int:pk>/', views.CategoryDetailView.as_view(), name = 'category-detail'),
    path('product/', views.ProductRecordView.as_view(), name = 'product-list'),
    path('cart/', views.CartViewSet.as_view(), name = 'cart'),
    path('checkout/', views.CheckoutView.as_view(), name = 'checkout'),
    #path('order/', views.OrderViewSet.as_view(), name = 'order')
]
|
[
"[email protected]"
] | |
637aebc9dc0ee30985a63efc692a3f892fbed308
|
c6f9a46393048add6fad888d382978b9be12dd4c
|
/python/ql/test/experimental/dataflow/strange-pointsto-interaction-investigation/src/urandom_problem.py
|
d4a06529cf60991084b7d954d234703134c192b9
|
[
"MIT",
"LicenseRef-scancode-python-cwi",
"LicenseRef-scancode-other-copyleft",
"GPL-1.0-or-later",
"LicenseRef-scancode-free-unknown",
"Python-2.0"
] |
permissive
|
luchua-bc/ql
|
6e9480e8c92cbb12570fcc7f65366bfdd54dad06
|
a1d9228a66cb80329041fa8d95b08ce5697dec54
|
refs/heads/master
| 2023-01-23T17:11:54.776916 | 2022-07-20T14:36:37 | 2022-07-20T14:36:37 | 248,313,302 | 4 | 0 |
MIT
| 2023-01-16T09:13:30 | 2020-03-18T18:35:48 |
CodeQL
|
UTF-8
|
Python
| false | false | 920 |
py
|
# These are defined so that we can evaluate the test code.
# NOTE: this is a CodeQL dataflow test fixture.  The trailing "# $ ..."
# comments are inline test expectations, and their "l:-N" parts are
# RELATIVE line offsets — do not insert or remove lines below this
# header or the expectations will point at the wrong lines.
NONSOURCE = "not a source"
SOURCE = "source"
def is_source(x):
    return x == "source" or x == b"source" or x == 42 or x == 42.0 or x == 42j
def SINK(x):
    if is_source(x):
        print("OK")
    else:
        print("Unexpected flow", x)
def SINK_F(x):
    if is_source(x):
        print("Unexpected flow", x)
    else:
        print("OK")
# ------------------------------------------------------------------------------
# Actual tests
# ------------------------------------------------------------------------------
def give_src():
    return SOURCE
foo = give_src()
SINK(foo) # $ flow="SOURCE, l:-3 -> foo"
import os
cond = os.urandom(1)[0] > 128 # $ unresolved_call=os.urandom(..)
if cond:
    pass
if cond:
    pass
foo = give_src() # $ unresolved_call=give_src()
SINK(foo) # $ unresolved_call=SINK(..) MISSING: flow="SOURCE, l:-15 -> foo"
|
[
"[email protected]"
] | |
bd1236dee44cc218e34f71aa057ce6aeaae640d8
|
4f365fbdfd4701c3a294dfba17c1377d4eb369d8
|
/jinja2htmlcompress.py
|
507c7509a9a3a8418fcb4ce187fb21809e76fc26
|
[
"BSD-3-Clause"
] |
permissive
|
Orvillar/jinja2-htmlcompress
|
4e725f9b6ceb6f327d4247d7dab6f55d344039ea
|
b34dc409762aaf205ccd59e37ad4b3dc5331904d
|
refs/heads/master
| 2020-04-07T16:06:54.607802 | 2018-11-21T08:31:21 | 2018-11-21T08:31:21 | 158,515,466 | 0 | 0 |
NOASSERTION
| 2018-11-21T08:29:20 | 2018-11-21T08:29:19 | null |
UTF-8
|
Python
| false | false | 6,354 |
py
|
# -*- coding: utf-8 -*-
"""
jinja2htmlcompress
~~~~~~~~~~~~~~~~~~
A Jinja2 extension that eliminates useless whitespace at template
compilation time without extra overhead.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import re
from jinja2.ext import Extension
from jinja2.lexer import Token, describe_token
from jinja2 import TemplateSyntaxError
_tag_re = re.compile(r'(?:<(/?)([a-zA-Z0-9_-]+)\s*|(>\s*))(?s)')
_ws_normalize_re = re.compile(r'[ \t\r\n]+')
class StreamProcessContext(object):
    """Shared mutable state for one pass over a template token stream."""

    def __init__(self, stream):
        self.stream = stream   # the jinja2 token stream being rewritten
        self.token = None      # token currently under inspection
        self.stack = []        # stack of currently-open HTML tag names

    def fail(self, message):
        """Raise a TemplateSyntaxError located at the current token."""
        raise TemplateSyntaxError(
            message, self.token.lineno, self.stream.name, self.stream.filename)
def _make_dict_from_listing(listing):
rv = {}
for keys, value in listing:
for key in keys:
rv[key] = value
return rv
class HTMLCompress(Extension):
    """Jinja2 extension that strips redundant whitespace from template HTML
    at compile time, tracking open tags so protected content is untouched."""

    # Content of these tags must never be re-wrapped (scripts, preformatted text).
    isolated_elements = set(['script', 'style', 'noscript', 'textarea'])
    # Tags with no closing counterpart; never pushed on the open-tag stack.
    void_elements = set(['br', 'img', 'area', 'hr', 'param', 'input',
                         'embed', 'col'])
    block_elements = set(['div', 'p', 'form', 'ul', 'ol', 'li', 'table', 'tr',
                          'tbody', 'thead', 'tfoot', 'tr', 'td', 'th', 'dl',
                          'dt', 'dd', 'blockquote', 'h1', 'h2', 'h3', 'h4',
                          'h5', 'h6', 'pre'])
    # HTML implicit-close rules: opening a key tag closes any mapped open tag
    # ('#block' stands for "any block-level element").
    breaking_rules = _make_dict_from_listing([
        (['p'], set(['#block'])),
        (['li'], set(['li'])),
        (['td', 'th'], set(['td', 'th', 'tr', 'tbody', 'thead', 'tfoot'])),
        (['tr'], set(['tr', 'tbody', 'thead', 'tfoot'])),
        (['thead', 'tbody', 'tfoot'], set(['thead', 'tbody', 'tfoot'])),
        (['dd', 'dt'], set(['dl', 'dt', 'dd']))
    ])

    def is_isolated(self, stack):
        # True if any enclosing open tag forbids whitespace normalization.
        for tag in reversed(stack):
            if tag in self.isolated_elements:
                return True
        return False

    def is_breaking(self, tag, other_tag):
        # Does opening `tag` implicitly close the currently open `other_tag`?
        breaking = self.breaking_rules.get(other_tag)
        return breaking and (tag in breaking or
            ('#block' in breaking and tag in self.block_elements))

    def enter_tag(self, tag, ctx):
        # Pop any implicitly-closed tags first, then push non-void tags.
        while ctx.stack and self.is_breaking(tag, ctx.stack[-1]):
            self.leave_tag(ctx.stack[-1], ctx)
        if tag not in self.void_elements:
            ctx.stack.append(tag)

    def leave_tag(self, tag, ctx):
        if not ctx.stack:
            ctx.fail('Tried to leave "%s" but something closed '
                     'it already' % tag)
        if tag == ctx.stack[-1]:
            ctx.stack.pop()
            return
        # Closing tag is not on top: unwind through implicitly-closable tags.
        for idx, other_tag in enumerate(reversed(ctx.stack)):
            if other_tag == tag:
                for num in xrange(idx + 1):  # NOTE(review): xrange is Python 2-only
                    ctx.stack.pop()
            elif not self.breaking_rules.get(other_tag):
                break

    def normalize(self, ctx):
        """Collapse whitespace in ``ctx.token.value`` outside isolated tags."""
        pos = 0
        buffer = []

        def write_data(value):
            # Only normalize when no enclosing tag is in isolated_elements.
            if not self.is_isolated(ctx.stack):
                value = _ws_normalize_re.sub(' ', value.strip())
            buffer.append(value)

        for match in _tag_re.finditer(ctx.token.value):
            closes, tag, sole = match.groups()
            preamble = ctx.token.value[pos:match.start()]
            write_data(preamble)
            if sole:
                # A bare ">" plus trailing whitespace; normalize the whitespace.
                write_data(sole)
            else:
                buffer.append(match.group())
                # Update the tag stack: "</tag" leaves, "<tag" enters.
                (closes and self.leave_tag or self.enter_tag)(tag, ctx)
            pos = match.end()
        write_data(ctx.token.value[pos:])
        return u''.join(buffer)

    def filter_stream(self, stream):
        # Rewrite only 'data' tokens (literal template text); pass others through.
        ctx = StreamProcessContext(stream)
        for token in stream:
            if token.type != 'data':
                yield token
                continue
            ctx.token = token
            value = self.normalize(ctx)
            yield Token(token.lineno, 'data', value)
class SelectiveHTMLCompress(HTMLCompress):
    """Like HTMLCompress, but only between {% strip %} and {% endstrip %}."""

    def filter_stream(self, stream):
        ctx = StreamProcessContext(stream)
        strip_depth = 0  # nesting depth of currently open {% strip %} blocks
        while 1:
            if stream.current.type == 'block_begin':
                # Consume a whole {% strip %} / {% endstrip %} tag ourselves so
                # it never reaches the parser.
                if stream.look().test('name:strip') or \
                   stream.look().test('name:endstrip'):
                    stream.skip()
                    if stream.current.value == 'strip':
                        strip_depth += 1
                    else:
                        strip_depth -= 1
                        if strip_depth < 0:
                            ctx.fail('Unexpected tag endstrip')
                    stream.skip()
                    if stream.current.type != 'block_end':
                        ctx.fail('expected end of block, got %s' %
                                 describe_token(stream.current))
                    stream.skip()
            if strip_depth > 0 and stream.current.type == 'data':
                ctx.token = stream.current
                value = self.normalize(ctx)
                yield Token(stream.current.lineno, 'data', value)
            else:
                yield stream.current
            stream.next()  # NOTE(review): .next() is the Python 2 iterator API
def test():
    """Manual smoke test: render sample templates with both extensions.

    NOTE(review): uses Python 2 print statements; run under Python 2 only.
    """
    from jinja2 import Environment
    env = Environment(extensions=[HTMLCompress])
    tmpl = env.from_string('''
<html>
<head>
<title>{{ title }}</title>
</head>
<script type=text/javascript>
if (foo < 42) {
document.write('Foo < Bar');
}
</script>
<body>
<li><a href="{{ href }}">{{ title }}</a><br>Test Foo
<li><a href="{{ href }}">{{ title }}</a><img src=test.png>
</body>
</html>
''')
    print tmpl.render(title=42, href='index.html')
    # Selective variant: only {% strip %}...{% endstrip %} regions are compressed.
    env = Environment(extensions=[SelectiveHTMLCompress])
    tmpl = env.from_string('''
Normal <span> unchanged </span> stuff
{% strip %}Stripped <span class=foo > test </span>
<a href="foo"> test </a> {{ foo }}
Normal <stuff> again {{ foo }} </stuff>
<p>
Foo<br>Bar
Baz
<p>
Moep <span>Test</span> Moep
</p>
{% endstrip %}
''')
    print tmpl.render(foo=42)
|
[
"[email protected]"
] | |
68bda07db08e3d6b58a8cbb0bf86ce63b584f900
|
5a1f77b71892745656ec9a47e58a078a49eb787f
|
/4_Backwoods_Forest/140-A_Fine_Mint/fine_mint.py
|
f17553bc8c35e99e05fe9b3bbd9916adfeaa85f8
|
[
"MIT"
] |
permissive
|
ripssr/Code-Combat
|
78776e7e67c033d131e699dfeffb72ca09fd798e
|
fbda1ac0ae4a2e2cbfce21492a2caec8098f1bef
|
refs/heads/master
| 2020-06-11T20:17:59.817187 | 2019-07-21T09:46:04 | 2019-07-21T09:46:04 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 338 |
py
|
def pickUpCoin():
    # Walk to the nearest item, if any. `hero` is supplied by the game runtime.
    coin = hero.findNearestItem()
    if coin:
        hero.moveXY(coin.pos.x, coin.pos.y)
def attackEnemy():
    # Prefer the "cleave" ability when it is off cooldown; otherwise basic attack.
    enemy = hero.findNearestEnemy()
    if enemy:
        if hero.isReady("cleave"):
            hero.cleave(enemy)
        else:
            hero.attack(enemy)
# Main game loop: fight first, then collect loot.
while True:
    attackEnemy()
    pickUpCoin()
|
[
"[email protected]"
] | |
203c4c5c65469b178d194de6b85feec2a5037e9a
|
129941a1fb7c0bbd9969f0dd8843b057ce9f3666
|
/VAJets/PKUTreeMaker/test/Wcrab/crab3_analysismu.py
|
09dc3efeef0cc17499456da57454ef8dcc335da1
|
[] |
no_license
|
PKUHEPEWK/VBS_WGamma
|
7cf43f136dd92777ab7a8a742c163e222b1f4dbf
|
0f94abb2d4303b1c08d62971a74f25b100cbe042
|
refs/heads/master
| 2020-03-25T04:36:21.119377 | 2019-07-15T02:56:32 | 2019-07-15T02:56:32 | 143,404,007 | 0 | 4 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,416 |
py
|
from WMCore.Configuration import Configuration
# CRAB3 task configuration for the SingleMuon Run2016B dataset.
config = Configuration()

config.section_("General")
config.General.requestName = 'SMu16B-v1'  # task/output tag
config.General.transferLogs = True

config.section_("JobType")
config.JobType.pluginName = 'Analysis'
# Jet-energy-correction text files shipped to the worker nodes.
config.JobType.inputFiles =['Summer16_23Sep2016BCDV4_DATA_L1FastJet_AK4PFchs.txt','Summer16_23Sep2016BCDV4_DATA_L2Relative_AK4PFchs.txt','Summer16_23Sep2016BCDV4_DATA_L3Absolute_AK4PFchs.txt','Summer16_23Sep2016BCDV4_DATA_L2L3Residual_AK4PFchs.txt','Summer16_23Sep2016BCDV4_DATA_L1FastJet_AK4PFPuppi.txt','Summer16_23Sep2016BCDV4_DATA_L2Relative_AK4PFPuppi.txt','Summer16_23Sep2016BCDV4_DATA_L3Absolute_AK4PFPuppi.txt','Summer16_23Sep2016BCDV4_DATA_L2L3Residual_AK4PFPuppi.txt']
# Name of the CMSSW configuration file
config.JobType.psetName = 'analysis_data.py'
config.JobType.allowUndistributedCMSSW = True

config.section_("Data")
config.Data.inputDataset = '/SingleMuon/Run2016B-03Feb2017_ver2-v2/MINIAOD'
config.Data.inputDBS = 'global'
config.Data.splitting = 'LumiBased'
config.Data.unitsPerJob = 40
# Certified-luminosity ("golden JSON") mask for 2016 data.
config.Data.lumiMask = 'Cert_271036-284044_13TeV_23Sep2016ReReco_Collisions16_JSON.txt'
#config.Data.runRange = '246908-258750'
#config.Data.outLFNDirBase = '/store/user/%s/' % (getUsernameFromSiteDB())
config.Data.publication = False
config.Data.outputDatasetTag = 'SMu16B-v1'

config.section_("Site")
config.Site.storageSite = 'T3_US_FNALLPC' #T2_CN_Beijing'
|
[
"[email protected]"
] | |
a7b174b85eba3c6f121e88eb9985de14f93428b9
|
14ac991bba2eb7d59a1d76db792b7689316f8060
|
/leetcode/00179.py
|
2097fd3046480dd7c91a1af857c955626b82b82d
|
[] |
no_license
|
munagekar/cp
|
bde88fa565a7e2158ebe0f2611c4718a3d2970f1
|
c25d29f68943e3721233e177abe13068e5f40e4b
|
refs/heads/master
| 2021-07-04T05:00:02.511874 | 2021-05-30T14:30:05 | 2021-05-30T14:30:05 | 240,286,072 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 382 |
py
|
from itertools import zip_longest
from functools import cmp_to_key
def cmp(a, b):
    """Comparator for largest-number ordering of digit strings.

    Returns 1 when placing *a* before *b* yields the larger concatenation,
    -1 when *b* should come first, and 0 when the order does not matter.

    Bug fix: the original returned -1 for equal concatenations, so both
    cmp(a, b) and cmp(b, a) claimed "less than" — an inconsistent comparator
    that violates the contract expected by functools.cmp_to_key.
    """
    if a + b > b + a:
        return 1
    if a + b < b + a:
        return -1
    return 0
class Solution:
    # NOTE(review): "List" is not imported in this file; quoting the annotation
    # defers its evaluation so the class body does not raise NameError.
    def largestNumber(self, nums: "List[int]") -> str:
        """Concatenate *nums* (as strings) into the largest possible number."""
        # Sort by the pairwise-concatenation comparator `cmp` defined above.
        nums = map(str, nums)
        nums = sorted(nums, key=cmp_to_key(cmp), reverse=True)
        nums = "".join(nums)
        # lstrip handles the all-zeros case, e.g. [0, 0] -> "0".
        return nums.lstrip("0") or "0"
|
[
"[email protected]"
] | |
62a61d7f251b2dd796c2a0864e338c6272236b1a
|
87828431072e3c60a92dc274b078d7cf1e5705be
|
/back_python/account/migrations/0001_initial.py
|
34d3acacd2cf509d472797922ba4727ed9535d39
|
[] |
no_license
|
cash2one/habit
|
90adfd80427a0c0d04104ea5cf8123cf025b2d8b
|
3782e498e1e40d6b638aaf2c7c1ac087c0739a36
|
refs/heads/master
| 2021-01-19T12:32:51.627847 | 2017-04-11T15:41:28 | 2017-04-11T15:41:28 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,302 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-25 08:49
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Account ledger table.
    # NOTE(review): generated code — change the schema via new migrations,
    # not by editing this file in place.

    initial = True

    dependencies = [
        ('activity', '0013_auto_20170125_1649'),
    ]

    operations = [
        migrations.CreateModel(
            name='Account',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tradeDate', models.DateField(auto_now=True, verbose_name='时间')),
                ('tradeType', models.CharField(choices=[('fee', '套餐服务费'), ('deposit', '押金'), ('milyInput', '套餐囤米'), ('milyInputByDeposit', '押金囤米'), ('milyOutput', '米粒打赏'), ('milyOutputByDonate', '米粒捐赠'), ('feedBack', '打卡奖励米粒'), ('feedBackReturnDeposit', '打卡返还押金'), ('aveDeposit', '平均分配懒人押金')], max_length=50, verbose_name='类型')),
                ('fee', models.IntegerField(default=0, verbose_name='套餐服务费')),
                ('deposit', models.IntegerField(default=0, verbose_name='囤米押金')),
                ('milyInput', models.IntegerField(default=0, verbose_name='套餐囤米')),
                ('milyInputByDeposit', models.IntegerField(default=0, verbose_name='押金囤米')),
                ('milyOutput', models.IntegerField(default=0, verbose_name='米粒打赏')),
                ('milyOutputByDonate', models.IntegerField(default=0, verbose_name='米粒捐赠')),
                ('feedBack', models.IntegerField(default=0, verbose_name='打卡奖励米粒')),
                ('feedBackReturnDeposit', models.IntegerField(default=0, verbose_name='打卡奖励押金')),
                ('aveDeposit', models.IntegerField(default=0, verbose_name='平均分配懒人押金')),
                ('createdTime', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('updatedTime', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
                ('activity', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='activity.Activity', verbose_name='活动')),
            ],
        ),
    ]
|
[
"[email protected]"
] | |
72e87ff5fac87b45a4fbe10d20bbd6dc95907e38
|
242ebcb7220c2e16c141a6bea4a09c7cb5e4287d
|
/accounts/forms.py
|
83f3c4a31f7b0a3a43e78a73a2980318f2d55c71
|
[] |
no_license
|
olivx/estudos_crud
|
06ed8c269a4c36db3579daf6d6aef5e7d49dc5f9
|
24af031ed44a7c6cf567368556d368fe58ab1090
|
refs/heads/master
| 2021-01-11T09:28:49.355388 | 2017-03-03T15:17:25 | 2017-03-03T15:17:25 | 81,199,807 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,126 |
py
|
from django import forms
from django.contrib.auth import authenticate
from accounts.models import User
from django.utils.translation import ugettext_lazy as _
class RegisterForm(forms.ModelForm):
    """Sign-up form collecting username/email plus a double-entered password."""

    password1 = forms.CharField(max_length=30, widget=forms.PasswordInput, required=True)
    password2 = forms.CharField(max_length=30, widget=forms.PasswordInput, required=True)

    def clean_password2(self):
        """Validate that the two password entries match."""
        password1 = self.cleaned_data['password1']
        password2 = self.cleaned_data['password2']
        if password1 and password2:
            if password1 != password2:
                raise forms.ValidationError(_("The two password fields didn't match."))
        return self.cleaned_data

    class Meta:
        model = User
        fields = ('username', 'email', 'password1', 'password2')

    def save(self, commit=True):
        """Create the user with a properly hashed password.

        Bug fix: the original assigned the form's username into ``user.email``
        and then immediately overwrote it with the email, so the entered
        username was silently discarded.
        """
        user = super(RegisterForm, self).save(commit=False)
        user.username = self.cleaned_data['username']
        user.email = self.cleaned_data['email']
        user.set_password(self.cleaned_data['password1'])
        if commit:
            user.save()
        return user
class AuthenticanUserForm(forms.Form):
    """Email/password login form.

    NOTE(review): the class name has a typo ("Authentican"); kept as-is
    because external callers reference it by this name.
    """

    email = forms.EmailField(label='Email', max_length=30, required=True)
    password = forms.CharField(label='Password', max_length=30, required=True, widget=forms.PasswordInput)

    # Translatable validation messages, keyed by failure reason.
    error_messages = {
        'invalid_login': _(
            "Please enter a correct %(email)s and password. Note that both "
            "fields may be case-sensitive."
        ),
        'inactive': _("This account is inactive."),
        'email_confirmation': _(
            'this email is not confirmed yet, please confirm the your eamil and try again'
        ),
    }

    def clean(self):
        # Authenticate during validation; the view reads self.user afterwards.
        email = self.cleaned_data.get('email')
        password = self.cleaned_data.get('password')
        if email and password:
            self.user = authenticate(email=email, password=password)
            if self.user is None:
                raise forms.ValidationError(
                    self.error_messages['invalid_login'],
                    code='invalid_login',
                    params={'email': 'Email'},
                )
        return self.cleaned_data

    def confirm_login_allowed(self, user):
        """
        Controls whether the given User may log in. This is a policy setting,
        independent of end-user authentication. This default behavior is to
        allow login by active users, and reject login by inactive users.
        If the given user cannot log in, this method should raise a
        ``forms.ValidationError``.
        If the given user may log in, this method should return None.
        """
        if not user.is_active:
            raise forms.ValidationError(
                self.error_messages['inactive'],
                code='inactive',
            )
        # Additionally require a confirmed email address before allowing login.
        if not user.profile.email_confirmation:
            raise forms.ValidationError(
                self.error_messages['email_confirmation'],
                code='email_confirmation'
            )

    class Meta:
        fields = ('email', 'password')
|
[
"[email protected]"
] | |
1cd644fe4370089fe5cf86ae2fc2d3fa316e8e2e
|
e629d61db2f08f66cf46d934ab0f87fa1666de05
|
/backend/lively_heart_25130/urls.py
|
5c32c3d6b9e17dce8e7eb899ed0a90b4b5455ae7
|
[] |
no_license
|
crowdbotics-apps/lively-heart-25130
|
ec80559da8d6b168df1ce75415c5d6b916c97ee1
|
ed33785297cbb8f794034de1bc3c7fb81bdbe048
|
refs/heads/master
| 2023-03-24T16:57:41.146127 | 2021-03-19T21:41:18 | 2021-03-19T21:41:18 | 349,561,856 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,235 |
py
|
"""lively_heart_25130 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
# Core URL routes: app endpoints, auth, admin and REST registration.
urlpatterns = [
    path("", include("home.urls")),
    path("accounts/", include("allauth.urls")),
    path("modules/", include("modules.urls")),
    path("api/v1/", include("home.api.v1.urls")),
    path("admin/", admin.site.urls),
    path("users/", include("users.urls", namespace="users")),
    path("rest-auth/", include("rest_auth.urls")),
    # Override email confirm to use allauth's HTML view instead of rest_auth's API view
    path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
    path("rest-auth/registration/", include("rest_auth.registration.urls")),
]

# Branding for the Django admin site.
admin.site.site_header = "Lively Heart"
admin.site.site_title = "Lively Heart Admin Portal"
admin.site.index_title = "Lively Heart Admin"

# swagger
api_info = openapi.Info(
    title="Lively Heart API",
    default_version="v1",
    description="API documentation for Lively Heart App",
)
schema_view = get_schema_view(
    api_info,
    public=True,
    permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
    path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]

# Serve the SPA index for the root and any unmatched route (must stay last).
urlpatterns += [path("", TemplateView.as_view(template_name='index.html'))]
urlpatterns += [re_path(r"^(?:.*)/?$",
                        TemplateView.as_view(template_name='index.html'))]
|
[
"[email protected]"
] | |
ef679fa89caf7d38e7aa2766c74680ff885e8be4
|
ae9bb7babce2a0349ae932985cf418a03057c670
|
/ProgramAndDataStructure/list/__init__.py
|
50e5397d5291650f1e1f4a4e99a244b430ba0f89
|
[] |
no_license
|
Veraun/HogwartsSDET17-1
|
d2592fcb4c9c63724c19bcf9edde349ebcd2c8af
|
6648dbfb640b065ff2c76cb6889a8f9e4f124b91
|
refs/heads/main
| 2023-07-02T05:20:32.161248 | 2021-08-06T03:55:13 | 2021-08-06T03:55:13 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 157 |
py
|
'''
#!/usr/bin/python3
# -*- coding: utf-8 -*-
@author: wangwei
@project: HogwartsSDET17
@file: __init__.py.py
@time: 2021/5/20 19:54
@Email: Warron.Wang
'''
|
[
"[email protected]"
] | |
337238a653f2c421c1f017238cbef58842b56a43
|
567ecf4ea5afbd7eb3003f7e14e00c7b9289b9c6
|
/ax/storage/json_store/decoders.py
|
7a586e03ddb3b32b0a5780c941e67e791e29d11a
|
[
"MIT"
] |
permissive
|
danielrjiang/Ax
|
f55ef168a59381b5a03c6d51bc394f6c72ed0f39
|
43014b28683b3037b5c7307869cb9b75ca31ffb6
|
refs/heads/master
| 2023-03-31T12:19:47.118558 | 2019-12-02T16:47:39 | 2019-12-02T16:49:36 | 225,493,047 | 0 | 0 |
MIT
| 2019-12-03T00:09:52 | 2019-12-03T00:09:51 | null |
UTF-8
|
Python
| false | false | 3,501 |
py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from datetime import datetime
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from ax.core.arm import Arm
from ax.core.base_trial import TrialStatus
from ax.core.batch_trial import AbandonedArm, BatchTrial, GeneratorRunStruct
from ax.core.generator_run import GeneratorRun
from ax.core.runner import Runner
from ax.core.trial import Trial
if TYPE_CHECKING:
# import as module to make sphinx-autodoc-typehints happy
from ax import core # noqa F401 # pragma: no cover
def batch_trial_from_json(
    experiment: "core.experiment.Experiment",
    index: int,
    trial_type: Optional[str],
    status: TrialStatus,
    time_created: datetime,
    time_completed: Optional[datetime],
    time_staged: Optional[datetime],
    time_run_started: Optional[datetime],
    abandoned_reason: Optional[str],
    run_metadata: Optional[Dict[str, Any]],
    generator_run_structs: List[GeneratorRunStruct],
    runner: Optional[Runner],
    abandoned_arms_metadata: Dict[str, AbandonedArm],
    num_arms_created: int,
    status_quo: Optional[Arm],
    status_quo_weight_override: float,
    optimize_for_power: Optional[bool],
) -> BatchTrial:
    """Load Ax BatchTrial from JSON.
    Other classes don't need explicit deserializers, because we can just use
    their constructors (see decoder.py). However, the constructor for Batch
    does not allow us to exactly recreate an existing object.
    """
    batch = BatchTrial(experiment=experiment)
    # Restore private state directly; the constructor cannot accept these.
    batch._index = index
    batch._trial_type = trial_type
    batch._status = status
    batch._time_created = time_created
    batch._time_completed = time_completed
    batch._time_staged = time_staged
    batch._time_run_started = time_run_started
    batch._abandoned_reason = abandoned_reason
    batch._run_metadata = run_metadata or {}  # tolerate null metadata in JSON
    batch._generator_run_structs = generator_run_structs
    batch._runner = runner
    batch._abandoned_arms_metadata = abandoned_arms_metadata
    batch._num_arms_created = num_arms_created
    batch._status_quo = status_quo
    batch._status_quo_weight_override = status_quo_weight_override
    batch.optimize_for_power = optimize_for_power
    return batch
def trial_from_json(
    experiment: "core.experiment.Experiment",
    index: int,
    trial_type: Optional[str],
    status: TrialStatus,
    time_created: datetime,
    time_completed: Optional[datetime],
    time_staged: Optional[datetime],
    time_run_started: Optional[datetime],
    abandoned_reason: Optional[str],
    run_metadata: Optional[Dict[str, Any]],
    generator_run: GeneratorRun,
    runner: Optional[Runner],
    num_arms_created: int,
) -> Trial:
    """Load Ax trial from JSON.
    Other classes don't need explicit deserializers, because we can just use
    their constructors (see decoder.py). However, the constructor for Trial
    does not allow us to exactly recreate an existing object.
    """
    trial = Trial(experiment=experiment, generator_run=generator_run)
    # Restore private state directly; the constructor cannot accept these.
    trial._index = index
    trial._trial_type = trial_type
    trial._status = status
    trial._time_created = time_created
    trial._time_completed = time_completed
    trial._time_staged = time_staged
    trial._time_run_started = time_run_started
    trial._abandoned_reason = abandoned_reason
    trial._run_metadata = run_metadata or {}  # tolerate null metadata in JSON
    trial._runner = runner
    trial._num_arms_created = num_arms_created
    return trial
|
[
"[email protected]"
] | |
856043c72dfa18187c13e630e6c9e58fcc3c660b
|
a56a74b362b9263289aad96098bd0f7d798570a2
|
/venv/lib/python3.8/site-packages/matplotlib/_pylab_helpers.py
|
2407b573c4aabbe64132bc3a0ae71163132785bc
|
[
"MIT"
] |
permissive
|
yoonkt200/ml-theory-python
|
5812d06841d30e1068f6592b5730a40e87801313
|
7643136230fd4f291b6e3dbf9fa562c3737901a2
|
refs/heads/master
| 2022-12-21T14:53:21.624453 | 2021-02-02T09:33:07 | 2021-02-02T09:33:07 | 132,319,537 | 13 | 14 |
MIT
| 2022-12-19T17:23:57 | 2018-05-06T08:17:45 |
Python
|
UTF-8
|
Python
| false | false | 3,445 |
py
|
"""
Manage figures for pyplot interface.
"""
import atexit
import gc
class Gcf:
    """
    Singleton to manage a set of integer-numbered figures.
    This class is never instantiated; it consists of two class
    attributes (a list and a dictionary), and a set of static
    methods that operate on those attributes, accessing them
    directly as class attributes.
    Attributes
    ----------
    figs
        dictionary of the form {*num*: *manager*, ...}
    _activeQue
        list of *managers*, with active one at the end
    """
    _activeQue = []
    figs = {}

    @classmethod
    def get_fig_manager(cls, num):
        """
        If figure manager *num* exists, make it the active
        figure and return the manager; otherwise return *None*.
        """
        manager = cls.figs.get(num, None)
        if manager is not None:
            cls.set_active(manager)
        return manager

    @classmethod
    def destroy(cls, num):
        """
        Try to remove all traces of figure *num*.
        In the interactive backends, this is bound to the
        window "destroy" and "delete" events.
        """
        if not cls.has_fignum(num):
            return
        manager = cls.figs[num]
        # Disconnect the button-press callback before tearing down the window.
        manager.canvas.mpl_disconnect(manager._cidgcf)
        cls._activeQue.remove(manager)
        del cls.figs[num]
        manager.destroy()
        # Collect the young generation promptly to release GUI resources.
        gc.collect(1)

    @classmethod
    def destroy_fig(cls, fig):
        "*fig* is a Figure instance"
        num = next((manager.num for manager in cls.figs.values()
                    if manager.canvas.figure == fig), None)
        if num is not None:
            cls.destroy(num)

    @classmethod
    def destroy_all(cls):
        # this is need to ensure that gc is available in corner cases
        # where modules are being torn down after install with easy_install
        import gc  # noqa
        for manager in list(cls.figs.values()):
            manager.canvas.mpl_disconnect(manager._cidgcf)
            manager.destroy()
        cls._activeQue = []
        cls.figs.clear()
        gc.collect(1)

    @classmethod
    def has_fignum(cls, num):
        """
        Return *True* if figure *num* exists.
        """
        return num in cls.figs

    @classmethod
    def get_all_fig_managers(cls):
        """
        Return a list of figure managers.
        """
        return list(cls.figs.values())

    @classmethod
    def get_num_fig_managers(cls):
        """
        Return the number of figures being managed.
        """
        return len(cls.figs)

    @classmethod
    def get_active(cls):
        """
        Return the manager of the active figure, or *None*.
        """
        if len(cls._activeQue) == 0:
            return None
        else:
            return cls._activeQue[-1]

    @classmethod
    def set_active(cls, manager):
        """
        Make the figure corresponding to *manager* the active one.
        """
        # Move *manager* to the end of the queue (the "active" slot).
        oldQue = cls._activeQue[:]
        cls._activeQue = [m for m in oldQue if m != manager]
        cls._activeQue.append(manager)
        cls.figs[manager.num] = manager

    @classmethod
    def draw_all(cls, force=False):
        """
        Redraw all figures registered with the pyplot
        state machine.
        """
        for f_mgr in cls.get_all_fig_managers():
            if force or f_mgr.canvas.figure.stale:
                f_mgr.canvas.draw_idle()


# Destroy all remaining figure windows when the interpreter exits.
atexit.register(Gcf.destroy_all)
|
[
"[email protected]"
] | |
ee1a31f88eeb3c7e9f45e9d6e74e4f4ac8581dbf
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_381/ch15_2020_09_14_14_10_44_836878.py
|
03149e67069cc3803fab0866de1f386bfbe66feb
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 177 |
py
|
def chris(nome):
    """Greet *nome*, unless the name is 'Chris'.

    Bug fix: the original compared the function object itself to the name
    (``chris == nome``), which is always False, so the special message could
    never be returned. The intended check is against the string 'Chris'.
    """
    if nome == 'Chris':
        return 'Todo mundo odeia o Chris'
    else:
        return 'Olá, {0}'.format(nome)
nome = input('Qual seu nome?')
|
[
"[email protected]"
] | |
7a1df63cd632b5b6f4ccaeaeee6eff6164e582d7
|
bffcfa6103ee72d7ac394c14aa861e60616c7ab8
|
/pytorch3d/datasets/__init__.py
|
1687213018a29e5d75a4c5490368d52e5f4d893a
|
[
"BSD-3-Clause"
] |
permissive
|
Amit2016-17/pytorch3d
|
ccac686bc1a3caeb4bd0f38519fbcb83f816501d
|
7944d24d4872bdb01b821450840049e28d0ce12b
|
refs/heads/master
| 2022-11-25T10:40:14.409087 | 2020-08-05T13:58:53 | 2020-08-05T14:00:20 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 255 |
py
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
from .r2n2 import R2N2, BlenderCamera
from .shapenet import ShapeNetCore
from .utils import collate_batched_meshes
__all__ = [k for k in globals().keys() if not k.startswith("_")]
|
[
"[email protected]"
] | |
7dfcead14cfcc41518ec35eaa9c96ca9cfbc0be3
|
8fb846f4f4ac5fd417489d731eae8a8a1bdc77c3
|
/rllab/misc/console.py
|
b32d21a249a3d389e0aef97f641591cdb13bb35a
|
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
zhongwen/rllab
|
0a9f9ea2d8995037b83aaae5853a299d5cf9e432
|
d8239c05179fcc55d865db7ce933defa3baae24d
|
refs/heads/master
| 2021-01-14T08:36:37.272071 | 2016-08-17T12:29:00 | 2016-08-17T12:29:00 | 65,801,245 | 1 | 1 | null | 2016-08-16T08:18:47 | 2016-08-16T08:18:46 | null |
UTF-8
|
Python
| false | false | 5,514 |
py
|
import sys
import time
import os
import errno
import shlex
import pydoc
import inspect
# Color name -> ANSI SGR foreground code (30-37; 38 is the extended-color escape).
color2num = dict(
    gray=30,
    red=31,
    green=32,
    yellow=33,
    blue=34,
    magenta=35,
    cyan=36,
    white=37,
    crimson=38
)
def colorize(string, color, bold=False, highlight=False):
    """Wrap *string* in ANSI SGR escape codes for the given color name.

    *highlight* shifts the code into the background-color range; *bold*
    adds the bold attribute.
    """
    code = color2num[color]
    if highlight:
        code += 10
    attrs = [str(code)]
    if bold:
        attrs.append('1')
    return '\x1b[%sm%s\x1b[0m' % (';'.join(attrs), string)
def mkdir_p(path):
    """Create *path* and any missing parents, like ``mkdir -p``.

    Silently succeeds when the directory already exists; any other OS error
    is re-raised.
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        # Swallow only "already exists as a directory"; re-raise everything else.
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
def log(s):  # , send_telegram=False):
    # Print and flush immediately so output interleaves correctly with
    # subprocess output. NOTE(review): Python 2 print statement.
    print s
    sys.stdout.flush()
class SimpleMessage(object):
    """Context manager: logs *msg* on entry and the elapsed time on exit."""

    def __init__(self, msg, logger=log):
        self.msg = msg
        self.logger = logger

    def __enter__(self):
        print self.msg
        self.tstart = time.time()

    def __exit__(self, etype, *args):
        # Note whether the block exited via an exception.
        maybe_exc = "" if etype is None else " (with exception)"
        self.logger("done%s in %.3f seconds" %
                    (maybe_exc, time.time() - self.tstart))
# Current nesting depth of active Message contexts (drives the indentation).
MESSAGE_DEPTH = 0

class Message(object):
    """Nested, colorized variant of SimpleMessage; indents by nesting depth."""

    def __init__(self, msg):
        self.msg = msg

    def __enter__(self):
        global MESSAGE_DEPTH  # pylint: disable=W0603
        print colorize('\t' * MESSAGE_DEPTH + '=: ' + self.msg, 'magenta')
        self.tstart = time.time()
        MESSAGE_DEPTH += 1

    def __exit__(self, etype, *args):
        global MESSAGE_DEPTH  # pylint: disable=W0603
        MESSAGE_DEPTH -= 1
        maybe_exc = "" if etype is None else " (with exception)"
        print colorize('\t' * MESSAGE_DEPTH + "done%s in %.3f seconds" % (maybe_exc, time.time() - self.tstart), 'magenta')
def prefix_log(prefix, logger=log):
    """Return a logger that prepends *prefix* to every message."""
    def _prefixed(message):
        return logger(prefix + message)
    return _prefixed
def tee_log(file_name):
    """Return a logger that both logs to stdout and appends to *file_name*.

    NOTE(review): the file handle is deliberately kept open for the lifetime
    of the returned logger closure.
    """
    f = open(file_name, 'w+')

    def logger(s):
        log(s)
        f.write(s)
        f.write('\n')
        f.flush()
    return logger
def collect_args():
    """Parse ``--name value`` pairs from ``sys.argv[1:]`` into a dict.

    The leading ``--`` is stripped from each flag name.
    """
    tokens = shlex.split(' '.join(sys.argv[1:]))
    # Even positions are "--name" flags, odd positions their values.
    return {flag[2:]: value for flag, value in zip(tokens[::2], tokens[1::2])}
def type_hint(arg_name, arg_type):
    """Decorator: record that *arg_name* should be coerced with *arg_type*.

    The mapping is stored on the function's ``__tweak_type_hint_meta__``
    attribute, which tweakfun() consults when applying overrides.
    """
    def wrap(f):
        if getattr(f, '__tweak_type_hint_meta__', None) is None:
            f.__tweak_type_hint_meta__ = {}
        f.__tweak_type_hint_meta__[arg_name] = arg_type
        return f
    return wrap
def tweak(fun_or_val, identifier=None):
    # Dispatch: callables get tweakfun treatment, plain values get tweakval.
    if callable(fun_or_val):
        return tweakfun(fun_or_val, identifier)
    return tweakval(fun_or_val, identifier)
def tweakval(val, identifier):
    """Return *val*, or its command-line override ``--<identifier> <value>``.

    The override string is coerced back to ``type(val)``.
    """
    if not identifier:
        raise ValueError('Must provide an identifier for tweakval to work')
    args = collect_args()
    for k, v in args.iteritems():  # NOTE(review): iteritems is Python 2-only
        stripped = k.replace('-', '_')
        if stripped == identifier:
            log('replacing %s in %s with %s' % (stripped, str(val), str(v)))
            return type(val)(v)
    return val
def tweakfun(fun, alt=None):
    """Make the arguments (or the function itself) tweakable from command line.
    See tests/test_misc_console.py for examples.
    NOTE: this only works for the initial launched process, since other processes
    will get different argv. What this means is that tweak() calls wrapped in a function
    to be invoked in a child process might not behave properly.
    """
    cls = getattr(fun, 'im_class', None)  # Python 2 bound-method attribute
    method_name = fun.__name__
    if alt:
        cmd_prefix = alt
    elif cls:
        cmd_prefix = cls + '.' + method_name
    else:
        cmd_prefix = method_name
    cmd_prefix = cmd_prefix.lower()
    args = collect_args()
    # "--<prefix> some.dotted.path" replaces the function itself.
    if cmd_prefix in args:
        fun = pydoc.locate(args[cmd_prefix])
    # For classes, inspect the constructor's signature instead.
    if type(fun) == type:
        argspec = inspect.getargspec(fun.__init__)
    else:
        argspec = inspect.getargspec(fun)
    # TODO handle list arguments
    # Map trailing positional args to their declared default values.
    defaults = dict(
        zip(argspec.args[-len(argspec.defaults or []):], argspec.defaults or []))
    replaced_kwargs = {}
    cmd_prefix += '-'
    # Explicit type hints (via @type_hint) take precedence over defaults.
    if type(fun) == type:
        meta = getattr(fun.__init__, '__tweak_type_hint_meta__', {})
    else:
        meta = getattr(fun, '__tweak_type_hint_meta__', {})
    for k, v in args.iteritems():  # NOTE(review): iteritems is Python 2-only
        if k.startswith(cmd_prefix):
            stripped = k[len(cmd_prefix):].replace('-', '_')
            if stripped in meta:
                log('replacing %s in %s with %s' % (stripped, str(fun), str(v)))
                replaced_kwargs[stripped] = meta[stripped](v)
            elif stripped not in argspec.args:
                raise ValueError(
                    '%s is not an explicit parameter of %s' % (stripped, str(fun)))
            elif stripped not in defaults:
                raise ValueError(
                    '%s does not have a default value in method %s' % (stripped, str(fun)))
            elif defaults[stripped] is None:
                raise ValueError(
                    'Cannot infer type of %s in method %s from None value' % (stripped, str(fun)))
            else:
                log('replacing %s in %s with %s' % (stripped, str(fun), str(v)))
                # TODO more proper conversions
                # Coerce override to the same type as the declared default.
                replaced_kwargs[stripped] = type(defaults[stripped])(v)

    def tweaked(*args, **kwargs):
        # Merge positional args, caller kwargs, and command-line overrides.
        all_kw = dict(zip(argspec[0], args) +
                      kwargs.items() + replaced_kwargs.items())
        return fun(**all_kw)
    return tweaked
|
[
"[email protected]"
] | |
d01f9d1b57765a72c85ec040eab037e9d12c89bb
|
ca77e9e45d666771c7b0897e7e3093b3d3c12f65
|
/scripts/trigger/add_prices.py
|
ec79a8be575fe0f59c9b16754b18afc1910a7a29
|
[] |
no_license
|
2gDigitalPost/custom
|
46175d3a3fc4c3be21dc20203ff0a48fb93b5639
|
6a3a804ef4ef6178044b70ad1e4bc5c56ab42d8d
|
refs/heads/master
| 2020-04-04T07:40:17.962611 | 2016-12-28T18:35:28 | 2016-12-28T18:35:28 | 39,648,283 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,857 |
py
|
"""
This file was generated automatically from a custom script found in Project -> Script Editor.
The custom script was moved to a file so that it could be integrated with GitHub.
"""
__author__ = 'Topher.Hughes'
__date__ = '04/08/2015'
import traceback
def main(server=None, input=None):
    """
    The main function of the custom script. The entire script was copied
    and pasted into the body of the try statement in order to add some
    error handling. It's all legacy code, so edit with caution.

    Adds this project sobject's ``price`` onto the running price totals of
    its parent project, title, and order rows (trickle-up accumulation).
    Marked DEPRECATED by the original author's comment below.

    :param server: the TacticServerStub object
    :param input: a dict with data like search_key, search_type, sobject, and update_data
    :return: None
    """
    if not input:
        input = {}
    try:
        # CUSTOM_SCRIPT00035
        # Matthew Tyler Misenhimer
        # This is used to have the prices on projects trickle up to titles, then orders
        # This is DEPRECATED
        sobj = input.get('sobject')
        sk = input.get('search_key')  # NOTE(review): fetched but never used below
        price_str = sobj.get('price')
        # Treat a missing/empty price as zero instead of failing float().
        price = 0
        if price_str not in [None, '']:
            price = float(price_str)
        # Parent project: add this sobject's price to its running total.
        proj = server.eval("@SOBJECT(twog/proj['code','%s'])" % sobj.get('proj_code'))[0]
        current_proj_price_str = proj.get('price')
        current_proj_price = 0
        if current_proj_price_str not in [None, '']:
            current_proj_price = float(current_proj_price_str)
        new_proj_price = current_proj_price + price
        server.update(proj.get('__search_key__'), {'price': new_proj_price})
        # Parent title: same accumulation one level up.
        title = server.eval("@SOBJECT(twog/title['code','%s'])" % proj.get('title_code'))[0]
        current_title_price_str = title.get('price')
        current_title_price = 0
        if current_title_price_str not in [None, '']:
            current_title_price = float(current_title_price_str)
        new_title_price = current_title_price + price
        server.update(title.get('__search_key__'), {'price': new_title_price})
        # Parent order: topmost level of the trickle-up.
        order = server.eval("@SOBJECT(twog/order['code','%s'])" % title.get('order_code'))[0]
        current_order_price_str = order.get('price')
        current_order_price = 0
        if current_order_price_str not in [None, '']:
            current_order_price = float(current_order_price_str)
        new_order_price = current_order_price + price
        server.update(order.get('__search_key__'), {'price': new_order_price})
    except AttributeError as e:
        # Most common failure: server/sobject is None (script run outside TACTIC).
        traceback.print_exc()
        print str(e) + '\nMost likely the server object does not exist.'
        raise e
    except KeyError as e:
        traceback.print_exc()
        print str(e) + '\nMost likely the input dictionary does not exist.'
        raise e
    except Exception as e:
        # Catch-all so the trigger always logs before re-raising.
        traceback.print_exc()
        print str(e)
        raise e
# Allow invoking the trigger by hand (no TACTIC server/input supplied).
if __name__ == '__main__':
    main()
|
[
"[email protected]"
] | |
1da98ce1969f888ec8962c9239a84d4f7a580f78
|
b72dbc51279d3e59cb6410367b671f8a956314c1
|
/leet_code/leet_372.py
|
5c1d0057a5ac67543ab059922519a69fe52287d6
|
[] |
no_license
|
ddobokki/coding-test-practice
|
7b16d20403bb1714d97adfd1f47aa7d3ccd7ea4b
|
c88d981a1d43b986169f7884ff3ef1498e768fc8
|
refs/heads/main
| 2023-07-08T15:09:32.269059 | 2021-08-08T12:19:44 | 2021-08-08T12:19:44 | 344,116,013 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 205 |
py
|
from typing import List
class Solution:
    def superPow(self, a: int, b: List[int]) -> int:
        """Compute a**b mod 1337, where b is given as a list of decimal digits."""
        # 0**k and 1**k are fixed points, so short-circuit them.
        if a in (0, 1):
            return a
        # Fold the digit list back into the integer exponent.
        exponent = 0
        for digit in b:
            exponent = exponent * 10 + digit
        # Three-argument pow performs efficient modular exponentiation.
        return pow(a, exponent, 1337)
|
[
"[email protected]"
] | |
4f02cd88aa3d26c3be1bbb4b45c2049a6e8a6317
|
9ab9d9a3883471763edbceea59a0e83170581b5f
|
/eggs/bx_python-0.7.1_7b95ff194725-py2.7-linux-i686-ucs4.egg/EGG-INFO/scripts/bed_extend_to.py
|
2985cc3497acf222c69151a76b253624baa01752
|
[
"CC-BY-2.5",
"AFL-2.1",
"AFL-3.0",
"CC-BY-3.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
asmmhossain/phyG
|
24dc211dad5b3e89c87ff384e841f2e98bbd52db
|
023f505b705ab953f502cbc55e90612047867583
|
refs/heads/master
| 2022-11-21T12:43:46.172725 | 2014-02-14T12:33:08 | 2014-02-14T12:33:08 | 13,800,552 | 0 | 1 |
NOASSERTION
| 2020-07-25T21:05:41 | 2013-10-23T11:04:25 |
Python
|
UTF-8
|
Python
| false | false | 1,132 |
py
|
#!/afs/bx.psu.edu/project/pythons/linux-i686-ucs4/bin/python2.7
"""
Read BED file and extend each record to the specified minimum length. If chromosome
size information is provided trim extended intervals.
usage: %prog amount [ chrom_file ] < bed_file
"""
import sys
from bx.intervals.io import GenomicIntervalReader

# Minimum interval length to enforce (first CLI argument).
length = int( sys.argv[1] )
# Optional chrom-sizes file: chromosome name -> total length, used to clip
# extended intervals that would run past the end of the chromosome.
chrom_len = None
if len( sys.argv ) > 2:
    chrom_len = dict( ( fields[0], int( fields[1] ) ) for fields in map( str.split, open( sys.argv[2] ) ) )

for interval in GenomicIntervalReader( sys.stdin ):
    if interval.end - interval.start < length:
        start = interval.start
        end = interval.end
        # Extend in positive direction on strand: "+" grows the end;
        # any other strand value (including "-") grows the start.
        if interval.strand == "+":
            end = start + length
        else:
            start = end - length
        # Trim so the record stays within [0, chromosome length].
        if start < 0:
            start = 0
        if chrom_len and end > chrom_len[interval.chrom]:
            end = chrom_len[interval.chrom]
        # Set new start and end
        interval.start = start
        interval.end = end
    # Output possibly adjusted interval (unchanged intervals pass through).
    print interval
|
[
"[email protected]"
] | |
cccee8c95ce17bb44043b1a20a899ac4161055be
|
ee22ec2076a79e8de3011377fe205bc87163ab9f
|
/src/basic-c3/func-let.py
|
8c9c6ff3fea14adfbe60b86692ad4981a5710241
|
[] |
no_license
|
n18018/programming-term2
|
039a95c67372a38a34e2aa8c5975045a9fc731be
|
86c455269eed312def529604e1ac3b00f476226c
|
refs/heads/master
| 2020-03-22T08:59:29.545280 | 2018-08-29T07:57:37 | 2018-08-29T07:57:37 | 139,806,131 | 0 | 0 | null | 2018-07-05T06:42:11 | 2018-07-05T06:42:11 | null |
UTF-8
|
Python
| false | false | 326 |
py
|
# Demonstrate that Python functions are first-class objects: they can be
# stored in variables and called through those variables.


def mul_func(a, b):
    """Return the product of a and b."""
    product = a * b
    return product


def div_func(a, b):
    """Return a divided by b (true division)."""
    quotient = a / b
    return quotient


# Binding a function to another name does not call it; calling happens
# only when parentheses are applied to the alias.
func = mul_func
result = func(2, 3)
print(result)

# The same works for div_func.
func2 = div_func
result = func2(10, 5)
print(result)
|
[
"[email protected]"
] | |
ae4cb13734a0740053a6c4093337ac9c7f2ab6d8
|
de707c94c91f554d549e604737b72e6c86eb0755
|
/supervised_learning/0x02-tensorflow/7-evaluate.py
|
16e4666e5785a1670cb87f5a081e939092818dc2
|
[] |
no_license
|
ejonakodra/holbertonschool-machine_learning-1
|
885cf89c1737573228071e4dc8e26304f393bc30
|
8834b201ca84937365e4dcc0fac978656cdf5293
|
refs/heads/main
| 2023-07-10T09:11:01.298863 | 2021-08-11T03:43:59 | 2021-08-11T03:43:59 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,069 |
py
|
#!/usr/bin/env python3
"""
Defines a function that evaluates output of
neural network classifier
"""
import tensorflow as tf
def evaluate(X, Y, save_path):
    """
    Evaluates output of neural network

    parameters:
        X [numpy.ndarray]: contains the input data to evaluate
        Y [numpy.ndarray]: contains the one-hot labels for X
        save_path [string]: location to load the model from

    returns:
        the network's prediction, accuracy, and loss, respectively
    """
    with tf.Session() as sess:
        # Rebuild the graph from the checkpoint's .meta file, then restore
        # the trained variable values into this session.
        saver = tf.train.import_meta_graph(save_path + '.meta')
        saver.restore(sess, save_path)
        # The training script stored these tensors in named collections.
        x = tf.get_collection('x')[0]
        y = tf.get_collection('y')[0]
        y_pred = tf.get_collection('y_pred')[0]
        accuracy = tf.get_collection('accuracy')[0]
        loss = tf.get_collection('loss')[0]
        # Fetch all three tensors in a single sess.run so the forward pass
        # executes once instead of three times; results are identical for a
        # deterministic evaluation graph.
        prediction, acc, cost = sess.run([y_pred, accuracy, loss],
                                         feed_dict={x: X, y: Y})
    return (prediction, acc, cost)
|
[
"[email protected]"
] | |
3b7f9e6dbe9c7e658110923f1a4756af7ddbc9ba
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_274/ch134_2020_04_01_11_05_19_507472.py
|
ead9069b851dccdce2014878c391a3cdbe73018b
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 174 |
py
|
def verifica_quadrado_perfeito(n):
    """Return True if n is a perfect square (0, 1, 4, 9, 16, ...).

    Subtracts 2, 4, 6, ... from n until the running value drops below 0.
    After subtracting k terms the value is n - k*(k+1), which equals -k
    exactly when n == k**2, so the final value squared equals n only for
    perfect squares.
    """
    if n <= 0:
        # 0 == 0**2 is a perfect square (the original wrongly returned
        # False for it); negative numbers never are.
        return n == 0
    m = n
    i = 2
    while m > -1:
        m = m - i
        i = i + 2
    return m ** 2 == n
|
[
"[email protected]"
] | |
81264f2bcadaa766a81e3a63ef481439ed76116f
|
e20ed90b9be7a0bcdc1603929d65b2375a224bf6
|
/generated-libraries/python/netapp/fpolicy/event_name.py
|
476ecd9c31bd1a9cacb1652502a4f667427125da
|
[
"MIT"
] |
permissive
|
radekg/netapp-ontap-lib-gen
|
530ec3248cff5ead37dc2aa47ced300b7585361b
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
refs/heads/master
| 2016-09-06T17:41:23.263133 | 2015-01-14T17:40:46 | 2015-01-14T17:40:46 | 29,256,898 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 142 |
py
|
class EventName(basestring):
    """
    Event name

    Thin string-typed marker used by the generated NetApp API bindings.
    NOTE: ``basestring`` exists only under Python 2; this module is not
    Python 3 compatible as written.
    """
    @staticmethod
    def get_api_name():
        # Wire name of this field in the NetApp ONTAP API payload.
        return "event-name"
|
[
"[email protected]"
] | |
77e13c60ab887ef65af5d208fbcad6ac63b78f87
|
f067b46c0bd8bf4fbc2471c42c4a74cb08359bd5
|
/server/config/settings/components/thumb.py
|
6b24b0bc3fdca3195cad3f422c9b6577525857e0
|
[] |
no_license
|
bopo/project-template
|
ffbf3516c9f486fadb46a767688cb26badda6a3d
|
0eedd18c236b66516e543750673934f4932555ca
|
refs/heads/develop
| 2020-03-23T01:57:29.471378 | 2018-07-17T05:38:48 | 2018-07-17T05:38:48 | 140,947,688 | 0 | 1 | null | 2022-04-21T04:40:56 | 2018-07-14T13:18:49 |
Python
|
UTF-8
|
Python
| false | false | 1,330 |
py
|
# -*- coding: utf-8 -*-
# Thumbor (image-resizing service) settings component for django-thumbor.

# Alternative/disabled configuration kept for reference:
# INSTALLED_APPS += [
#     "django_thumbor",
# ]
# INSTALLED_APPS += ('easy_thumbnails',)
# THUMBNAIL_ALIASES = {
#     '': {
#         'avatar': {'size': (50, 50), 'crop': True},
#     },
# }
# THUMB_LIST = '500x500'
# THUMB_DETAIL = '800x800'

# The host serving the thumbor resized images
THUMBOR_SERVER = 'http://localhost:8888'
# The prefix for the host serving the original images
# This must be a resolvable address to allow thumbor to reach the images
THUMBOR_MEDIA_URL = 'http://localhost:8888/media'
# If you want the static to be handled by django thumbor
# default as False, set True to handle it if you host your statics
THUMBOR_STATIC_ENABLED = False
# The prefix for the host serving the original static images
# this must be a resolvable address to allow thumbor to reach the images
THUMBOR_STATIC_URL = 'http://localhost:8888/static'
# The same security key used in the thumbor service to
# match the URL construction.
# NOTE(review): 'MY_SECURE_KEY' is a placeholder -- override it with the
# real shared secret in deployment settings.
THUMBOR_SECURITY_KEY = 'MY_SECURE_KEY'
# Default arguments passed to the `generate_url` helper or
# the `thumbor_url` templatetag
THUMBOR_ARGUMENTS = {}
# An alias represents a named set of arguments to the generate_url function
# or thumbor_url template tag. Use it to share general thumbnail
# configurations without repeating yourself.
THUMBOR_ALIASES = {}
|
[
"[email protected]"
] | |
368933543f3030bfc38b32795b89b4bccf0c2b47
|
d8c1419eba8aeec8c203e819aae46475b744a66f
|
/archive/main.py
|
e6f339fa6539de2f2ff591d9c551fbb00f096b86
|
[] |
no_license
|
StefenYin/yeadon
|
a66aa1808ef662c76cd5d96db0f74cd25e3abcc7
|
03ae2c5881795e44f1890818fcb3530ba3c6feac
|
refs/heads/master
| 2021-01-17T08:50:12.313236 | 2012-12-18T19:54:52 | 2012-12-18T19:54:52 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 698 |
py
|
import stadium as stad
#import segment
import human as hum
import matplotlib.pyplot as mpl
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import data
import densities

# INPUTS ARE 95 MEASUREMENTS, DENSITIES, AND ORIENTATION ANGLES
# read input file of 95 measurements
# create solid objects
# create segment objects
# create human object
# plot human, no angles
# read in angles file
# plot human, with joint angles
# plot human conforming to a bicycle
# SECOND ITERATION: MOVE FROM FILE INPUTS (FOR ANGLES ONLY) TO QT GUI

# Three whole-body orientation angles, all zero. (The explicit [0] = 0
# assignment is redundant -- np.zeros already yields zeros.)
externalangles = np.zeros( 3 )
externalangles[0] = 0
# NOTE(review): jointangles is created but never passed to hum.human below;
# presumably intended for the "with joint angles" step above -- confirm.
jointangles = np.zeros( 18 )

print "Creating human object."
H = hum.human(externalangles)
H.draw()
|
[
"[email protected]"
] | |
14b6673a73fd4152a4af7be21d6eb6e4233c7f7e
|
944401a6292baa2d23b9738898e0b0cb199d0795
|
/color_quantization/octree/img_quality_assessment(IQA)/psnr/rgb_cs/rgb_psnr_sky.py
|
cdd42100b4527e977262e59e0ed94e2810f09ba1
|
[] |
no_license
|
sunnyweilai/Finding-Theme-Color-Palettes
|
cc84c93ce58abdd1802431c41bd59181d7a4f75b
|
4c38b112f5c40b43d6ec126e415b609c7fdc1f39
|
refs/heads/master
| 2022-12-21T09:41:31.187411 | 2019-04-30T14:50:17 | 2019-04-30T14:50:17 | 184,273,925 | 1 | 0 | null | 2022-12-07T03:46:55 | 2019-04-30T14:09:52 |
Python
|
UTF-8
|
Python
| false | false | 1,028 |
py
|
"""
image quality assessment (IQA) of the quantized images and the original image in RGB color space
----- method: PSNR
----- version 1.0 ("skimage" library)
----- http://scikit-image.org/docs/dev/api/skimage.measure.html#skimage.measure.compare_psnr
"""
import numpy as np
from PIL import Image
import glob
import csv
import skimage.measure as skm
# ---- obtain the original and quantized images
temp_img = np.array(Image.open('../../../../img/sky.jpg'))
quantized_img_path_list = []
quantized_img_path_list = glob.glob(r'../../../img/sky/rgb_cs/quantized_img/*.png')
quantized_img_path_list.sort()
# ---- compute PSNR
score_list = []
for i in quantized_img_path_list:
quantized_img = np.array(Image.open(i))
score = skm.compare_psnr(temp_img, quantized_img)
score_list.append(score)
# print(score_list)
# ---- save psnr score to csv file
csvfile = "sky_psnr.csv"
with open(csvfile, "w") as output:
writer = csv.writer(output, lineterminator='\n')
for val in score_list:
writer.writerow([val])
|
[
"[email protected]"
] | |
4e3c0ef1f25cdcd986f146665468ac1c76395c52
|
fac16ad71ac9b09afc9abf0528a98171ac02afc4
|
/payment/payments/migrations/0003_category_product.py
|
ada7734a9c41e562f17f56d3edb03d1a44dd48c7
|
[] |
no_license
|
evansmwendwa/payment_gateway
|
96dbaf3728ebe4e0875152c96ecfbe7b7004dd98
|
afdeab38524ded46d1e557bab696afca9c387e7b
|
refs/heads/master
| 2020-03-10T09:38:25.395169 | 2018-04-12T23:52:34 | 2018-04-12T23:52:34 | 129,314,383 | 0 | 0 | null | 2018-04-12T21:44:34 | 2018-04-12T21:44:33 | null |
UTF-8
|
Python
| false | false | 1,097 |
py
|
# Generated by Django 2.0.3 on 2018-03-26 03:13

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Auto-generated schema migration adding Category and Product."""

    dependencies = [
        ('payments', '0002_auto_20180326_0248'),
    ]

    operations = [
        # Category: auto primary key plus a display name.
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('categoryName', models.CharField(max_length=100)),
            ],
        ),
        # Product: name/price/brand plus a FK to Category; deleting a
        # Category cascades to its Products.
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('productName', models.CharField(max_length=100)),
                ('productPrice', models.IntegerField()),
                ('productBrand', models.CharField(max_length=100)),
                ('productCategory', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payments.Category')),
            ],
        ),
    ]
|
[
"[email protected]"
] | |
454cbfb46c7d918fb69092033e9e5117676beb29
|
6eef7d400474384c9e36cafbbae95e3c34dbb6ad
|
/manage.py
|
9546f991d846e27cec4ace859f5bbc2dda3e97ad
|
[] |
no_license
|
codeAligned/clinvitae
|
61d3c160e9dbc65d548818292681a27501d330ce
|
4a75c14113dc562991c7d2d1a5812d2db91e2da0
|
refs/heads/master
| 2020-05-17T12:02:33.514187 | 2019-02-21T06:47:35 | 2019-02-21T06:47:35 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 263 |
py
|
#!/usr/bin/env python
"""Django management entry point: set the settings module, then dispatch
command-line arguments (runserver, migrate, ...) to Django."""
import os
import sys

if __name__ == "__main__":
    # setdefault: respect DJANGO_SETTINGS_MODULE if the caller already set it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ben_kremer_clinvitae.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
|
[
"[email protected]"
] | |
cd0c89d314658b289357e3eaf240900c29f54130
|
7bd5ca970fbbe4a3ed0c7dadcf43ba8681a737f3
|
/aoj/aoj-icpc/300/1305.py
|
e3d262fb73a310e2dafd28e76451fa6d53bedd63
|
[] |
no_license
|
roiti46/Contest
|
c0c35478cd80f675965d10b1a371e44084f9b6ee
|
c4b850d76796c5388d2e0d2234f90dc8acfaadfa
|
refs/heads/master
| 2021-01-17T13:23:30.551754 | 2017-12-10T13:06:42 | 2017-12-10T13:06:42 | 27,001,893 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 444 |
py
|
# AOJ/ICPC 1305: repeatedly inline nested group definitions and report how
# many distinct names end up in the first group (Python 2 I/O).
while 1:
    n = input()
    if n == 0: break          # a lone 0 terminates the input
    ans = {}
    for i in range(n):
        # Each line is "group:member1,member2,...," -- the trailing
        # character is dropped by name[:-1] before splitting on commas.
        group,name = raw_input().split(":")
        if i == 0: first = group          # remember the group to report on
        ans[group] = set(name[:-1].split(","))
    # Fixed point: whenever some group's member set mentions another defined
    # group, splice that group's members in, drop the reference, delete the
    # inlined group, and restart the scan (the dict was mutated mid-iteration).
    # Stop when a full scan performs no merge.
    while 1:
        for key in ans:
            flag = 0
            if key == first: continue     # never inline the reported group
            for key1 in ans:
                if key in ans[key1]:
                    ans[key1] |= ans[key]
                    ans[key1].discard(key)
                    flag = 1
            if flag:
                del ans[key]
                break
        if flag == 0: break
    print len(ans[first])
|
[
"[email protected]"
] | |
9c455ce4b8af925afea25a90680844bd0cd02e46
|
301b039050c00a9efa4f3a5635e8b633f8adf988
|
/caffe2/python/layers/functional.py
|
08612d21a4babfe8b412473834b03ea02a2621a1
|
[
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] |
permissive
|
sunpan822/caffe2
|
9704b6fe556d272fbedfd6edfdb796f6a8f02970
|
a3c56d892eb85054b4e7cbd1cf0a0d07422ae796
|
refs/heads/master
| 2020-04-12T14:31:45.919799 | 2019-04-19T04:10:40 | 2019-04-19T04:10:40 | 162,555,100 | 1 | 0 |
Apache-2.0
| 2018-12-20T09:14:48 | 2018-12-20T09:14:47 | null |
UTF-8
|
Python
| false | false | 5,022 |
py
|
# @package functional
# Module caffe2.python.layers.functional
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core, schema, scope, workspace
from caffe2.python.layers.layers import (
ModelLayer,
)
import caffe2.proto.caffe2_pb2 as caffe2_pb2
import numpy as np
import six
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class Functional(ModelLayer):
    """Layer wrapping an arbitrary net-building function.

    ``function(net, input_record, output_schema, **kwargs)`` is expected to
    add operators to ``net``; this class builds the output schema for it and
    tries to infer output shapes/dtypes by a throwaway "fake" execution.
    """

    def __init__(self, model, input_record, output_names_or_num, function,
                 name='functional', output_dtypes=None, **kwargs):
        """Build output schema and (optionally) infer output types.

        output_names_or_num may be: an int (number of unnamed outputs), a
        schema.Field (used as-is), a single name, or a list of names.
        output_dtypes, when given, skips automatic type inference.
        """
        # allow coercion
        input_record = schema.as_record(input_record)

        super(Functional, self).__init__(model, name, input_record, **kwargs)
        self._function = function
        self._kwargs = kwargs
        # Struct output for a list or an int != 1; Scalar output otherwise.
        return_struct = (
            isinstance(output_names_or_num, list) or
            (isinstance(output_names_or_num, six.integer_types) and
             output_names_or_num != 1)
        )

        with scope.NameScope(self.name, reset=True):
            if isinstance(output_names_or_num, int):
                struct_output_schema = schema.NewRecord(
                    model.net, schema.RawTuple(output_names_or_num))
            elif isinstance(output_names_or_num, schema.Field):
                # Caller supplied a full schema: clone it and skip inference.
                self.output_schema = output_names_or_num.clone(keep_blobs=True)
                return
            else:
                # Single name or list of names; dtype unknown (np.void) here.
                if not isinstance(output_names_or_num, list):
                    output_names_or_num = [output_names_or_num]
                out_tuple = [(out, np.void) for out in output_names_or_num]
                struct_output_schema = schema.NewRecord(
                    model.net, schema.Struct(*out_tuple))

        num_outputs = len(struct_output_schema.field_blobs())

        # functional layer returns Struct if more than one outputs or output is
        # a list, otherwise Scalar
        if return_struct:
            self.output_schema = struct_output_schema
        else:
            self.output_schema = struct_output_schema[0]

        # If output_dtypes is provided, use it for output schema. Otherwise
        # the shape and type will be inferred.
        if output_dtypes is not None:
            if not isinstance(output_dtypes, list):
                output_dtypes = [output_dtypes] * num_outputs
            assert len(output_dtypes) == num_outputs
            for dtype, scalar in zip(output_dtypes,
                                     self.output_schema.all_scalars()):
                scalar.set_type(dtype)
            return

        # Fake execution of the function to infer shapes and types automatically
        had_issues = False
        try:
            type_net = core.Net('_temp_type_and_shape_inference_net')
            schema.InitEmptyRecord(type_net, input_record, enforce_types=True)

            function(type_net, self.input_record, self.output_schema, **kwargs)

            (shapes, types) = workspace.InferShapesAndTypes([type_net], {})

            for i in range(num_outputs):
                scalar_schema = (self.output_schema[i] if return_struct
                                 else self.output_schema)
                blob = scalar_schema()
                if blob not in types or blob not in shapes:
                    # Inference could not determine this blob; leave np.void.
                    had_issues = True
                    continue
                if shapes[blob] == []:
                    # Scalar type
                    shape = tuple()
                elif shapes[blob][0] == 0:
                    # Leading 0 marks the batch dimension; strip it.
                    shape = tuple(shapes[blob][1:])
                else:
                    logger.warning("unexpeced shape: {}".format(shapes[blob]))
                    # If batch dimension is not first - give up on shape
                    # inference for that blob
                    had_issues = True
                    continue

                # TODO(amalevich): Move it to some shared library
                dtype = None
                if types[blob] == caffe2_pb2.TensorProto.DOUBLE:
                    dtype = (np.float64, shape)
                elif types[blob] == caffe2_pb2.TensorProto.FLOAT:
                    dtype = (np.float32, shape)
                elif types[blob] == caffe2_pb2.TensorProto.INT32:
                    dtype = (np.int32, shape)
                elif types[blob] == caffe2_pb2.TensorProto.INT64:
                    dtype = (np.int64, shape)
                elif types[blob] == caffe2_pb2.TensorProto.FLOAT16:
                    dtype = (np.float16, shape)

                if dtype is not None:
                    scalar_schema.set_type(dtype)
        except TypeError as ex:
            # The wrapped function rejected the fake inputs; inference failed.
            had_issues = True
            logger.warning(str(ex))

        if had_issues:
            logger.warning(
                "Type inference had problems for layer: {}".format(self.name))

    def add_ops(self, net):
        """Add the wrapped function's operators to *net* (real execution)."""
        self._function(
            net, self.input_record, self.output_schema, **(self._kwargs))
|
[
"[email protected]"
] | |
1de7c275d0299c2c4771f2e76446f0388e3b6064
|
57dbcfe5fe149b5353d42d687ebacfee36f16551
|
/sambam/sam_strip_tags.py
|
07dd6983bcdc366b975a62036992193da80974d7
|
[
"MIT"
] |
permissive
|
peterjc/picobio
|
74d3f570a6344dc3fbd3ddca46d65c4292ce0ee7
|
63a5f8b5670afc3680bdeac0d9663d8fcbe904c1
|
refs/heads/master
| 2023-09-06T04:26:31.955632 | 2023-08-31T14:12:25 | 2023-08-31T14:12:25 | 2,184,466 | 34 | 14 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,572 |
py
|
#!/usr/bin/env python
"""Python script to remove tags from SAM/BAM files.

This script is designed to be used as part of a Unix pipeline. It
takes as optional command line arguments a white list of tags to
preserve (or a black list of tags to remove). It reads SAM format
data from stdin, and writes SAM format data to stdout.

Simple usage with SAM files, keeping only read-group tags:

    $ ./sam_strip_tags.py RG < original.sam > only_RG.sam

Simple usage with BAM files with conversion to/from SAM via samtools:

    $ samtools view -h original.bam | ./sam_strip_tags.py RG | samtools view -S -b - > only_RG.bam

If your SAM/BAM files lack @SQ headers, you may need to give
samtools the reference FASTA file as well.

To remove particular tags (a black list rather than a white list)
include the switch -v (for invert, like the grep option). For example,
to remove any original quality (OC) tags, use:

    $ ./sam_strip_tags.py -v OQ < original.sam > no_OQ.sam

Likewise with BAM files via samtools,

    $ samtools view -h original.bam | ./sam_strip_tags.py -v OQ | samtools view -S -b - > no_OQ.bam

Copyright Peter Cock 2012. All rights reserved. See:
https://github.com/peterjc/picobio
"""
import sys


def _filter_tags(line, selected, keep_listed):
    """Return the SAM read *line* with its optional tags filtered.

    *selected* is a set of two-letter tag names; a tag survives when its
    membership in *selected* equals *keep_listed* (True = white list,
    False = black list). Header ("@") lines must not be passed here.
    """
    (
        qname,
        flag,
        rname,
        pos,
        mapq,
        cigar,
        rnext,
        pnext,
        tlen,
        seq,
        qual,
        tags,
    ) = line.rstrip().split("\t", 11)
    tags = "\t".join(
        t for t in tags.split("\t") if (t[:2] in selected) == keep_listed
    )
    return (
        "\t".join(
            [qname, flag, rname, pos, mapq, cigar, rnext, pnext, tlen, seq, qual, tags]
        )
        + "\n"
    )


# The two modes used to duplicate the whole parsing loop; they differ only
# in the keep/remove predicate and the progress message on stderr.
if "-v" in sys.argv[1:]:
    black_list = set(x.strip() for x in sys.argv[1:] if x != "-v")
    sys.stderr.write("Removing these tags: %s\n" % ", ".join(black_list))
    selected, keep_listed = black_list, False
else:
    white_list = set(x.strip() for x in sys.argv[1:])
    sys.stderr.write("Keeping only these tags: %s\n" % ", ".join(white_list))
    selected, keep_listed = white_list, True

for line in sys.stdin:
    if line[0] != "@":
        # Read record: rewrite its optional tag fields.
        line = _filter_tags(line, selected, keep_listed)
    # Header lines ("@...") pass through unchanged.
    sys.stdout.write(line)
|
[
"[email protected]"
] | |
6fa53185e2e05b9e6e6db365d3d3defaf82130cf
|
f8e64dd069b2d65f1b9af53e03c42d97301e9a1d
|
/apps/currency/forms/fields.py
|
9c65327d4b8701519fd5e5bf2100f8c390ed6e36
|
[] |
no_license
|
grengojbo/django-currency
|
8daef53e442d7409f02c68dec48ff535b1712377
|
26e26cfb09ae8e62851a81bc8d821e1530eef20c
|
refs/heads/master
| 2021-04-12T04:32:53.928776 | 2010-04-28T17:00:39 | 2010-04-28T17:00:39 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 897 |
py
|
from django.utils.translation import ugettext_lazy as _
from django import forms
from widgets import InputMoneyWidget
from currency.money import Money
from currency.models import Currency
__all__ = ('MoneyField',)
class MoneyField(forms.DecimalField):
    """Form field collecting an (amount, currency) pair and cleaning to Money.

    Rendered with InputMoneyWidget, which submits a 2-tuple; clean()
    validates the amount with DecimalField and resolves the currency code
    against the Currency model.
    """

    def __init__(self, currency_widget=None, *args, **kwargs):
        self.widget = InputMoneyWidget(currency_widget=currency_widget)
        super(MoneyField, self).__init__(*args, **kwargs)

    def clean(self, value):
        # The widget always decompresses to a tuple; anything else means the
        # field was wired up with the wrong widget. TypeError (a subclass of
        # Exception, so existing handlers still catch it) replaces the old
        # bare Exception, and the "tupple" typo in the message is fixed.
        if not isinstance(value, tuple):
            raise TypeError(
                "Invalid value provided for MoneyField.clean (expected tuple)")
        amount = super(MoneyField, self).clean(value[0])
        currency = Currency.objects.get_currency(value[1])
        if not currency:
            raise forms.ValidationError(_(u'Input currency'))
        return Money(amount=amount, currency=currency)
|
[
"[email protected]"
] | |
b59c5b90bec745c23ed7e23d949ecbabbe82375a
|
4762b15498e642b39edfff3745e9ea134f081893
|
/workshop_admin/moodle/migrations/0002_statement.py
|
302a1756920a5a26ec21dd32551a7dd89f96533f
|
[] |
no_license
|
Informatinks/informatics-back
|
d1d29e7297e547a8749b8da4d6c70565495fc509
|
be298f72c072023be004895faf88cff9806650f6
|
refs/heads/master
| 2022-12-10T05:33:34.637043 | 2019-11-01T16:29:12 | 2019-11-01T16:29:12 | 171,288,054 | 0 | 3 | null | 2022-12-08T04:53:26 | 2019-02-18T13:20:53 |
Python
|
UTF-8
|
Python
| false | false | 671 |
py
|
# Generated by Django 2.2.1 on 2019-07-02 11:30

from django.db import migrations, models


class Migration(migrations.Migration):
    """Auto-generated migration declaring the unmanaged Statement model."""

    dependencies = [
        ('moodle', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Statement',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256)),
                ('summary', models.TextField()),
            ],
            options={
                # managed=False: the table belongs to Moodle's own schema
                # (mdl_statements); Django will not create, alter, or drop it.
                'db_table': 'mdl_statements',
                'managed': False,
            },
        ),
    ]
|
[
"[email protected]"
] | |
118e3b71b782fa295f2a247d81a815d8673f60c5
|
b4982d7ffb9e65db8432e7728f89fa2dd4878aa6
|
/Object Oriented Concept/encapsulation.py
|
f8aed772cf8b60e7fcaabebf4a62a52ede6aebd2
|
[] |
no_license
|
anupjungkarki/30-Days-Python-Challenge
|
1d8e794235ac60e098f704cefa2c4a461134e8a4
|
96be38590a159d59397b122f8ee171574f5a556c
|
refs/heads/master
| 2023-06-04T13:24:16.250487 | 2022-12-05T09:46:26 | 2022-12-05T09:46:26 | 327,277,767 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,465 |
py
|
# Encapsulation is a fundamental concept in object-oriented programming.
# Other languages have access specifiers for private data, but in Python
# "private" data is reachable from outside the class; encapsulation here
# relies on naming conventions (_protected, __private with name mangling).

# Protected attribute (single underscore): access is possible by convention only.
class Car:
    def __init__(self, name, mileage):
        self._name = name
        self.mileage = mileage

    def description(self):
        # NOTE(review): missing space after "The" in the f-string output.
        return f'The{self._name} car gives the mileage of {self.mileage} km/1hr'

obj = Car('BMW 7-Series', 39.53)
# accessing the protected variable by class method
print(obj.description())
# accessing the protected variable directly from outside (allowed, but
# the single underscore signals "internal use")
print(obj._name)
print(obj.mileage)

# Now demonstrate the private (double-underscore) variant.
class Car:
    def __init__(self, name, mileage):
        self.__name = name  # Private Variable
        self.mileage = mileage

    def description(self):
        return f'The {self.__name} car given the mileage of {self.mileage} km/1hr'

obj = Car('BMW 7-Series', 39.53)
# Accessing the private variable by class method
print(obj.description())
# Accessing the private variable directly from the outside
# print(obj.__name)   # raises AttributeError (name-mangled)
# Direct access to __name fails, but the mangled form still works
# (mileage is public, so obj.mileage itself never errors):
# print(obj._Car__name)  # mangled name: _ClassName__attr
|
[
"[email protected]"
] | |
009c97483cd7634d38ffeac4a1744beaae479f57
|
ae7d5d11351af9201ce6181c48b8c60363c7ed00
|
/packages/data/setup.py
|
28faa87c3d988024ce6993d21ad79eeb365f0a85
|
[
"CC-BY-2.5",
"AFL-2.1",
"AFL-3.0",
"CC-BY-3.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
natefoo/galaxy
|
818037d03f39ccfb3714c7e784fd64d7ad8f4d2e
|
64150c5bd803e75ed032e9f15acd003bae92b5ef
|
refs/heads/master
| 2023-08-17T02:57:02.580487 | 2020-03-26T13:33:01 | 2020-03-26T13:33:01 | 31,212,836 | 2 | 1 |
NOASSERTION
| 2019-04-25T12:30:28 | 2015-02-23T15:01:46 |
Python
|
UTF-8
|
Python
| false | false | 3,207 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Packaging script for the galaxy-data distribution.

Project metadata is scraped out of galaxy/project_galaxy_data.py with a
regex + ast.literal_eval instead of importing the module, so this script
runs without the package's runtime dependencies installed.
"""
import ast
import os
import re

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

SOURCE_DIR = "galaxy"

# Read the metadata module once; get_var() below scrapes values from it.
# (A stray pre-compiled "__version__" regex that was never used has been
# removed -- get_var compiles its own pattern for every variable.)
with open('%s/project_galaxy_data.py' % SOURCE_DIR, 'rb') as f:
    init_contents = f.read().decode('utf-8')


def get_var(var_name):
    """Return the literal assigned to *var_name* in the metadata module."""
    pattern = re.compile(r'%s\s+=\s+(.*)' % var_name)
    match = pattern.search(init_contents).group(1)
    return str(ast.literal_eval(match))


version = get_var("__version__")
PROJECT_NAME = get_var("PROJECT_NAME")
PROJECT_URL = get_var("PROJECT_URL")
PROJECT_AUTHOR = get_var("PROJECT_AUTHOR")
PROJECT_EMAIL = get_var("PROJECT_EMAIL")
PROJECT_DESCRIPTION = get_var("PROJECT_DESCRIPTION")

TEST_DIR = 'tests'
PACKAGES = [
    'galaxy',
    'galaxy.datatypes',
    'galaxy.datatypes.dataproviders',
    'galaxy.datatypes.display_applications',
    'galaxy.datatypes.util',
    'galaxy.datatypes.test',
    'galaxy.model',
    'galaxy.model.dataset_collections',
    'galaxy.model.migrate',
    'galaxy.model.orm',
    'galaxy.model.store',
    'galaxy.model.tool_shed_install',
    'galaxy.quota',
    'galaxy.security',
]
ENTRY_POINTS = '''
        [console_scripts]
        galaxy-build-objects=galaxy.model.store.build_objects:main
        galaxy-manage-db=galaxy.model.orm.scripts:manage_db
'''
PACKAGE_DATA = {
    # Be sure to update MANIFEST.in for source dist.
    'galaxy': [
        'datatypes/test/*',
    ],
}
PACKAGE_DIR = {
    SOURCE_DIR: SOURCE_DIR,
}

readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')

if os.path.exists("requirements.txt"):
    requirements = open("requirements.txt").read().split("\n")
else:
    # In tox, it will cover them anyway.
    requirements = []

test_requirements = [
    # TODO: put package test requirements here
]

setup(
    name=PROJECT_NAME,
    version=version,
    description=PROJECT_DESCRIPTION,
    long_description=readme + '\n\n' + history,
    long_description_content_type='text/x-rst',
    author=PROJECT_AUTHOR,
    author_email=PROJECT_EMAIL,
    url=PROJECT_URL,
    packages=PACKAGES,
    entry_points=ENTRY_POINTS,
    package_data=PACKAGE_DATA,
    package_dir=PACKAGE_DIR,
    include_package_data=True,
    install_requires=requirements,
    license="AFL",
    zip_safe=False,
    keywords='galaxy',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Environment :: Console',
        'License :: OSI Approved :: Academic Free License (AFL)',
        'Operating System :: POSIX',
        'Topic :: Software Development',
        'Topic :: Software Development :: Code Generators',
        'Topic :: Software Development :: Testing',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ],
    test_suite=TEST_DIR,
    tests_require=test_requirements
)
|
[
"[email protected]"
] | |
d72a1163acfa6e897a9e131e9d3523083253254c
|
0268f4c895f9f54e93fc7e3d2b0334206a4e6d9e
|
/day14/03-tk.py
|
a2ce5018023977ebed3408b81989034151538d9e
|
[] |
no_license
|
zhangzongyan/python0702
|
adebccacf26e300ec7a681bdf0f7ab7bdf228eeb
|
7dcb6133d241fdf97b0812b9f25933ab389d2663
|
refs/heads/master
| 2020-03-22T21:05:51.218502 | 2018-08-15T09:54:42 | 2018-08-15T09:54:42 | 140,656,620 | 8 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,214 |
py
|
import tkinter as tk


# Callback attached to a button via the ``command`` option.
def click_button():
    print("已点击")


# Alternative callback, swapped in at runtime with button.config(command=...).
def click_button2():
    print("再次点击")


root = tk.Tk()
root.geometry("400x600")
root.title("这是一个测试窗口")
#root.minsize(width=400, height=300)
#root.maxsize(width=400, height=300)
#root.resizable(width=0,height=0) # width: 0 = not resizable, 1 = resizable

'''
# 按钮类Button
button = tk.Button(root, text="确定", fg="red", bg = "black", command=click_button)
button["fg"] = "blue"
button["text"] = "退出"
button.config(fg="yellow")
button.pack(side="top", expand=0) # pack布局
button.invoke() # 触发按钮
button.config(command = click_button2)

button2 = tk.Button(root, text="退出")
button2.pack(side="left", expand=0)
'''

# Grid layout: buttons 1-3 share row 1; button 4 spans two columns on row 2.
b1 = tk.Button(root, text="1")
b2 = tk.Button(root, text="2")
b3 = tk.Button(root, text="3")
b4 = tk.Button(root, text="4")
b5 = tk.Button(root, text="5")
b1.grid(row = 1, column=1)
b2.grid(row = 1, column=0)
b3.grid(row = 1, column=2)
b4.grid(row = 2, column=0, columnspan=2)
b5.grid(row = 2, column=2)

'''
#place
b1 = tk.Button(root, text="1")
b2 = tk.Button(root, text="2")
b1.place(x=0, y= 0)
b2.place(x=100, y=100)
'''

root.mainloop() # enter the Tk event loop; blocks until the window closes
|
[
"[email protected]"
] | |
c3d0c6798414ea088eb7b3efc5bd017d1d44eda3
|
55267c377da7a2a6676978d958e07c07bfc9d9b6
|
/nbutil.py
|
395b05b0e7c54b1f0b25ec174c5bb9c33908ef84
|
[] |
no_license
|
larsks/netbox-data-scripts
|
54916afab045bed663c2a08ca90f102bf7efeeaa
|
91aa6554aa815bdfc894a500037e942962c16705
|
refs/heads/master
| 2023-01-11T16:50:50.551000 | 2020-11-11T22:33:14 | 2020-11-11T22:33:14 | 309,502,679 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,594 |
py
|
import click
import json
import logging
import pynetbox
import resources
import netbox
LOG = logging.getLogger(__name__)
logging.basicConfig(level='DEBUG')
@click.group(
    context_settings=dict(auto_envvar_prefix='NETBOX'))
@click.option('--url', '-u')
@click.option('--token', '-t')
@click.pass_context
def main(ctx, url, token):
    # Root CLI group: build the Netbox API client once and share it with
    # subcommands through the click context (ctx.obj). Thanks to
    # auto_envvar_prefix, --url/--token can also come from the
    # NETBOX_URL / NETBOX_TOKEN environment variables.
    ctx.obj = netbox.Netbox(url, token=token)
@main.command()
@click.option('--site', '-s', required=True)
@click.option('--device-role', '-r')
@click.argument('factfiles', nargs=-1)
@click.pass_context
def load(ctx, site, device_role, factfiles):
    """Load physical hosts from Ansible fact files into Netbox.

    Each FACTFILE is a JSON dump containing an ``ansible_facts`` mapping.
    Virtual machines are skipped. Missing sites, manufacturers, device types
    and device roles are created on demand; then each device and its
    interfaces are registered (existing devices only get their interfaces
    reconciled).
    """
    # NOTE(review): the --device-role option is currently unused — the role
    # actually comes from each parsed device (dev.device_role). Kept so the
    # CLI interface is unchanged.
    api = ctx.obj
    devices = []
    for factfile in factfiles:
        with open(factfile) as fd:
            facts = json.load(fd)
        if 'ansible_facts' not in facts:
            LOG.warning('invalid fact file: %s', factfile)
            continue
        if facts['ansible_facts'].get('ansible_virtualization_role') != 'host':
            LOG.warning('skipping virtual machine: %s', factfile)
            continue
        try:
            dev = resources.device.from_ansible_facts(facts['ansible_facts'])
        except KeyError as err:
            LOG.warning('failed loading device from %s: missing %s',
                        factfile, err)
        else:
            devices.append(dev)
    for dev in devices:
        try:
            # Device already exists: reuse it; only the interface loop below runs.
            _dev = api.dcim.devices.filter(name=dev.name)[0]
        except IndexError:
            LOG.info('adding %s', dev)
            # Ensure the target site exists.
            try:
                _site = api.dcim.sites.filter(name=site)[0]
            except IndexError:
                _site = api.dcim.sites.create(name=site)
            # Ensure the manufacturer exists.
            try:
                manufacturer = api.dcim.manufacturers.filter(
                    name=dev.device_type.manufacturer)[0]
            except IndexError:
                obj = resources.manufacturer(name=dev.device_type.manufacturer)
                LOG.info('create new manufacturer %s', obj)
                manufacturer = api.dcim.manufacturers.create(**obj.to_dict())
            # Ensure the device type exists.
            try:
                devtype = api.dcim.device_types.filter(
                    manufacturer_name=manufacturer.name,
                    model=dev.device_type.model)[0]
            except IndexError:
                obj = resources.device_type(
                    manufacturer=manufacturer.id,
                    model=dev.device_type.model)
                LOG.info('create new device type %s', obj)
                devtype = api.dcim.device_types.create(**obj.to_dict())
            # Ensure the device role exists.
            try:
                devrole = api.dcim.device_roles.filter(
                    name=dev.device_role)[0]
            except IndexError:
                obj = resources.device_role(name=dev.device_role)
                LOG.info('create new device role %s', obj)
                devrole = api.dcim.device_roles.create(**obj.to_dict())
            dev.site = _site.id
            dev.device_type = devtype.id
            dev.device_role = devrole.id
            try:
                _dev = api.dcim.devices.create(**dev.to_dict())
            except pynetbox.core.query.RequestError as err:
                # BUG FIX: the original dropped into breakpoint() here and then
                # fell through with _dev unbound, raising NameError in the
                # interface loop below. Log the failure and skip this device.
                LOG.error('failed to create device %s in netbox: %s',
                          dev.name, err)
                continue
        for interface in dev.interfaces.interfaces:
            try:
                _iface = api.dcim.interfaces.filter(
                    device_id=_dev.id, name=interface.name)[0]
            except IndexError:
                LOG.info('create new interface %s on %s', interface, dev)
                _iface = api.dcim.interfaces.create(
                    device=_dev.id, **interface.to_dict())
# Script entry point: dispatch to the click command group.
if __name__ == '__main__':
    main()
|
[
"[email protected]"
] | |
2157fa5f00a7ea2f2da78c201b0648401aa85d19
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/otherforms/_attending.py
|
42c967da90aa064ad1ee81dd35207c570ee2ae1f
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 226 |
py
|
#calss header
class _ATTENDING():
def __init__(self,):
self.name = "ATTENDING"
self.definitions = attend
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['attend']
|
[
"[email protected]"
] | |
b14c2b98a07fad5acc877d946f624a0191ab7c48
|
3cfd5edbacb48d5197d709f52f77433194cedf2a
|
/app/middlewares/acl.py
|
72dd97eb8c38bb3d704106b06790ff099a0bf2a5
|
[] |
no_license
|
pikoUsername/A-Search
|
1ebb3062a930225cc3a7e5a515f77371aed862b6
|
59377c4e8cb6d0af09375aca1c03f35c371a212f
|
refs/heads/master
| 2023-02-18T19:10:01.007817 | 2021-01-18T14:10:48 | 2021-01-18T14:10:48 | 325,986,023 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,000 |
py
|
from typing import Optional
from aiogram import types
from aiogram.dispatcher.middlewares import BaseMiddleware
from ..models import dbc, User, Chat
class AclMiddleware(BaseMiddleware):
    """Resolve (and lazily create) the DB user/chat rows for an incoming
    update and inject them into the handler's ``data`` mapping."""

    async def setup_chat(self, data: dict, tg_user: types.User, tg_chat: Optional[types.Chat] = None):
        """Populate ``data['user']`` / ``data['chat']``, creating missing rows."""
        uid = tg_user.id
        # Private conversations carry no separate chat: fall back to the user id.
        cid = tg_chat.id if tg_chat else tg_user.id

        # `or` short-circuits exactly like the original `if not user:` check:
        # the add_new_* call only runs when the lookup returned nothing.
        user = await User.get(uid) or await dbc.add_new_user(tg_user)
        chat = await Chat.get(cid) or await dbc.add_new_chat(tg_chat)

        data["user"] = user
        data["chat"] = chat

    async def on_pre_process_message(self, message: types.Message, data: dict):
        """Runs before every message handler."""
        await self.setup_chat(data, message.from_user, message.chat)

    async def on_pre_process_callback_query(self, query: types.CallbackQuery, data: dict):
        """Runs before every callback-query handler."""
        origin_chat = query.message.chat if query.message else None
        await self.setup_chat(data, query.from_user, origin_chat)
|
[
"[email protected]"
] | |
63b8925658c1f05ca2b3c52b232b086acf5307c0
|
f2b5889d73cc9fcfd58a2dc807253bd4796849b5
|
/naginpy/pipeforward.py
|
a4893a4b1d3370e7b48d50c402601de681886f75
|
[
"MIT"
] |
permissive
|
dalejung/naginpy
|
e290cb2d26728c625d9b4199dbf1956fe1f6a0c9
|
bbc2b380a278a129449ee170fb22efa7f687b6e8
|
refs/heads/master
| 2020-12-25T18:17:16.498018 | 2018-08-19T18:14:12 | 2018-08-19T18:14:12 | 23,586,699 | 4 | 1 |
MIT
| 2018-08-19T06:29:59 | 2014-09-02T16:40:21 |
Python
|
UTF-8
|
Python
| false | false | 419 |
py
|
"""
df = value %>%
sum %>%
filter(is_na) %>%
summarize
df = value |>
sum |>
filter(is_na) |>
summarize
with PipeForward(value) as df:
_ = value
_ = sum(_)
_ = filter(_, is_na)
_ = summarize(_)
df = _
with PipeForward(value):
sum
filter(10)
summarize
with value.pipe():
"""
with value.pipe():
sum #>>
filter(10) #>>
summarize
value >> sum
|
[
"[email protected]"
] | |
cc6aeb11c159d67d3188ad48a3943fd5c5bb5b57
|
34bf67017440fe47658559f91fe153c153a359f4
|
/126.py
|
ab76eec45e690df7ee056355c5e29df63513c5d3
|
[] |
no_license
|
KevinWangTHU/LeetCode
|
1be5f8f1ab587eea5365abb940785c9fe26f5214
|
a7916e0818b0853ec75e24724bde94c49234c7dc
|
refs/heads/master
| 2021-05-04T10:16:26.666260 | 2017-08-09T04:17:12 | 2017-08-09T04:18:49 | 53,427,005 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,917 |
py
|
import collections, string
class Solution(object):
    def findLadders(self, beginWord, endWord, wordlist):
        """
        Bidirectional BFS word ladder: return every shortest transformation
        sequence from beginWord to endWord ([] if none exists).
        NOTE: `wordlist` is mutated (visited words are discarded from it).
        :type beginWord: str
        :type endWord: str
        :type wordlist: Set[str]
        :rtype: List[List[int]]
        """
        # DFS over the successor tree to rebuild all shortest paths.
        def construct_paths(source, dest, tree):
            if source == dest:
                return [[source]]
            return [[source] + path for succ in tree[source] # path can be [] - for failed trials.
                    for path in construct_paths(succ, dest, tree)]
        # Record an edge; orientation depends on which frontier found it.
        def add_path(tree, word, neigh, is_forw):
            if is_forw:
                tree[word] += neigh,
            else:
                tree[neigh] += word,
        # Expand one BFS level, always growing the smaller frontier; returns
        # True once the forward and backward frontiers meet.
        def bfs_level(cur, other, tree, is_forw, wordlist):
            if not cur:
                return False
            if len(cur) > len(other):
                return bfs_level(other, cur, tree, not is_forw, wordlist)
            # Mark both frontiers as visited so later levels skip them.
            for word in (cur | other):
                wordlist.discard(word)
            next, done = set(), False  # NOTE(review): `next` shadows the builtin
            while cur:
                word = cur.pop()
                # All single-letter mutations of `word`.
                for neigh in [word[:idx] + c + word[idx+1:]
                              for c in string.ascii_lowercase
                              for idx in range(len(word))]:
                    if neigh in other:
                        done = True
                        add_path(tree, word, neigh, is_forw)
                    if not done and neigh in wordlist:
                        next.add(neigh)
                        add_path(tree, word, neigh, is_forw)
            return done or bfs_level(next, other, tree, is_forw, wordlist)
        tree, paths = collections.defaultdict(list), []  # `paths` is never used
        is_found = bfs_level(set([beginWord]), set([endWord]), tree, True, wordlist)
        # When nothing was found the tree is empty and this returns [].
        return construct_paths(beginWord, endWord, tree)
# Smoke test. Function-call print form so the file runs under both Python 2
# (where the original `print s...` statement was written) and Python 3.
s = Solution()
print(s.findLadders("hit", "dog", {"hog", "hig", "hip"}))
|
[
"[email protected]"
] | |
4e8773dfd7c43372b1e0e2487c9908b3ce02e2ec
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02695/s928241641.py
|
8a3650d94b02032a7e04ac7856e18f47bbcccc2d
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 888 |
py
|
import copy
import itertools
def gen_mono_inc_seqs(N, M, cur_list, cur_len):
    """Recursively extend every length-``cur_len`` prefix in *cur_list* to
    length *N*, appending only values between the prefix's last element and
    *M*, so each produced sequence is non-decreasing over 1..M."""
    if cur_len == N:
        return cur_list
    # `prefix + [nxt]` copies the prefix, same as the original copy+append.
    extended = [prefix + [nxt]
                for prefix in cur_list
                for nxt in range(prefix[-1], M + 1)]
    return gen_mono_inc_seqs(N, M, extended, cur_len + 1)
def mono_inc_seqs(N, M):
    """Return all non-decreasing length-*N* sequences over values 1..M.

    Uses itertools.combinations_with_replacement, which yields exactly the
    sequences the original recursive generator produced, in the same
    lexicographic order, without building intermediate prefix lists or
    risking deep recursion for large N.
    """
    return [list(seq)
            for seq in itertools.combinations_with_replacement(range(1, M + 1), N)]
# Read the instance: N sequence elements, values 1..M, Q scoring queries.
N, M, Q = map(int, input().split())
queries = [tuple(map(int, input().split())) for _ in range(Q)]

# Brute-force every non-decreasing sequence and keep the best query score.
max_result = -1
for seq in mono_inc_seqs(N, M):
    score = sum(d for a, b, c, d in queries if seq[b - 1] - seq[a - 1] == c)
    max_result = max(max_result, score)
print(max_result)
|
[
"[email protected]"
] | |
efcacf5019e593a4bf64f6c3a04e37e1c9331b44
|
c6588d0e7d361dba019743cacfde83f65fbf26b8
|
/x12/5030/435005030.py
|
a57f914a95dac66f74356e3869e7f5bc1cf84657
|
[] |
no_license
|
djfurman/bots-grammars
|
64d3b3a3cd3bd95d625a82204c3d89db6934947c
|
a88a02355aa4ca900a7b527b16a1b0f78fbc220c
|
refs/heads/master
| 2021-01-12T06:59:53.488468 | 2016-12-19T18:37:57 | 2016-12-19T18:37:57 | 76,887,027 | 0 | 0 | null | 2016-12-19T18:30:43 | 2016-12-19T18:30:43 | null |
UTF-8
|
Python
| false | false | 879 |
py
|
from bots.botsconfig import *
from records005030 import recorddefs
# Envelope-level settings for this message grammar.
syntax = {
    'version' : '00403', #version of ISA to send
    'functionalgroup' : 'RK',
    }
# Transaction-set layout: each entry is a segment ID with its min/max
# occurrence counts; LEVEL nests the segments/loops allowed under it.
structure = [
{ID: 'ST', MIN: 1, MAX: 1, LEVEL: [
    {ID: 'SID', MIN: 1, MAX: 9999, LEVEL: [
        {ID: 'N9', MIN: 0, MAX: 30},
        {ID: 'DTM', MIN: 0, MAX: 10},
        {ID: 'LQ', MIN: 0, MAX: 100, LEVEL: [
            {ID: 'MSG', MIN: 0, MAX: 100},
        ]},
        {ID: 'LX', MIN: 0, MAX: 4, LEVEL: [
            {ID: 'N9', MIN: 0, MAX: 50},
            {ID: 'LH3', MIN: 0, MAX: 100},
            {ID: 'LH2', MIN: 0, MAX: 8},
            {ID: 'LFH', MIN: 0, MAX: 20},
            {ID: 'LEP', MIN: 0, MAX: 3},
            {ID: 'LH4', MIN: 0, MAX: 4},
            {ID: 'CRC', MIN: 0, MAX: 5},
        ]},
    ]},
    {ID: 'SE', MIN: 1, MAX: 1},
]}
]
|
[
"[email protected]"
] | |
dc9a696d53a940224de5525365420e23e1c82e96
|
5077fc5d82caa3b3ed5ce0e062bfe75cd4037ebc
|
/forever_thinking/bilibili获取封面.py
|
260f9926d90e3490e4b217ca8bb4cc9d9081eb75
|
[] |
no_license
|
asswecanfat/git_place
|
ee10e1057d8307d3c72f57291b5bcb6d0579017e
|
244ff0de11ffbe1aa9f20308e43af39486507f6f
|
refs/heads/master
| 2021-07-18T23:06:14.324164 | 2020-09-02T12:15:27 | 2020-09-02T12:15:27 | 210,833,462 | 1 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,107 |
py
|
import requests
from bs4 import BeautifulSoup
from attr import attrib, attrs
import json
import re
import random
import os
@attrs
class BiliBili(object):
    """Fetches the cover image of the first Bilibili search hit for an av number.

    NOTE(review): attribute/parameter spellings `source_wab_url` and `reponse`
    are kept as-is to preserve the public interface.
    """
    file_path = attrib(default=r'C:\Users\10248\Desktop\1.txt')
    pic_path = attrib(default=r'C:\Users\10248\Desktop')
    source_wab_url = attrib(default='https://search.bilibili.com/all?keyword=')
    headers = attrib(default={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                                            'AppleWebKit/537.36 (KHTML, like Gecko) '
                                            'Chrome/75.0.3770.142 Safari/537.36'})

    def update_url(self, av_num):
        """Append the av number (and search source tag) to the search URL."""
        self.source_wab_url = '{}{}{}'.format(self.source_wab_url, av_num, '&from_source=banner_search')

    def get_url_data(self, url):
        """GET *url* with the browser-like headers; returns the Response."""
        return requests.get(url, headers=self.headers)  # reponse

    def download_cover(self):
        """Download the first search hit's cover into a random .jpg under pic_path."""
        reponse = self.get_url_data(self.source_wab_url)
        # BUG FIX: the original called `self.source_wab_url.__init__()`, which
        # is a no-op on an existing str, so the av number stayed glued onto the
        # URL and broke any subsequent update_url() call. Reset it explicitly.
        self.source_wab_url = 'https://search.bilibili.com/all?keyword='
        # self.test_save_data(reponse)
        # NOTE(review): the leading space in ' http:' looks accidental — verify
        # that requests tolerates it before relying on this.
        pic_url = '{}{}'.format(' http:', self.deal_web_data(reponse))
        # Pick a random filename that does not collide with an existing file.
        final_pic_path = r'{}\{}'.format(self.pic_path, str(random.randint(0, 1000)) + '.jpg')
        while os.path.exists(final_pic_path):
            final_pic_path = r'{}\{}'.format(self.pic_path, str(random.randint(0, 1000)) + '.jpg')
        with open(final_pic_path, 'wb') as f:
            f.write(self.get_url_data(pic_url).content)
        print('封面获取成功!')

    def deal_web_data(self, reponse):
        """Extract the first video's cover URL from the search page's embedded JSON."""
        soup = BeautifulSoup(reponse.text, 'lxml')
        point = soup.find_all('script')
        # The 7th <script> contains `<state> = {...};(...)` — split out the JSON payload.
        real_data = re.split(r'=|;\(', point[6].text)[1]
        now = json.loads(real_data)
        return now['allData']['video'][0]['pic']

    def test_save_data(self, reponse):
        """Debug helper: dump the raw response body to file_path."""
        with open(self.file_path, 'wb') as f:
            f.write(reponse.content)
if __name__ == '__main__':
    # Interactive entry point: ask for an av number and grab its cover.
    downloader = BiliBili()
    downloader.update_url(input('请输入av号:'))
    downloader.download_cover()
|
[
"[email protected]"
] | |
afca61d5d8ba52a219c2ad7064268eca41cd96c6
|
495ce92166457a6d5818d786a6a3303d3280fcd0
|
/src/registration/urls.py
|
ac889db2d836112cd2cb69c66483cb85276e9187
|
[] |
no_license
|
patrickhusi/django-inspectional-registration
|
616e7d44716c41b09b32c30415a1cf86d3b7324f
|
c0aee3ddc4f1a5e870643a605d8a9575b3a7520f
|
refs/heads/master
| 2020-12-25T22:57:45.123082 | 2015-08-01T00:19:32 | 2015-08-01T00:19:32 | 39,487,644 | 0 | 0 | null | 2015-07-22T05:35:21 | 2015-07-22T05:35:21 | null |
UTF-8
|
Python
| false | false | 2,964 |
py
|
# coding=utf-8
"""
URLconf for django-inspectional-registration
"""
__author__ = 'Alisue <[email protected]>'
from registration.compat import url
from registration.compat import patterns
from registration.views import RegistrationView
from registration.views import RegistrationClosedView
from registration.views import RegistrationCompleteView
from registration.views import ActivationView
from registration.views import ActivationCompleteView
# Registration / activation workflow routes.
urlpatterns = patterns('',
    url(r'^activate/complete/$', ActivationCompleteView.as_view(),
        name='registration_activation_complete'),
    url(r'^activate/(?P<activation_key>\w+)/$', ActivationView.as_view(),
        name='registration_activate'),
    url(r'^register/$', RegistrationView.as_view(),
        name='registration_register'),
    url(r'^register/closed/$', RegistrationClosedView.as_view(),
        name='registration_disallowed'),
    url(r'^register/complete/$', RegistrationCompleteView.as_view(),
        name='registration_complete'),
)
# django.contrib.auth
# Optionally expose the stock django.contrib.auth views (login/logout,
# password change/reset) under configurable name prefix/suffix.
from registration.conf import settings
from django.contrib.auth import views as auth_views
if settings.REGISTRATION_DJANGO_AUTH_URLS_ENABLE:
    prefix = settings.REGISTRATION_DJANGO_AUTH_URL_NAMES_PREFIX
    suffix = settings.REGISTRATION_DJANGO_AUTH_URL_NAMES_SUFFIX
    import django
    if django.VERSION >= (1, 6):
        # Django >= 1.6 encodes the user id as base64 (`uidb64`) in reset links.
        uidb = r"(?P<uidb64>[0-9A-Za-z_\-]+)"
        token = r"(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})"
        password_reset_confirm_rule = (
            r"^password/reset/confirm/%s/%s/$" % (uidb, token)
        )
    else:
        # Older Django uses base36 user ids joined to the token by a dash.
        uidb = r"(?P<uidb36>[0-9A-Za-z]+)"
        token = r"(?P<token>.+)"
        password_reset_confirm_rule = (
            r"^password/reset/confirm/%s-%s/$" % (uidb, token)
        )
    urlpatterns += patterns('',
        url(r'^login/$', auth_views.login,
            {'template_name': 'registration/login.html'},
            name=prefix+'login'+suffix),
        url(r'^logout/$', auth_views.logout,
            {'template_name': 'registration/logout.html'},
            name=prefix+'logout'+suffix),
        url(r'^password/change/$', auth_views.password_change,
            name=prefix+'password_change'+suffix),
        url(r'^password/change/done/$', auth_views.password_change_done,
            name=prefix+'password_change_done'+suffix),
        url(r'^password/reset/$', auth_views.password_reset,
            name=prefix+'password_reset'+suffix, kwargs=dict(
                post_reset_redirect=prefix+'password_reset_done'+suffix)),
        url(password_reset_confirm_rule,
            auth_views.password_reset_confirm,
            name=prefix+'password_reset_confirm'+suffix),
        url(r'^password/reset/complete/$', auth_views.password_reset_complete,
            name=prefix+'password_reset_complete'+suffix),
        url(r'^password/reset/done/$', auth_views.password_reset_done,
            name=prefix+'password_reset_done'+suffix),
    )
|
[
"[email protected]"
] | |
5d241edba0322488b4b7f84cee1a16c8cd0b1bd6
|
cdd0fa35e6867932d9821b54f3e9897306139d1a
|
/myPracticeProblems/ordered_dict.py
|
ac21f387d95bb5f5a10a305313ea69109d20cc7d
|
[] |
no_license
|
jisshub/python-development
|
cfd4246981999d5bc8cfe4cc15a57ebfada2691e
|
392e7362bf8e83930d410984e985d73a0a2f40d1
|
refs/heads/master
| 2021-01-05T02:25:12.896814 | 2020-03-23T16:05:25 | 2020-03-23T16:05:25 | 240,844,792 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 326 |
py
|
from collections import OrderedDict

# Repeated assignment to the same key keeps a single entry in an OrderedDict,
# just like in a plain dict: later writes overwrite the stored value in place.
ordered_dict = OrderedDict()
for _ in range(5):
    ordered_dict["jissmon"] = 33
print(ordered_dict)

# Plain dicts behave the same way: duplicate keys collapse to one entry.
new_dict = {}
new_dict["a"] = 44
new_dict["a"] = 44
new_dict["b"] = 44
print(new_dict)
|
[
"[email protected]"
] | |
b0b42a8618f56c00d5b0d03cce3873bd96adb26e
|
d41d18d3ea6edd2ec478b500386375a8693f1392
|
/plotly/validators/scatter3d/line/_showscale.py
|
534d53f00aee0a02ffb55e951c76e575cebf5dfe
|
[
"MIT"
] |
permissive
|
miladrux/plotly.py
|
38921dd6618650d03be9891d6078e771ffccc99a
|
dbb79e43e2cc6c5762251537d24bad1dab930fff
|
refs/heads/master
| 2020-03-27T01:46:57.497871 | 2018-08-20T22:37:38 | 2018-08-20T22:37:38 | 145,742,203 | 1 | 0 |
MIT
| 2018-08-22T17:37:07 | 2018-08-22T17:37:07 | null |
UTF-8
|
Python
| false | false | 425 |
py
|
import _plotly_utils.basevalidators
class ShowscaleValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Boolean validator for the `showscale` property of `scatter3d.line`.

    Generated plotly boilerplate: everything is delegated to the base
    BooleanValidator with the property/parent names pinned below.
    """
    def __init__(
        self, plotly_name='showscale', parent_name='scatter3d.line', **kwargs
    ):
        super(ShowscaleValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type='calc',  # changing the value triggers a full recalculation
            role='info',
            **kwargs
        )
|
[
"[email protected]"
] | |
b1a1e15b3a0558a5a77872235e3522ea33bab5cc
|
43ab33b2f50e47f5dbe322daa03c86a99e5ee77c
|
/rcc/models/jaxb_element.py
|
49e4e3b8f1e30a23cafa6a6b5a8c3fbc12ef4791
|
[] |
no_license
|
Sage-Bionetworks/rcc-client
|
c770432de2d2950e00f7c7bd2bac22f3a81c2061
|
57c4a621aecd3a2f3f9faaa94f53b2727992a01a
|
refs/heads/main
| 2023-02-23T05:55:39.279352 | 2021-01-21T02:06:08 | 2021-01-21T02:06:08 | 331,486,099 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,874 |
py
|
# coding: utf-8
"""
nPhase REST Resource
REDCap REST API v.2 # noqa: E501
The version of the OpenAPI document: 2.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from rcc.configuration import Configuration
class JAXBElement(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'name': 'QName',
        'value': 'object',
        'nil': 'bool',
        'global_scope': 'bool',
        'type_substituted': 'bool'
    }
    attribute_map = {
        'name': 'name',
        'value': 'value',
        'nil': 'nil',
        'global_scope': 'globalScope',
        'type_substituted': 'typeSubstituted'
    }
    def __init__(self, name=None, value=None, nil=None, global_scope=None, type_substituted=None, local_vars_configuration=None):  # noqa: E501
        """JAXBElement - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        # Backing fields; the public attributes below go through properties.
        self._name = None
        self._value = None
        self._nil = None
        self._global_scope = None
        self._type_substituted = None
        self.discriminator = None
        # Only assign what the caller provided, so unset fields stay None.
        if name is not None:
            self.name = name
        if value is not None:
            self.value = value
        if nil is not None:
            self.nil = nil
        if global_scope is not None:
            self.global_scope = global_scope
        if type_substituted is not None:
            self.type_substituted = type_substituted
    @property
    def name(self):
        """Gets the name of this JAXBElement.  # noqa: E501
        :return: The name of this JAXBElement.  # noqa: E501
        :rtype: QName
        """
        return self._name
    @name.setter
    def name(self, name):
        """Sets the name of this JAXBElement.
        :param name: The name of this JAXBElement.  # noqa: E501
        :type: QName
        """
        self._name = name
    @property
    def value(self):
        """Gets the value of this JAXBElement.  # noqa: E501
        :return: The value of this JAXBElement.  # noqa: E501
        :rtype: object
        """
        return self._value
    @value.setter
    def value(self, value):
        """Sets the value of this JAXBElement.
        :param value: The value of this JAXBElement.  # noqa: E501
        :type: object
        """
        self._value = value
    @property
    def nil(self):
        """Gets the nil of this JAXBElement.  # noqa: E501
        :return: The nil of this JAXBElement.  # noqa: E501
        :rtype: bool
        """
        return self._nil
    @nil.setter
    def nil(self, nil):
        """Sets the nil of this JAXBElement.
        :param nil: The nil of this JAXBElement.  # noqa: E501
        :type: bool
        """
        self._nil = nil
    @property
    def global_scope(self):
        """Gets the global_scope of this JAXBElement.  # noqa: E501
        :return: The global_scope of this JAXBElement.  # noqa: E501
        :rtype: bool
        """
        return self._global_scope
    @global_scope.setter
    def global_scope(self, global_scope):
        """Sets the global_scope of this JAXBElement.
        :param global_scope: The global_scope of this JAXBElement.  # noqa: E501
        :type: bool
        """
        self._global_scope = global_scope
    @property
    def type_substituted(self):
        """Gets the type_substituted of this JAXBElement.  # noqa: E501
        :return: The type_substituted of this JAXBElement.  # noqa: E501
        :rtype: bool
        """
        return self._type_substituted
    @type_substituted.setter
    def type_substituted(self, type_substituted):
        """Sets the type_substituted of this JAXBElement.
        :param type_substituted: The type_substituted of this JAXBElement.  # noqa: E501
        :type: bool
        """
        self._type_substituted = type_substituted
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively serialise nested models, lists of models and dicts of
        # models by calling their own to_dict() where available.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        # Equality is structural: compare the serialised dicts.
        if not isinstance(other, JAXBElement):
            return False
        return self.to_dict() == other.to_dict()
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, JAXBElement):
            return True
        return self.to_dict() != other.to_dict()
|
[
"[email protected]"
] | |
46a6be98cd37c203fd6efd53b180795a67a6b079
|
ecff7ab1d962ff895b3e9a0b4239329dd03ce966
|
/webpage_text/__init__.py
|
b20daaa188f87b44418af0b010d45a46826360d1
|
[
"MIT"
] |
permissive
|
MSLNZ/pr-webpage-text
|
ea91e138b3e476688a07210e2b0625cb23538ff8
|
7790e8bbeb5cfbb9c0d7ac508903acd7414ff9d5
|
refs/heads/main
| 2022-09-15T12:26:29.947169 | 2022-08-05T21:21:26 | 2022-08-05T21:21:26 | 227,973,198 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,399 |
py
|
import os
import re
import sys
import argparse
import configparser
from gevent import monkey
monkey.patch_all()
import gevent
from gevent import pywsgi
import requests
from flask import (
Flask,
Markup,
render_template,
request,
send_from_directory,
)
# Keep gevent from reporting KeyboardInterrupt as a hub error on shutdown.
gevent.get_hub().NOT_ERROR += (KeyboardInterrupt,)
# Default TCP port used by both the server (run) and the client helper (put).
PORT = 1683
# Module-level state shared by the Flask views below:
endpoint_dict = {}  # endpoint name -> {'text', 'size', 'refresh'}
default_dict = {}   # server-wide defaults; put() also caches per-URL defaults here
default_endpoint = 'defaults'  # reserved endpoint that exposes the defaults
app = Flask(__name__)
@app.errorhandler(404)
def page_not_found(*args):
    # Custom 404 that lists the registered endpoint names so users can self-correct.
    return render_template('page_not_found.html', names=endpoint_dict.keys(), url=request.host_url), 404
@app.route('/favicon.ico')
def favicon():
    # Serve the site icon from the static folder with an explicit mimetype.
    return send_from_directory('static', 'favicon.ico', mimetype='image/vnd.microsoft.icon')
@app.route('/<name>', methods=['GET', 'PUT'])
def page(name):
    # GET renders the endpoint's page; PUT updates its text/size/refresh state.
    if name not in endpoint_dict:
        return page_not_found()
    if request.method == 'PUT':
        # NOTE(review): assumes the PUT body is JSON with a 'text' key (put()
        # below always sends one); a malformed body would raise here.
        data = request.json
        # Convert newlines (literal or escaped) to <br>; Markup keeps them un-escaped.
        data['text'] = Markup(re.sub(r'\n|\\n', '<br>', data['text']))
        endpoint_dict[name].update(data)
    return render_template('page.html', title=name, **endpoint_dict[name])
@app.route('/'+default_endpoint, methods=['GET'])
def defaults():
    # Expose the server's default size/refresh as JSON (consumed by put()).
    return default_dict
def run(*args):
    """Run the web server.
    This function is only meant to be called from the command line via the
    `webpage-text` entry point (see setup.py).
    """
    # Built-in fallbacks; a config file or CLI flags override them below.
    host = '0.0.0.0'
    text = ''
    size = 100
    refresh = 1.0
    use_flask = False
    enable_log = False
    parser = argparse.ArgumentParser(description='Start a web server to display text on a web page.')
    parser.add_argument(
        '-c', '--config',
        help='path to a configuration file (INI format)'
    )
    parser.add_argument(
        '-H', '--host', default=host,
        help='hostname or IP address of the server [default={}]'.format(host)
    )
    parser.add_argument(
        '-p', '--port', default=PORT, type=int,
        help='port to run the server on [default={}]'.format(PORT)
    )
    parser.add_argument(
        '-e', '--endpoints', nargs='*',
        help='the names of the URL endpoints'
    )
    parser.add_argument(
        '-t', '--text', default=text, nargs='*',
        help='initial text to display at each endpoint [default={!r}]'.format(text)
    )
    parser.add_argument(
        '-s', '--size', default=size, type=int,
        help='font size (in px) of the text [default={}]'.format(size)
    )
    parser.add_argument(
        '-r', '--refresh', default=refresh, type=float,
        help='number of seconds for a web browser to wait before automatically '
             'refreshing the web page [default={}]'.format(refresh)
    )
    parser.add_argument(
        '-l', '--log', action='store_true', help='show INFO log messages from the gevent WSGI server'
    )
    parser.add_argument(
        '-f', '--flask', action='store_true', help='use the flask development server in debug mode'
    )
    if not args:
        args = sys.argv[1:]
    args = parser.parse_args(args)
    if args.config is not None:
        # Config-file mode: the INI file wins; CLI flags (other than -c) are ignored.
        if not os.path.isfile(args.config):
            sys.exit('FileNotFoundError: ' + args.config)
        ini = configparser.ConfigParser()
        ini.read(args.config)
        host = ini.get('server', 'host', fallback=host)
        port = ini.getint('server', 'port', fallback=PORT)
        endpoints = [e.strip() for e in ini.get('server', 'endpoints', fallback='').split(',') if e.strip()]
        use_flask = ini.getboolean('server', 'use_flask', fallback=use_flask)
        enable_log = ini.getboolean('server', 'enable_log', fallback=enable_log)
        text = ini.get('text', 'initial', fallback=text)
        size = ini.getint('text', 'size', fallback=size)
        refresh = ini.getfloat('text', 'refresh', fallback=refresh)
    else:
        host = args.host
        port = args.port
        endpoints = args.endpoints
        use_flask = args.flask
        enable_log = args.log
        text = ' '.join(args.text) if args.text else args.text
        size = args.size
        refresh = args.refresh
    if not endpoints:
        sys.exit('You must specify at least 1 endpoint')
    # Register each endpoint's initial page state.
    for endpoint in endpoints:
        if endpoint == default_endpoint:
            sys.exit('The name of an endpoint cannot be {!r} because this name is reserved'.format(default_endpoint))
        print('Added endpoint http://{}:{}/{}'.format(host, port, endpoint))
        endpoint_dict[endpoint] = {'text': text, 'size': size, 'refresh': refresh}
    # Publish the defaults that /defaults (and therefore put()) will serve.
    default_dict['size'] = size
    default_dict['refresh'] = refresh
    if use_flask:
        # use the development server from flask
        app.run(host=host, port=port, debug=True)
    else:
        print('Server running on http://{}:{}/ (Press CTRL+C to quit)'.format(host, port))
        log = 'default' if enable_log else None
        server = pywsgi.WSGIServer((host, port), application=app.wsgi_app, log=log)
        try:
            server.serve_forever()
        except KeyboardInterrupt:
            pass
def put(text, endpoint, host='127.0.0.1', port=PORT, size=None, refresh=None):
    """Update the text that is displayed on a web page.
    The URL of the web page to update follows the ``http://host:port/endpoint`` nomenclature.
    Parameters
    ----------
    text : str
        The text to display on the web page.
    endpoint : str
        The endpoint of the web page's URL.
    host : str, optional
        The hostname or IP address of the web server.
    port : int, optional
        The port number of the web server.
    size : int, optional
        The font size of the `text`.
    refresh : float, optional
        The number of second a web browser will wait before it automatically refreshes.
    """
    url = 'http://{}:{}/'.format(host, port)
    try:
        # Per-server cache: fetch the /defaults endpoint only once per base URL.
        default = default_dict[url]
    except KeyError:
        default = requests.get(url+default_endpoint).json()
        default_dict[url] = {'size': default['size'], 'refresh': default['refresh']}
    if size is None:
        size = default['size']
    if refresh is None:
        refresh = default['refresh']
    reply = requests.put(url+endpoint.lstrip('/'), json={'text': text, 'size': size, 'refresh': refresh})
    if not reply.ok:
        # The 404 page lists the valid endpoints; scrape them for the error message.
        matches = re.findall(r'/(\w+)</p>', reply.content.decode())
        raise ValueError('Invalid endpoint {!r}. Must be one of: {}'.format(endpoint, ', '.join(matches)))
|
[
"[email protected]"
] | |
20f48de587f36ac22f7b751403edee7311221783
|
49536aafb22a77a6caf249c7fadef46d63d24dfe
|
/tensorflow/tensorflow/python/ops/linalg/linalg.py
|
22c87ea697b7d702dec0fb5fe037ea1157fdaf58
|
[
"Apache-2.0"
] |
permissive
|
wangzhi01/deeplearning-1
|
4e5ad93f0d9ecd302b74352f80fe1fa6ae70bf0d
|
46ab82253d956953b8aa98e97ceb6cd290e82288
|
refs/heads/master
| 2020-05-28T03:14:55.687567 | 2018-09-12T16:52:09 | 2018-09-12T16:52:09 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,785 |
py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Public API for tf.linalg namespace."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_linalg_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import special_math_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import,unused-import
from tensorflow.python.ops.linalg.linalg_impl import *
from tensorflow.python.ops.linalg.linear_operator import *
from tensorflow.python.ops.linalg.linear_operator_composition import *
from tensorflow.python.ops.linalg.linear_operator_diag import *
from tensorflow.python.ops.linalg.linear_operator_full_matrix import *
from tensorflow.python.ops.linalg.linear_operator_identity import *
from tensorflow.python.ops.linalg.linear_operator_low_rank_update import *
from tensorflow.python.ops.linalg.linear_operator_lower_triangular import *
# pylint: enable=wildcard-import
# Linear algebra ops.
# Aliases that re-export the internal op implementations under the public
# tf.linalg names.
band_part = array_ops.matrix_band_part
cholesky = linalg_ops.cholesky
cholesky_solve = linalg_ops.cholesky_solve
det = linalg_ops.matrix_determinant
# pylint: disable=protected-access
slogdet = gen_linalg_ops._log_matrix_determinant
# pylint: disable=protected-access
diag = array_ops.matrix_diag
diag_part = array_ops.matrix_diag_part
eigh = linalg_ops.self_adjoint_eig
eigvalsh = linalg_ops.self_adjoint_eigvals
einsum = special_math_ops.einsum
eye = linalg_ops.eye
inv = linalg_ops.matrix_inverse
lstsq = linalg_ops.matrix_solve_ls
norm = linalg_ops.norm
qr = linalg_ops.qr
set_diag = array_ops.matrix_set_diag
solve = linalg_ops.matrix_solve
svd = linalg_ops.svd
tensordot = math_ops.tensordot
trace = math_ops.trace
transpose = array_ops.matrix_transpose
triangular_solve = linalg_ops.matrix_triangular_solve
# Seal API.
# Remove the implementation-module names so they don't leak into tf.linalg.
del absolute_import
del array_ops
del division
del gen_linalg_ops
# NOTE(review): `ops` is not imported explicitly in this file — it presumably
# arrives via one of the wildcard imports above; if not, `del ops` would raise
# NameError at import time. Verify against the wildcard modules.
del linalg_ops
del math_ops
del ops
del print_function
del special_math_ops
|
[
"[email protected]"
] | |
cd5ab0ff640c9c8555b6af3aad71c70091b91ec4
|
2760effda15d884af413ca2a35809d03fabea377
|
/lc-1222.py
|
fb44d86b4ecfc652aaac148671173ef0b40bbe00
|
[] |
no_license
|
UtsavRaychaudhuri/leetcode
|
31943b98ad89d96d72ee4b6b1d1c8d70429d1e1f
|
77a13580fd6231830558b1cf8c84f8b3b62b99d0
|
refs/heads/master
| 2020-11-27T18:02:23.712639 | 2020-09-29T19:39:49 | 2020-09-29T19:39:49 | 229,552,583 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,248 |
py
|
class Solution(object):
    """LeetCode 1222 - Queens That Can Attack the King.

    From the king's square, scan outward along the four straight lines
    and the four diagonals; the first queen met in each direction is an
    attacker.
    """

    def __init__(self):
        # Result accumulator; reset at the start of every query.
        self.outarray = []

    def queensAttacktheKing(self, queens, king):
        """
        :type queens: List[List[int]]
        :type king: List[int]
        :rtype: List[List[int]]
        """
        # Bug fix: reset the accumulator so a reused Solution instance
        # does not return queens left over from a previous call.
        self.outarray = []
        self.checkleft(king, queens)
        self.checkup(king, queens)
        self.checkdown(king, queens)
        self.checkright(king, queens)
        self.checkdiagonal(king, queens)
        return self.outarray

    def checkleft(self, king, queens):
        # Scan along the king's column, i decreasing; first match wins.
        j = king[1]
        for i in range(king[0], -1, -1):
            if [i, j] in queens:
                self.outarray.append([i, j])
                break

    def checkright(self, king, queens):
        # Scan along the king's row, j increasing.
        i = king[0]
        for j in range(king[1], 10):
            if [i, j] in queens:
                self.outarray.append([i, j])
                break

    def checkup(self, king, queens):
        # Scan along the king's column, i increasing.
        j = king[1]
        for i in range(king[0], 10):
            if [i, j] in queens:
                self.outarray.append([i, j])
                break

    def checkdown(self, king, queens):
        # Scan along the king's row, j decreasing.
        i = king[0]
        for j in range(king[1], -1, -1):
            if [i, j] in queens:
                self.outarray.append([i, j])
                break

    def checkdiagonal(self, king, queens):
        # Walk the four diagonals away from the king. All four walks start
        # on the king's own square, so the `not in self.outarray` guard
        # prevents double-counting a queen already recorded.
        i = king[0]
        j = king[1]
        while i >= 0 and j >= 0:  # i and j both decreasing
            if [i, j] in queens and [i, j] not in self.outarray:
                self.outarray.append([i, j])
                break
            i -= 1
            j -= 1
        i, j = king[0], king[1]
        while i <= 9 and j <= 9:  # i and j both increasing
            if [i, j] in queens and [i, j] not in self.outarray:
                self.outarray.append([i, j])
                break
            i += 1
            j += 1
        i, j = king[0], king[1]
        while j >= 0 and i <= 9:  # i increasing, j decreasing
            if [i, j] in queens and [i, j] not in self.outarray:
                self.outarray.append([i, j])
                break
            i += 1
            j -= 1
        i, j = king[0], king[1]
        while i >= 0 and j <= 9:  # i decreasing, j increasing
            if [i, j] in queens and [i, j] not in self.outarray:
                self.outarray.append([i, j])
                break
            j += 1
            i -= 1
|
[
"[email protected]"
] | |
09267857397c18219dcb468ef2b121a2fea8f574
|
c83e356d265a1d294733885c373d0a4c258c2d5e
|
/mayan/apps/locales/managers.py
|
b80a4b8368ef07497f74fee837058582ac4e31a0
|
[
"Apache-2.0"
] |
permissive
|
TrellixVulnTeam/fall-2021-hw2-451-unavailable-for-legal-reasons_6YX3
|
4160809d2c96707a196b8c94ea9e4df1a119d96a
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
refs/heads/master
| 2023-08-21T23:36:41.230179 | 2021-10-02T03:51:12 | 2021-10-02T03:51:12 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 424 |
py
|
from django.contrib.auth import get_user_model
from django.db import models
class UserLocaleProfileManager(models.Manager):
    """Manager that resolves locale profiles through their user's natural key."""

    def get_by_natural_key(self, user_natural_key):
        # Resolve the natural key to a concrete user first; translate a
        # missing user into the profile model's own DoesNotExist so
        # callers only need to catch one exception type.
        user_model = get_user_model()
        try:
            matched_user = user_model.objects.get_by_natural_key(user_natural_key)
        except user_model.DoesNotExist:
            raise self.model.DoesNotExist
        return self.get(user__pk=matched_user.pk)
|
[
"[email protected]"
] | |
498488d0e02adf53cce7096cd9c7afa81a6a5814
|
64267b1f7ca193b0fab949089b86bc7a60e5b859
|
/slehome/account/migrations/0046_auto_20150130_0600.py
|
4d7e8d0246f4262cdd73c9abdd7338982e3d2674
|
[] |
no_license
|
hongdangodori/slehome
|
6a9f2b4526c2783932627b982df0540762570bff
|
3e558c78c3943dadf0ec485738a0cc98dea64353
|
refs/heads/master
| 2021-01-17T12:00:34.221088 | 2015-02-06T13:44:00 | 2015-02-06T13:44:00 | 28,847,585 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 531 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated: alter the default of BasicMemberInformation.auth_key."""

    dependencies = [
        ('account', '0045_auto_20150130_0558'),
    ]

    operations = [
        migrations.AlterField(
            model_name='basicmemberinformation',
            name='auth_key',
            # NOTE(review): the default is a single hard-coded hex token, so
            # every new row gets the *same* auth_key — confirm this is intended.
            field=models.CharField(default='43f9a685bc7146b4ecc63bdf9bc3e5136b7543f436a42e4a2f2ae749ffb0c6db', max_length=64),
            preserve_default=True,
        ),
    ]
|
[
"[email protected]"
] | |
69ed92de644fca515a276845a1ab3c88f930d96c
|
ecf6fe6aa87b2c3f041acc30fab11b0cafe3dd46
|
/architecture_py/archi_v3_4.py
|
c44736bffc6e0190265c5c5a8ec71479998ec8b7
|
[] |
no_license
|
antgratia/Memoire_code
|
73c7806c4576c2e73e00d9a84b1063a2c8f6b559
|
2cdc1339ea24896a6628238f6467edff80f98166
|
refs/heads/main
| 2023-06-20T16:19:07.041464 | 2021-07-13T11:53:48 | 2021-07-13T11:53:48 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,719 |
py
|
import numpy as np
import os
from keras import backend as K
from tensorflow import keras
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.models import Sequential, Model,load_model
from tensorflow.keras.layers import Input, Add, Dense, Activation, ZeroPadding2D, BatchNormalization, Flatten, Conv2D, AveragePooling2D, MaxPooling2D, GlobalMaxPooling2D, GlobalAveragePooling2D, MaxPool2D, Concatenate, Dropout
from tensorflow.keras.initializers import glorot_uniform
from tensorflow.keras.utils import plot_model
import tensorflow as tf
import sys
import traceback
import csv
from time import time
type_archi = 'ALL'
# Hyper-parameters shared by the block-builder helpers below.
epsilon = 0.0          # BatchNormalization epsilon; 0 disables batch norm in every block
dropout_rate = 0.4     # 0 would disable Dropout in the dense/transition blocks
axis = 3               # normalization axis (last axis of the 32x32x3 inputs)
compress_factor = 0.5  # NOTE(review): never read in this file
# load dataset
(train_x, train_y), (test_x, test_y) = keras.datasets.cifar10.load_data()
# normalize to range 0-1
train_x = train_x / 255.0
test_x = test_x / 255.0
val_x = train_x[:5000]  # NOTE(review): carved out but never used (fit uses validation_split)
val_y = train_y[:5000]
# init training time
training_time = 0
# init result test/train (empty string = "not measured")
test_result_loss = ""
test_result_acc = ""
train_result_loss = ""
train_result_acc = ""
nb_layers = "not build"
def id_block(X, f, filters, activation):
    """Identity residual block: 1x1 conv, then fxf conv, plus a skip add.

    Both convolutions use stride 1 and 'same' padding, so the input can be
    added back unchanged; channel counts must already match `filters`.
    """
    shortcut = X
    out = Conv2D(filters=filters, kernel_size=(1, 1), strides=(1, 1), padding='same', kernel_initializer=glorot_uniform(seed=0))(X)
    if epsilon != 0:  # epsilon == 0 disables batch norm globally
        out = BatchNormalization(epsilon=epsilon, axis=axis)(out)
    out = Activation(activation)(out)
    out = Conv2D(filters=filters, kernel_size=(f, f), strides=(1, 1), padding='same', kernel_initializer=glorot_uniform(seed=0))(out)
    if epsilon != 0:
        out = BatchNormalization(epsilon=epsilon, axis=axis)(out)
    out = Add()([out, shortcut])  # SKIP Connection
    return Activation(activation)(out)
def conv_block(X, f, filters, activation, s=2):
    """Projection residual block: strided 1x1 conv then fxf conv, with the
    shortcut also projected by a strided 1x1 conv so shapes match at the add.
    """
    residual = Conv2D(filters=filters, kernel_size=(1, 1), strides=(s, s), padding='valid', kernel_initializer=glorot_uniform(seed=0))(X)
    if epsilon != 0:  # epsilon == 0 disables batch norm globally
        residual = BatchNormalization(epsilon=epsilon, axis=axis)(residual)
    residual = Activation(activation)(residual)
    residual = Conv2D(filters=filters, kernel_size=(f, f), strides=(1, 1), padding='same', kernel_initializer=glorot_uniform(seed=0))(residual)
    if epsilon != 0:
        residual = BatchNormalization(epsilon=epsilon, axis=axis)(residual)
    # Project the shortcut to the same shape before the add.
    shortcut = Conv2D(filters=filters, kernel_size=(1, 1), strides=(s, s), padding='valid', kernel_initializer=glorot_uniform(seed=0))(X)
    if epsilon != 0:
        shortcut = BatchNormalization(epsilon=epsilon, axis=axis)(shortcut)
    merged = Add()([residual, shortcut])
    return Activation(activation)(merged)
def denseBlock(X, f, nb_filter, nb_layer, padding, activation):
    """Dense block: nb_layer stages of (BN) - activation - conv - (dropout),
    each stage re-concatenated with the block's original input."""
    block_input = X
    out = X
    for _ in range(nb_layer):
        if epsilon != 0:  # epsilon == 0 disables batch norm globally
            out = BatchNormalization(epsilon=epsilon, axis=axis)(out)
        out = Activation(activation)(out)
        out = Conv2D(filters=nb_filter, kernel_size=(f, f), strides=(1, 1), padding=padding)(out)
        if dropout_rate != 0:
            out = Dropout(dropout_rate)(out)
        out = Concatenate()([out, block_input])
    return out
def transition_block(X, f, nb_filter, padding, activation, op, stride):
    """Transition: (BN) - activation - conv - (dropout) followed by pooling.

    `op` selects the pooling flavour: 'avg' for average pooling, anything
    else falls back to max pooling.
    """
    out = X
    if epsilon != 0:  # epsilon == 0 disables batch norm globally
        out = BatchNormalization(epsilon=epsilon, axis=axis)(out)
    out = Activation(activation)(out)
    out = Conv2D(filters=nb_filter, kernel_size=(f, f), strides=(1, 1), padding=padding)(out)
    if dropout_rate != 0:
        out = Dropout(dropout_rate)(out)
    pool_layer = AveragePooling2D if op == 'avg' else MaxPooling2D
    return pool_layer(pool_size=f, strides=stride, padding=padding)(out)
try:
    def getModel():
        """Build the CNN: conv stem, two projection residual blocks,
        global max pooling, 10-way softmax head for CIFAR-10."""
        X_input = X = Input([32, 32, 3])
        X = Conv2D(18, kernel_size=5, strides=5, activation='relu', padding='valid')(X)
        X = conv_block(X, 2, 36, 'selu', 1)
        X = Conv2D(72, kernel_size=7, strides=2, activation='relu', padding='same')(X)
        X = conv_block(X, 7, 144, 'tanh', 7)
        X = GlobalMaxPooling2D()(X)
        X = Dense(10, activation='softmax')(X)
        model = Model(inputs=X_input, outputs=X)
        return model

    model = getModel()
    #plot_model(model, show_shapes=True, to_file="../architecture_img/archi_v3_4.png")

    model.compile(optimizer='adam', loss=keras.losses.sparse_categorical_crossentropy, metrics=['accuracy'])

    start = time()
    es = tf.keras.callbacks.EarlyStopping(monitor='loss', verbose=1, restore_best_weights=True, patience=1)
    list_cb = [es]
    history = model.fit(train_x, train_y, epochs=50, batch_size=64, validation_split=0.3, callbacks=list_cb)
    training_time = time()-start

    # Evaluate each split once and reuse the result. The original called
    # model.evaluate() repeatedly (a full pass over the data each time) just
    # to re-read the same numbers.
    test_metrics = model.evaluate(test_x, test_y)
    train_metrics = model.evaluate(train_x, train_y)
    print(test_metrics)

    log_file = open("../architecture_log/archi_v3_4.log", "w")
    # save test result
    log_file.write('test result : ' + str(test_metrics))
    test_result_loss = test_metrics[0]
    test_result_acc = test_metrics[1]

    # save train result
    # Bug fix: this line previously logged the *test* metrics under the
    # "train result" label.
    log_file.write('train result : ' + str(train_metrics))
    log_file.write('History train result : ' + str(history.history))
    train_result_loss = train_metrics[0]
    train_result_acc = train_metrics[1]

    print('OK: file ../architecture_log/archi_v3_4.log has been create')

    nb_layers = len(model.layers)
    log_file.close()

except:
    print('error: file ../architecture_log/archi_v3_4_error.log has been create')
    error_file = open("../architecture_log/archi_v3_4_error.log", "w")
    traceback.print_exc(file=error_file)
    # Bug fix: the original assigned unused names result_loss/result_acc,
    # so the CSV row still contained empty strings after a failure.
    test_result_loss = "Error"
    test_result_acc = "Error"
    train_result_loss = "Error"
    train_result_acc = "Error"
    error_file.close()

finally:
    # Guard: `history` does not exist if the failure happened before fit(),
    # which previously raised a NameError inside this finally block.
    try:
        epochs_run = len(history.history['loss'])
    except NameError:
        epochs_run = "Error"
    file = open('../architecture_results_v3.csv', 'a', newline='')
    with file:
        # identifying header
        header = ['file_name', 'training_time(s)', 'test_result_loss', 'test_result_acc', 'train_result_acc', 'train_result_loss', 'nb_layers', 'epochs', 'type_archi']
        writer = csv.DictWriter(file, fieldnames=header)
        # writing data row-wise into the csv file
        # writer.writeheader()
        writer.writerow({'file_name': 'archi_v3_4',
                         'training_time(s)': training_time,
                         'test_result_loss': test_result_loss,
                         'test_result_acc': test_result_acc,
                         'train_result_acc': train_result_acc,
                         'train_result_loss': train_result_loss,
                         'nb_layers': nb_layers,
                         'epochs': epochs_run,
                         'type_archi': type_archi})
        print('add line into architecture_results_v3.csv')
|
[
"[email protected]"
] | |
4a0714091ddd90df0ea8c7a0b01751aad0843151
|
398089ec2210e1b6a12aecf8ed91cdeced6b36fc
|
/employer/views.py
|
37965cec7645b608559570279fbd8da925ea939d
|
[
"Apache-2.0"
] |
permissive
|
WilliamQLiu/job-waffle
|
7ca8cb6357884e99a9c054bbd25d10222816dde7
|
59e4bc550dc1c2131fa427f188fbc2bb287aa938
|
refs/heads/master
| 2022-05-04T12:18:53.018609 | 2015-04-10T03:18:34 | 2015-04-10T03:18:34 | 27,843,538 | 1 | 1 |
Apache-2.0
| 2021-06-10T17:29:08 | 2014-12-10T22:48:48 |
JavaScript
|
UTF-8
|
Python
| false | false | 7,702 |
py
|
"""
A view takes a web request and returns a web response
The response can be a web page, a redirect, a 404 error, etc
GET is used for requests that do not affect the state of the system
POST is used for making changes in the database
Under the hood, Django just converts HTTP POST and GET objects into a
'QueryDict', which is a Django dict, which is a Python dict
"""
from __future__ import absolute_import
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.shortcuts import render, render_to_response, RequestContext, Http404
from django.utils.decorators import method_decorator # Allow LoggedInMixin
from django.views.generic import TemplateView, View, ListView, UpdateView, DeleteView, CreateView
from django.http import HttpResponse, HttpResponseRedirect
from django.utils import timezone
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib import messages
import django_filters
# For debugging
from django.http.request import QueryDict
from django.utils.datastructures import MultiValueDict
import logging
from .models import Job
from .forms import JobForm, JobSearchForm
from .serializers import JobSerializer
from rest_framework import viewsets, authentication, permissions, filters
from haystack.query import SearchQuerySet
from haystack.inputs import AutoQuery, Exact, Clean, Raw
# Debugging: Log levels (DEBUG, INFO, WARNING, ERROR, CRITICAL)
logger = logging.getLogger(__name__) # get instance of a logger
class LoggedInMixin(object):
    """ Mixin to ensure user is logged in """
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # Wrap the view's entry point so unauthenticated requests are
        # handled by login_required before any handler code runs.
        return super(LoggedInMixin, self).dispatch(*args, **kwargs)
def find_job(request):
    """ 'Find Job' Page: full-text job search over the Haystack index.

    Reads ``query_what`` and ``query_where`` from the GET querystring,
    joins them into one query string, and filters the form's
    SearchQuerySet with it. An empty form returns unfiltered results.
    """
    form = JobSearchForm(request.GET)  # <class 'employer.forms.JobSearchForm'>
    form_search = form.search()  # Get Search Results from the form
    # Bug fix: the original indexed request.GET['query_what'] and
    # request.GET['query_where'] directly after an OR condition, which
    # raised MultiValueDictKeyError when only one field was submitted.
    # Use .get() with a default so either field may be absent.
    query_what = request.GET.get('query_what', '')
    query_where = request.GET.get('query_where', '')
    if query_what or query_where:
        myquery = query_what + " " + query_where  # combine search queries
        search_results = form_search.filter(content__contains=myquery)  # AND
    else:
        # Empty form: show the unfiltered result set.
        search_results = form_search
    # If you want to filter by Model instead of by Haystack's SearchQuerySet
    #my_data = Job.objects.filter(active=True).order_by('timestamp_created')
    context = {'search_results': search_results}
    return render(request, 'find_job.html', context)
def post_job(request):
    """ 'Post Job' Page: show the job form; create a Job on a valid POST. """
    if request.method == 'POST':
        form = JobForm(data=request.POST)
        if form.is_valid():
            cleaned = form.cleaned_data
            new_job = Job(
                created_by=request.user,
                company=cleaned['company'],
                location=cleaned['location'],
                timestamp_created=timezone.now(),
                title=cleaned['title'],
                description=cleaned['description'],
                status=cleaned['status'],
                salary_min=cleaned['salary_min'],
                salary_max=cleaned['salary_max'],
            )
            new_job.save()
            messages.success(request, 'Thanks!')
            return HttpResponseRedirect('/')
        # Invalid POST falls through: re-render with the bound form so
        # validation errors are shown.
    else:  # GET: start from a blank form
        form = JobForm()
    return render(request, 'post_job.html', {'form': form})
def manage_job_posts(request):
    """ 'Manage Job Posts' Page: list active jobs, oldest first. """
    active_jobs = Job.objects.filter(active=True).order_by('timestamp_created')
    return render(request, 'manage_job_posts.html', {'my_data': active_jobs})
class JobCreateView(LoggedInMixin, CreateView):
    """ Allow Users to Create Jobs """
    model = Job
    template_name = "job_create.html"

    def get_success_url(self):
        """ After posting job, go to job management """
        return reverse('job-post')

    def get_context_data(self, **kwargs):
        # Expose the form's submit target to the template.
        context = super(JobCreateView, self).get_context_data(**kwargs)
        context['action'] = reverse('job-create')
        return context

    def form_valid(self, form):
        # Stamp the new Job with the logged-in user before saving.
        form.instance.user = self.request.user
        return super(JobCreateView, self).form_valid(form)
class JobUpdateView(LoggedInMixin, UpdateView):
    """ Allow Users to Update Job """
    model = Job
    template_name = 'job_update.html'

    def get_success_url(self):
        """ After updating a job, takes you back to job profile """
        return reverse('manage_job_posts')

    def get_queryset(self):
        # Restrict the queryset to the single job named in the URL.
        specific_id = self.kwargs['pk']  # Pass variable 'pk' from urls.py
        return Job.objects.filter(id=specific_id)
class JobListView(LoggedInMixin, ListView):
    """ View a specific job """
    model = Job
    template_name = "job_view.html"

    def get_success_url(self):
        return reverse('job-list')

    def get_queryset(self):
        # Restrict the queryset to the single job named in the URL.
        specific_id = self.kwargs['pk']  # Pass variable 'pk' from urls.py
        return Job.objects.filter(id=specific_id)
class JobDeleteView(LoggedInMixin, DeleteView):
    """ Delete a specific job """
    model = Job
    template_name = "job_delete.html"

    def get_success_url(self):
        """ After deleting a job, takes you back to profile """
        return reverse('manage_job_posts')

    def get_queryset(self):
        # Restrict the queryset to the single job named in the URL.
        specific_id = self.kwargs['pk']  # Pass variable 'pk' from urls.py
        return Job.objects.filter(id=specific_id)
# FOR DJANGO REST FRAMEWORK (DRF)
class DefaultsMixin(object):
    """
    Default settings for view authentication, permissions,
    filtering and pagination
    """
    authentication_classes = (
        authentication.BasicAuthentication,
        authentication.TokenAuthentication,
    )
    permission_classes = (
        permissions.IsAuthenticated, # Access to GET, POST, HEAD, OPTIONS
        #IsReadOnlyRequest,
        #permissions.IsAuthenticatedOrReadOnly
    )
    filter_backends = (
        filters.DjangoFilterBackend,
        filters.SearchFilter,
        filters.OrderingFilter,
    )
    # Pagination: 50 per page by default, client may raise it via
    # ?page_size=... up to the 500 cap.
    paginate_by = 50
    paginate_by_param = 'page_size'
    max_paginate_by = 500
# DRF FILTERS
class JobFilter(django_filters.FilterSet):
    """Filter set for the Job API endpoint (e.g. ?company=<value>)."""
    company = django_filters.CharFilter(name='company')

    class Meta:
        model = Job
        # Fields exposed for filtering on the API.
        fields = ('timestamp_updated', 'company', 'title')
# DRF VIEWSETS
class JobViewSet(DefaultsMixin, viewsets.ModelViewSet):
    """API endpoint for listing, creating, updating and deleting Jobs."""
    queryset = Job.objects.all()
    serializer_class = JobSerializer
    filter_class = JobFilter
    # Bug fix: ('name') and ('timestamp_updated') were plain strings, not
    # tuples (missing trailing comma). DRF expects an iterable of field
    # names here, so the bare string was iterated character by character.
    search_fields = ('name',)
    ordering_fields = ('timestamp_updated',)
|
[
"[email protected]"
] | |
6432cf6c0bb2012d7369a431a646f38b43800201
|
29a4e8ffa77a09c418712bb243e1b4d24336e0c1
|
/nbgrader/formgrader/base.py
|
326ee4c2852b3763977915e7b9e277acf09f721b
|
[
"BSD-3-Clause"
] |
permissive
|
silky/nbgrader
|
f52634438d79df80de077569e94562f08f123f0b
|
30f461ee06a03a1e2ed1789016bb49e9f59e61eb
|
refs/heads/master
| 2021-01-18T00:23:18.300627 | 2016-01-08T22:06:45 | 2016-01-08T22:06:45 | 50,624,512 | 1 | 0 | null | 2016-01-29T00:21:36 | 2016-01-29T00:21:36 | null |
UTF-8
|
Python
| false | false | 2,554 |
py
|
import json
import functools
from tornado import web
def authenticated(f):
    """Decorator gating a handler method behind ``self.auth``.

    ``auth.authenticate(request)`` returns True (run the handler),
    False (raise 403), or anything else — treated as a URL to redirect to.
    """
    @functools.wraps(f)
    def wrapper(self, *args, **kwargs):
        verdict = self.auth.authenticate(self.request)
        if verdict is True:
            return f(self, *args, **kwargs)  # Success
        if verdict is False:
            raise web.HTTPError(403)  # Forbidden
        self.redirect(verdict, permanent=False)  # Redirect
    return wrapper
class BaseHandler(web.RequestHandler):
    """Common plumbing for formgrader handlers: typed accessors for the
    shared application settings, Jinja2 rendering, and friendly error
    pages for 403/500/502."""

    @property
    def gradebook(self):
        """The shared gradebook database interface."""
        return self.settings['gradebook']

    @property
    def auth(self):
        """The shared authenticator (see the `authenticated` decorator)."""
        return self.settings['auth']

    @property
    def mathjax_url(self):
        return self.settings['mathjax_url']

    @property
    def notebook_dir(self):
        return self.settings['notebook_dir']

    @property
    def notebook_dir_format(self):
        return self.settings['notebook_dir_format']

    @property
    def nbgrader_step(self):
        return self.settings['nbgrader_step']

    @property
    def exporter(self):
        return self.settings['exporter']

    @property
    def log(self):
        return self.settings['log']

    def render(self, name, **ns):
        """Render template `name` with keyword namespace `ns`."""
        template = self.settings['jinja2_env'].get_template(name)
        return template.render(**ns)

    # Status codes with a friendly template; 500 and 502 share one
    # template and differ only in the error code displayed.
    _ERROR_TEMPLATES = {
        500: 'gradebook_500.tpl',
        502: 'gradebook_500.tpl',
        403: 'gradebook_403.tpl',
    }

    def write_error(self, status_code, **kwargs):
        """Render a friendly page for known codes; defer to tornado otherwise.

        Refactor: the original had three near-identical if/elif branches.
        """
        template = self._ERROR_TEMPLATES.get(status_code)
        if template is None:
            return super(BaseHandler, self).write_error(status_code, **kwargs)
        html = self.render(
            template,
            base_url=self.auth.base_url,
            error_code=status_code)
        self.write(html)
        self.finish()
class BaseApiHandler(BaseHandler):
    """Base class for JSON API handlers."""

    def get_json_body(self):
        """Return the body of the request as JSON data."""
        raw = self.request.body
        if not raw:
            return None
        text = raw.strip().decode('utf-8')
        try:
            parsed = json.loads(text)
        except Exception:
            # Log the offending payload, then surface a client error.
            self.log.debug("Bad JSON: %r", text)
            self.log.error("Couldn't parse JSON", exc_info=True)
            raise web.HTTPError(400, 'Invalid JSON in body of request')
        return parsed
|
[
"[email protected]"
] | |
6685eda1a70bab345ddc6f996c018feac6a6c741
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03371/s173081591.py
|
e631cebeb539f9cb5923fd6e498f3a402e717958
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 386 |
py
|
import sys
# Fast-input helpers (competitive-programming template).
read = sys.stdin.read
readline = sys.stdin.buffer.readline
sys.setrecursionlimit(10 ** 8)
INF = float('inf')  # template constant; unused in this solution
MOD = 10 ** 9 + 7   # template constant; unused in this solution
def main():
    """Read A B C X Y and print the cheapest of three purchase plans:
    buy everything at unit price, pair items at price 2*C up to min(X, Y)
    and top up the surplus, or cover everything with 2*C pairs."""
    A, B, C, X, Y = map(int, readline().split())
    pairs = min(X, Y)
    surplus_a = X - pairs  # > 0 only when X > Y
    surplus_b = Y - pairs  # > 0 only when Y > X
    candidates = (
        A * X + B * Y,
        2 * C * pairs + A * surplus_a + B * surplus_b,
        2 * C * max(X, Y),
    )
    print(min(candidates))


if __name__ == '__main__':
    main()
|
[
"[email protected]"
] | |
8f26465aaa04312133e55a3be07fa3ebfdaba5af
|
3196460db64eded2daa77457643c8dd1ed1ba99e
|
/codechef/steve/COINS-wrong2.py
|
b8ec3851c8d755ef25b8402d188b8199eba086e0
|
[] |
no_license
|
prototypemagic/proto-mastery
|
94c649958792f00ea2a057b63ed0f7717b5ab05d
|
45f7ef2e998fa7dbc071f5c42217a83fd9340f51
|
refs/heads/master
| 2020-05-28T08:55:45.769199 | 2012-09-10T22:12:00 | 2012-09-10T22:12:00 | 3,097,117 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 823 |
py
|
#!/usr/bin/env python
# Steve Phillips / elimisteve
# 2012.01.04
# The following is wrong, to say the least, because input == 13 should
# produce output == 13, not 14. As the problem states, you cannot
# exchange Bytelandian coins for other Bytelandian coins.
def naive_max(num):
    """Value of exchanging one coin for num/2 + num/3 + num/4 coins.

    NOTE(review): file targets Python 2, where / on ints floors — confirm
    before running under Python 3.
    """
    # Given in problem description
    return sum(num / d for d in (2, 3, 4))
def clever_max(num):
'''Turns every 12 bytelandian coins into 13, plus remainder'''
# NOT given in problem description
naive = naive_max(num)
maybe_bigger = (num/12) * 13 + (num % 12) # WRONG!
return maybe_bigger if maybe_bigger > naive else naive
n = 0
while True:
try:
n = int( raw_input().strip() )
print max([n, clever_max(n),
clever_max(n/2) + clever_max(n/3) + clever_max(n/4)])
except:
break
|
[
"[email protected]"
] | |
143fe68d7f6815fea8d18f1fb028024f23bd7c51
|
bd02997a44218468b155eda45dd9dd592bb3d124
|
/leetcode_course-schedule2.py
|
ca4b99612fab4cd4749f3814a1054bbfb691055d
|
[] |
no_license
|
rheehot/ProblemSolving_Python
|
88b1eb303ab97624ae6c97e05393352695038d14
|
4d6dc6aea628f0e6e96530646c66216bf489427f
|
refs/heads/master
| 2023-02-13T03:30:07.039231 | 2021-01-04T06:04:11 | 2021-01-04T06:04:11 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,089 |
py
|
'''
Problem Solving leetcode course-schedule2
Author: Injun Son
Date: October 25, 2020
'''
import sys
import collections
import heapq
import functools
import itertools
import re
import math
import bisect
from typing import *
def canFinish(numCourses: int, prerequisites: List[List[int]]) -> bool:
    """Return True when every course can be finished, i.e. the prerequisite
    graph has no cycle (DFS with an explicit recursion-stack set)."""
    graph = collections.defaultdict(list)
    # Build the adjacency list.
    for course, prereq in prerequisites:
        graph[course].append(prereq)

    on_stack = set()   # nodes on the current DFS path; revisiting one = cycle
    finished = set()   # nodes already proven acyclic

    def dfs(node):
        if node in on_stack:   # back edge -> cycle
            return False
        if node in finished:   # already explored, known safe
            return True
        on_stack.add(node)
        for neighbor in graph[node]:
            if not dfs(neighbor):
                return False
        # Done exploring: leave the path, remember the node is safe.
        on_stack.remove(node)
        finished.add(node)
        return True

    # Snapshot the keys: dfs on a defaultdict can insert new ones.
    return all(dfs(node) for node in list(graph))
# Smoke tests: a single dependency is finishable; a two-node cycle is not.
print(canFinish(2, [[1,0]]))
print(canFinish(2, [[1,0], [0,1]]))
|
[
"[email protected]"
] | |
066e81a0fbe03a8fbc53b78c094138284f850ede
|
6c80119e02bb29761fc7854c5a2f2a144451ca5a
|
/tests/fakeIDP.py
|
971281cd5d87940746d418b565f9b43de490a12b
|
[
"BSD-2-Clause"
] |
permissive
|
josjevv/pysaml2
|
c412a21db7a52334bf67feeabc38f877a121f973
|
f806786f6dad8fc2b03daa0e1d55682daead3ec8
|
refs/heads/master
| 2020-12-25T12:17:41.628279 | 2013-04-22T11:45:25 | 2013-04-22T11:45:25 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,520 |
py
|
from urlparse import parse_qs
from saml2.saml import AUTHN_PASSWORD
from saml2.samlp import attribute_query_from_string, logout_request_from_string
from saml2 import BINDING_HTTP_REDIRECT, pack
from saml2 import BINDING_HTTP_POST
from saml2 import BINDING_SOAP
from saml2.server import Server
from saml2.soap import parse_soap_enveloped_saml_attribute_query
from saml2.soap import parse_soap_enveloped_saml_logout_request
from saml2.soap import make_soap_enveloped_saml_thingy
__author__ = 'rolandh'

# HTTP method -> SAML bindings accepted on it (used by FakeIDP.receive).
TYP = {
    "GET": [BINDING_HTTP_REDIRECT],
    "POST": [BINDING_HTTP_POST, BINDING_SOAP]
}
def unpack_form(_str, ver="SAMLRequest"):
    """Extract the `ver` and RelayState values from an HTML form body.

    Plain string scanning, no HTML parsing: assumes the RelayState input
    appears after the `ver` input in the markup.
    """
    req_marker = "name=\"%s\" value=\"" % ver
    rs_marker = 'name="RelayState" value="'

    start = _str.find(req_marker) + len(req_marker)
    end = _str.find('"', start)
    request_value = _str[start:end]

    start = _str.find(rs_marker, end) + len(rs_marker)
    end = _str.find('"', start)
    relay_state = _str[start:end]

    return {ver: request_value, "RelayState": relay_state}
class DummyResponse(object):
    """Minimal stand-in for an HTTP response (status, body text, headers)."""

    def __init__(self, code, data, headers=None):
        self.status_code = code
        self.text = data
        # Fresh list when headers is omitted or falsy (matches `headers or []`).
        self.headers = headers if headers else []
class FakeIDP(Server):
    """Test double for a SAML IdP/attribute authority.

    `receive` plays the HTTP server role: it maps an incoming URL to the
    matching configured endpoint and returns a canned response for a fixed
    test identity ("Pavill" / Roland Hedberg).
    """
    def __init__(self, config_file=""):
        Server.__init__(self, config_file)
        #self.sign = False

    def receive(self, url, method="GET", **kwargs):
        """
        Interface to receive HTTP calls on
        :param url:
        :param method:
        :param kwargs:
        :return:
        """
        if method == "GET":
            path, query = url.split("?")
            qs_dict = parse_qs(kwargs["data"])
            req = qs_dict["SAMLRequest"][0]
            rstate = qs_dict["RelayState"][0]
        else:
            # Could be either POST or SOAP
            path = url
            try:
                qs_dict = parse_qs(kwargs["data"])
                req = qs_dict["SAMLRequest"][0]
                rstate = qs_dict["RelayState"][0]
            except KeyError:
                # SOAP body: the payload is the request itself.
                req = kwargs["data"]
                rstate = ""
        response = ""
        # Get service from path: first try the IdP endpoints ...
        for key, vals in self.config.getattr("endpoints", "idp").items():
            for endp, binding in vals:
                if path == endp:
                    assert binding in TYP[method]
                    if key == "single_sign_on_service":
                        return self.authn_request_endpoint(req, binding,
                                                           rstate)
                    elif key == "single_logout_service":
                        return self.logout_endpoint(req, binding)
        # ... then the attribute-authority endpoints.
        for key, vals in self.config.getattr("endpoints", "aa").items():
            for endp, binding in vals:
                if path == endp:
                    assert binding in TYP[method]
                    if key == "attribute_service":
                        return self.attribute_query_endpoint(req, binding)
        return response

    def authn_request_endpoint(self, req, binding, relay_state):
        """Answer an AuthnRequest with a canned authn response for "Pavill"."""
        req = self.parse_authn_request(req, binding)
        if req.message.protocol_binding == BINDING_HTTP_REDIRECT:
            # Responses cannot be sent over redirect; fall back to POST.
            _binding = BINDING_HTTP_POST
        else:
            _binding = req.message.protocol_binding
        try:
            resp_args = self.response_args(req.message, [_binding])
        except Exception:
            raise
        identity = { "surName":"Hedberg", "givenName": "Roland",
                     "title": "supertramp", "mail": "[email protected]"}
        userid = "Pavill"
        authn_resp = self.create_authn_response(identity,
                                                userid=userid,
                                                authn=(AUTHN_PASSWORD,
                                                "http://www.example.com/login"),
                                                **resp_args)
        response = "%s" % authn_resp
        _dict = pack.factory(_binding, response,
                             resp_args["destination"], relay_state,
                             "SAMLResponse")
        return DummyResponse(200, **_dict)

    def attribute_query_endpoint(self, xml_str, binding):
        """Answer an AttributeQuery with a fixed extra attribute set."""
        if binding == BINDING_SOAP:
            _str = parse_soap_enveloped_saml_attribute_query(xml_str)
        else:
            _str = xml_str
        aquery = attribute_query_from_string(_str)
        extra = {"eduPersonAffiliation": "faculty"}
        userid = "Pavill"
        name_id = aquery.subject.name_id
        attr_resp = self.create_attribute_response(extra, aquery.id,
                                                   None,
                                                   sp_entity_id=aquery.issuer.text,
                                                   name_id=name_id,
                                                   attributes=aquery.attribute)
        if binding == BINDING_SOAP:
            # SOAP packing
            #headers = {"content-type": "application/soap+xml"}
            soap_message = make_soap_enveloped_saml_thingy(attr_resp)
            # if self.sign and self.sec:
            # _signed = self.sec.sign_statement_using_xmlsec(soap_message,
            # class_name(attr_resp),
            # nodeid=attr_resp.id)
            # soap_message = _signed
            response = "%s" % soap_message
        else: # Just POST
            response = "%s" % attr_resp
        return DummyResponse(200, response)

    def logout_endpoint(self, xml_str, binding):
        """Answer a LogoutRequest with a logout response over the same binding."""
        if binding == BINDING_SOAP:
            _str = parse_soap_enveloped_saml_logout_request(xml_str)
        else:
            _str = xml_str
        req = logout_request_from_string(_str)
        _resp = self.create_logout_response(req, [binding])
        if binding == BINDING_SOAP:
            # SOAP packing
            #headers = {"content-type": "application/soap+xml"}
            soap_message = make_soap_enveloped_saml_thingy(_resp)
            # if self.sign and self.sec:
            # _signed = self.sec.sign_statement_using_xmlsec(soap_message,
            # class_name(attr_resp),
            # nodeid=attr_resp.id)
            # soap_message = _signed
            response = "%s" % soap_message
        else: # Just POST
            response = "%s" % _resp
        return DummyResponse(200, response)
|
[
"[email protected]"
] | |
94cd40578f30825025b17f2297e50eb9b0f8a635
|
c1960138a37d9b87bbc6ebd225ec54e09ede4a33
|
/adafruit-circuitpython-bundle-py-20210402/lib/adafruit_epd/ssd1680.py
|
66392a74f69fe9a3ecffe574e03a380758bc1e95
|
[] |
no_license
|
apalileo/ACCD_PHCR_SP21
|
76d0e27c4203a2e90270cb2d84a75169f5db5240
|
37923f70f4c5536b18f0353470bedab200c67bad
|
refs/heads/main
| 2023-04-07T00:01:35.922061 | 2021-04-15T18:02:22 | 2021-04-15T18:02:22 | 332,101,844 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,592 |
py
|
# SPDX-FileCopyrightText: 2018 Dean Miller for Adafruit Industries
#
# SPDX-License-Identifier: MIT
"""
`adafruit_epd.ssd1680` - Adafruit SSD1680 - ePaper display driver
====================================================================================
CircuitPython driver for Adafruit SSD1680 display breakouts
* Author(s): Melissa LeBlanc-Williams
"""
import time
from micropython import const
import adafruit_framebuf
from adafruit_epd.epd import Adafruit_EPD
__version__ = "2.9.3"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_EPD.git"
# SSD1680 controller command opcodes (sent via Adafruit_EPD.command below).
_SSD1680_DRIVER_CONTROL = const(0x01)
_SSD1680_GATE_VOLTAGE = const(0x03)
_SSD1680_SOURCE_VOLTAGE = const(0x04)
_SSD1680_INIT_SETTING = const(0x08)
_SSD1680_INIT_WRITE_REG = const(0x09)
_SSD1680_INIT_READ_REG = const(0x0A)
_SSD1680_BOOSTER_SOFT_START = const(0x0C)
_SSD1680_DEEP_SLEEP = const(0x10)
_SSD1680_DATA_MODE = const(0x11)
_SSD1680_SW_RESET = const(0x12)
_SSD1680_HV_DETECT = const(0x14)
_SSD1680_VCI_DETECT = const(0x15)
_SSD1680_TEMP_CONTROL = const(0x18)
_SSD1680_TEMP_WRITE = const(0x1A)
_SSD1680_TEMP_READ = const(0x1B)
_SSD1680_EXTTEMP_WRITE = const(0x1C)
_SSD1680_MASTER_ACTIVATE = const(0x20)
_SSD1680_DISP_CTRL1 = const(0x21)
_SSD1680_DISP_CTRL2 = const(0x22)
# RAM planes: 0x24 = black/white plane, 0x26 = red (color) plane.
_SSD1680_WRITE_BWRAM = const(0x24)
_SSD1680_WRITE_REDRAM = const(0x26)
_SSD1680_READ_RAM = const(0x27)
_SSD1680_VCOM_SENSE = const(0x28)
_SSD1680_VCOM_DURATION = const(0x29)
_SSD1680_WRITE_VCOM_OTP = const(0x2A)
_SSD1680_WRITE_VCOM_CTRL = const(0x2B)
_SSD1680_WRITE_VCOM_REG = const(0x2C)
_SSD1680_READ_OTP = const(0x2D)
_SSD1680_READ_USERID = const(0x2E)
_SSD1680_READ_STATUS = const(0x2F)
_SSD1680_WRITE_WS_OTP = const(0x30)
_SSD1680_LOAD_WS_OTP = const(0x31)
_SSD1680_WRITE_LUT = const(0x32)
_SSD1680_CRC_CALC = const(0x34)
_SSD1680_CRC_READ = const(0x35)
_SSD1680_PROG_OTP = const(0x36)
_SSD1680_WRITE_DISPLAY_OPT = const(0x37)
_SSD1680_WRITE_USERID = const(0x38)
_SSD1680_OTP_PROGMODE = const(0x39)
_SSD1680_WRITE_BORDER = const(0x3C)
_SSD1680_END_OPTION = const(0x3F)
_SSD1680_SET_RAMXPOS = const(0x44)
_SSD1680_SET_RAMYPOS = const(0x45)
_SSD1680_AUTOWRITE_RED = const(0x46)
_SSD1680_AUTOWRITE_BW = const(0x47)
_SSD1680_SET_RAMXCOUNT = const(0x4E)
_SSD1680_SET_RAMYCOUNT = const(0x4F)
_SSD1680_NOP = const(0xFF)
class Adafruit_SSD1680(Adafruit_EPD):
"""driver class for Adafruit SSD1680 ePaper display breakouts"""
# pylint: disable=too-many-arguments
def __init__(
self, width, height, spi, *, cs_pin, dc_pin, sramcs_pin, rst_pin, busy_pin
):
super().__init__(
width, height, spi, cs_pin, dc_pin, sramcs_pin, rst_pin, busy_pin
)
if width % 8 != 0:
width += 8 - width % 8
self._buffer1_size = int(width * height / 8)
self._buffer2_size = self._buffer1_size
if sramcs_pin:
self._buffer1 = self.sram.get_view(0)
self._buffer2 = self.sram.get_view(self._buffer1_size)
else:
self._buffer1 = bytearray(self._buffer1_size)
self._buffer2 = bytearray(self._buffer2_size)
self._framebuf1 = adafruit_framebuf.FrameBuffer(
self._buffer1, width, height, buf_format=adafruit_framebuf.MHMSB
)
self._framebuf2 = adafruit_framebuf.FrameBuffer(
self._buffer2, width, height, buf_format=adafruit_framebuf.MHMSB
)
self.set_black_buffer(0, True)
self.set_color_buffer(1, False)
# pylint: enable=too-many-arguments
    def begin(self, reset=True):
        """Begin communication with the display and set basic settings"""
        if reset:
            self.hardware_reset()
        # Leave the panel in deep sleep until a refresh is requested.
        self.power_down()
def busy_wait(self):
"""Wait for display to be done with current task, either by polling the
busy pin, or pausing"""
if self._busy:
while self._busy.value:
time.sleep(0.01)
else:
time.sleep(0.5)
def power_up(self):
"""Power up the display in preparation for writing RAM and updating"""
self.hardware_reset()
self.busy_wait()
self.command(_SSD1680_SW_RESET)
self.busy_wait()
# driver output control
self.command(
_SSD1680_DRIVER_CONTROL,
bytearray([self._height - 1, (self._height - 1) >> 8, 0x00]),
)
# data entry mode
self.command(_SSD1680_DATA_MODE, bytearray([0x03]))
# Set voltages
self.command(_SSD1680_WRITE_VCOM_REG, bytearray([0x36]))
self.command(_SSD1680_GATE_VOLTAGE, bytearray([0x17]))
self.command(_SSD1680_SOURCE_VOLTAGE, bytearray([0x41, 0x00, 0x32]))
# Set ram X start/end postion
self.command(_SSD1680_SET_RAMXPOS, bytearray([0x01, 0x10]))
# Set ram Y start/end postion
self.command(
_SSD1680_SET_RAMYPOS,
bytearray([0, 0, self._height - 1, (self._height - 1) >> 8]),
)
# Set border waveform
self.command(_SSD1680_WRITE_BORDER, bytearray([0x05]))
# Set ram X count
self.command(_SSD1680_SET_RAMXCOUNT, bytearray([0x01]))
# Set ram Y count
self.command(_SSD1680_SET_RAMYCOUNT, bytearray([self._height - 1, 0]))
self.busy_wait()
def power_down(self):
"""Power down the display - required when not actively displaying!"""
self.command(_SSD1680_DEEP_SLEEP, bytearray([0x01]))
time.sleep(0.1)
def update(self):
"""Update the display from internal memory"""
self.command(_SSD1680_DISP_CTRL2, bytearray([0xF4]))
self.command(_SSD1680_MASTER_ACTIVATE)
self.busy_wait()
if not self._busy:
time.sleep(3) # wait 3 seconds
def write_ram(self, index):
"""Send the one byte command for starting the RAM write process. Returns
the byte read at the same time over SPI. index is the RAM buffer, can be
0 or 1 for tri-color displays."""
if index == 0:
return self.command(_SSD1680_WRITE_BWRAM, end=False)
if index == 1:
return self.command(_SSD1680_WRITE_REDRAM, end=False)
raise RuntimeError("RAM index must be 0 or 1")
def set_ram_address(self, x, y): # pylint: disable=unused-argument, no-self-use
"""Set the RAM address location, not used on this chipset but required by
the superclass"""
# Set RAM X address counter
self.command(_SSD1680_SET_RAMXCOUNT, bytearray([x + 1]))
# Set RAM Y address counter
self.command(_SSD1680_SET_RAMYCOUNT, bytearray([y, y >> 8]))
|
[
"[email protected]"
] | |
2c577726ddb93acc298d9aa48b796d856a11327a
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02818/s867983819.py
|
672fccfd638abcdbc2e8bfd4c826f7fa452e2450
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 204 |
py
|
# AtCoder-style task: eat up to K cookies, taking from A first, then from B;
# print the remaining counts. (Removed the unused `count` variable.)
A, B, K = map(int, input().split())
if K == 0:
    # Nothing is eaten.
    print('%d %d' % (A, B))
elif K <= A:
    # All K cookies come out of A.
    print('%d %d' % (A - K, B))
elif A < K <= B + A:
    # A is emptied; the remainder comes out of B.
    print('%d %d' % (0, B - (K - A)))
else:
    # K exceeds the total supply: both jars end up empty.
    print('0 0')
|
[
"[email protected]"
] | |
5a77ff53b47783a74d0756216f1c09c0dcf2c10e
|
8796273a71427c8d9869431926341fbcac54095f
|
/imdemo/utils/singleton.py
|
efdc68aae590b919e315b4fbb42972ee95d1400c
|
[] |
no_license
|
fatelei/im-demo
|
e2c377a4fc9c7ce5ab31210ed76f1532d537a790
|
032bac4e0cfe7365e389c64a1ce3a5aec7dd9208
|
refs/heads/master
| 2021-01-09T21:46:21.401059 | 2016-01-17T08:14:55 | 2016-01-17T08:14:55 | 45,176,036 | 4 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 383 |
py
|
# -*- coding: utf8 -*-
"""
imdemo.utils.singleton
~~~~~~~~~~~~~~~~~~~~~~
Singleton mode.
"""
def singleton_class(obj):
    """Decorator that makes *obj* (a class or factory) return one shared instance.

    The first call constructs the instance with whatever arguments were
    passed; every subsequent call returns that same object and ignores its
    arguments.
    """
    instances = {}

    def wrapper(*args, **kwargs):
        # BUG FIX: key on the decorated object itself rather than
        # obj.__name__, so two distinct classes that happen to share a name
        # no longer collide on the same cached instance.
        if obj not in instances:
            instances[obj] = obj(*args, **kwargs)
        return instances[obj]
    return wrapper
|
[
"[email protected]"
] | |
34e948024f0bec94ff0ac644ed0ec34b906fbcf6
|
c058f51b99f91faebf27183b2b579e9f96e0d8f5
|
/botorch/sampling/index_sampler.py
|
ac64388a6725fbe6d9d097bcda515413de547a4f
|
[
"MIT"
] |
permissive
|
pytorch/botorch
|
255d62f698cc615c750e9343c278a63c7e96a586
|
4cc5ed59b2e8a9c780f786830c548e05cc74d53c
|
refs/heads/main
| 2023-08-22T15:23:51.071048 | 2023-08-22T05:30:38 | 2023-08-22T05:30:38 | 142,940,093 | 2,891 | 373 |
MIT
| 2023-09-13T00:16:13 | 2018-07-30T23:59:57 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 2,289 |
py
|
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
r"""
Sampler to be used with `EnsemblePosteriors` to enable
deterministic optimization of acquisition functions with ensemble models.
"""
from __future__ import annotations
import torch
from botorch.posteriors.ensemble import EnsemblePosterior
from botorch.sampling.base import MCSampler
from torch import Tensor
class IndexSampler(MCSampler):
    r"""A sampler that calls `posterior.rsample_from_base_samples` to
    generate the samples via index base samples."""

    def forward(self, posterior: EnsemblePosterior) -> Tensor:
        r"""Draws MC samples from the posterior.

        Args:
            posterior: The ensemble posterior to sample from.

        Returns:
            The samples drawn from the posterior.
        """
        # Lazily (re)build the integer index base samples, then let the
        # posterior materialize the corresponding ensemble members.
        self._construct_base_samples(posterior=posterior)
        samples = posterior.rsample_from_base_samples(
            sample_shape=self.sample_shape, base_samples=self.base_samples
        )
        return samples

    def _construct_base_samples(self, posterior: EnsemblePosterior) -> None:
        r"""Constructs base samples as indices to sample with them from
        the Posterior.

        Args:
            posterior: The ensemble posterior to construct the base samples
            for.
        """
        # Rebuild only when no samples exist yet or the requested sample
        # shape has changed since the last construction.
        if self.base_samples is None or self.base_samples.shape != self.sample_shape:
            with torch.random.fork_rng():
                # fork_rng keeps the seeded draw from perturbing the global
                # RNG state of the surrounding program.
                torch.manual_seed(self.seed)
                # Draw ensemble-member indices proportional to their weights.
                base_samples = torch.multinomial(
                    posterior.weights,
                    num_samples=self.sample_shape.numel(),
                    replacement=True,
                ).reshape(self.sample_shape)
            # Registered as a buffer so it moves with .to() and is persisted.
            self.register_buffer("base_samples", base_samples)
        if self.base_samples.device != posterior.device:
            self.to(device=posterior.device)  # pragma: nocover

    def _update_base_samples(
        self, posterior: EnsemblePosterior, base_sampler: IndexSampler
    ) -> None:
        r"""Null operation just needed for compatibility with
        `CachedCholeskyAcquisitionFunction`."""
        pass
|
[
"[email protected]"
] | |
2bb7d800683997697c30b40167e239a1b671acbd
|
9f5fcff2513f2d78f27e5313698dcc47fce1e754
|
/Experiment/RL_EA_search/graphnas/rs_trainer.py
|
60d453b4a9dc3b195ba7af0fdb0ad1d16b376820
|
[
"Apache-2.0"
] |
permissive
|
ncucjm/notebook
|
c2495f790e9fc2ca55c1c29a8eaa2dc1bfe7463f
|
7271a0d1b10cdd6298e223c7ff150d4df031aa76
|
refs/heads/master
| 2023-07-20T05:55:48.946687 | 2021-01-27T09:12:19 | 2021-01-27T09:12:19 | 202,633,012 | 0 | 0 | null | 2023-07-06T21:28:29 | 2019-08-16T00:58:45 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 1,510 |
py
|
import time
import torch
import numpy as np
from collections import deque
from graphnas.trainer import Trainer
class RandomSearch_Trainer(Trainer):
    """
    This class implements a Random Search method, on the Search Space
    provided to it.
    """

    def __init__(self, args):
        super(RandomSearch_Trainer, self).__init__(args)
        self.args = args
        self.random_seed = args.random_seed
        # Evaluation budget: number of random individuals to try.
        self.cycles = args.cycles

    def train(self):
        """Sample and evaluate `cycles` random individuals, keeping the best.

        Prints the best architecture found and its validation accuracy.
        """
        print("\n\n===== Random Search ====")
        start_time = time.time()
        self.best_ind_acc = 0.0
        self.best_ind = []
        while self.cycles > 0:
            # BUG FIX: the budget was never decremented, so the original
            # loop could not terminate; consume one evaluation per pass.
            self.cycles -= 1
            individual = self._generate_random_individual()
            ind_actions = self._construct_action([individual])
            gnn = self.form_gnn_info(ind_actions[0])
            # Train the sampled architecture and read its validation score.
            _, ind_acc = \
                self.submodel_manager.train(gnn, format=self.args.format)
            print("individual:", individual, " val_score:", ind_acc)
            if ind_acc > self.best_ind_acc:
                self.best_ind = individual.copy()
                self.best_ind_acc = ind_acc
        end_time = time.time()
        total_time = end_time - start_time
        print('Total elapsed time: ' + str(total_time))
        print('[BEST STRUCTURE]', self.best_ind)
        print('[BEST STRUCTURE] Actions: ',
              self._construct_action([self.best_ind]))
        print('[BEST STRUCTURE] Accuracy: ', self.best_ind_acc)
        print("===== Random Search DONE ====")
|
[
"[email protected]"
] | |
d48f8bec41176e377a39ba8177cac60f159340b7
|
297497957c531d81ba286bc91253fbbb78b4d8be
|
/third_party/python/esprima/esprima/__init__.py
|
aa2398f4102b2e9d4553bb39f890861fda8ee0ea
|
[
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] |
permissive
|
marco-c/gecko-dev-comments-removed
|
7a9dd34045b07e6b22f0c636c0a836b9e639f9d3
|
61942784fb157763e65608e5a29b3729b0aa66fa
|
refs/heads/master
| 2023-08-09T18:55:25.895853 | 2023-08-01T00:40:39 | 2023-08-01T00:40:39 | 211,297,481 | 0 | 0 |
NOASSERTION
| 2019-09-29T01:27:49 | 2019-09-27T10:44:24 |
C++
|
UTF-8
|
Python
| false | false | 154 |
py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import

# Package version, kept in both string and tuple form for easy comparison.
version = '4.0.1'
__version__ = (4, 0, 1)

# Re-export the public esprima API at package level.
from .esprima import *
|
[
"[email protected]"
] | |
4ec3a3ad39f84c17851919fc61bb7c8ea7077454
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/benchmark/startCirq1255.py
|
433c18dd8d62c95a5ce1435e5cbc4fa0ed500276
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,849 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=5
# total number=49
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
    """Build the auto-generated Grover-style benchmark circuit.

    NOTE(review): this file is machine-generated; the trailing
    ``# number=N`` comments encode the generator's gate ordering and
    should be preserved rather than hand-tuned.

    :param n: declared qubit count (the gate list itself is fixed)
    :param input_qubit: sequence of at least 5 cirq qubits to act on
    :returns: a ``cirq.Circuit`` ending in a full measurement keyed 'result'
    """
    c = cirq.Circuit()  # circuit begin

    c.append(cirq.H.on(input_qubit[0]))  # number=3
    c.append(cirq.rx(-1.3603096190043806).on(input_qubit[2]))  # number=28
    c.append(cirq.H.on(input_qubit[1]))  # number=4
    c.append(cirq.H.on(input_qubit[2]))  # number=5
    c.append(cirq.H.on(input_qubit[3]))  # number=6
    c.append(cirq.H.on(input_qubit[4]))  # number=21

    for i in range(2):
        c.append(cirq.H.on(input_qubit[0]))  # number=1
        c.append(cirq.H.on(input_qubit[1]))  # number=2
        c.append(cirq.H.on(input_qubit[2]))  # number=7
        c.append(cirq.H.on(input_qubit[3]))  # number=8
        c.append(cirq.H.on(input_qubit[3]))  # number=34
        c.append(cirq.CZ.on(input_qubit[4], input_qubit[3]))  # number=35
        c.append(cirq.H.on(input_qubit[3]))  # number=36
        c.append(cirq.H.on(input_qubit[0]))  # number=17
        c.append(cirq.H.on(input_qubit[1]))  # number=18
        c.append(cirq.CNOT.on(input_qubit[0], input_qubit[2]))  # number=43
        c.append(cirq.X.on(input_qubit[2]))  # number=44
        c.append(cirq.H.on(input_qubit[2]))  # number=46
        c.append(cirq.CZ.on(input_qubit[0], input_qubit[2]))  # number=47
        c.append(cirq.H.on(input_qubit[2]))  # number=48
        c.append(cirq.rx(-1.9697785938008003).on(input_qubit[1]))  # number=37
        c.append(cirq.H.on(input_qubit[2]))  # number=19
        c.append(cirq.H.on(input_qubit[3]))  # number=20
        c.append(cirq.H.on(input_qubit[0]))  # number=38
        c.append(cirq.CZ.on(input_qubit[1], input_qubit[0]))  # number=39
        c.append(cirq.H.on(input_qubit[0]))  # number=40
        c.append(cirq.X.on(input_qubit[0]))  # number=32
        c.append(cirq.CNOT.on(input_qubit[1], input_qubit[0]))  # number=33
        c.append(cirq.CNOT.on(input_qubit[0], input_qubit[1]))  # number=24
        c.append(cirq.X.on(input_qubit[1]))  # number=25
        c.append(cirq.X.on(input_qubit[1]))  # number=41
        c.append(cirq.CNOT.on(input_qubit[0], input_qubit[1]))  # number=26
        c.append(cirq.X.on(input_qubit[2]))  # number=11
        c.append(cirq.CNOT.on(input_qubit[2], input_qubit[3]))  # number=30
        c.append(cirq.X.on(input_qubit[3]))  # number=12
        c.append(cirq.H.on(input_qubit[2]))  # number=42
        c.append(cirq.X.on(input_qubit[0]))  # number=13
        c.append(cirq.X.on(input_qubit[1]))  # number=14
        c.append(cirq.X.on(input_qubit[2]))  # number=15
        c.append(cirq.X.on(input_qubit[3]))  # number=16
        c.append(cirq.X.on(input_qubit[1]))  # number=22
        c.append(cirq.X.on(input_qubit[1]))  # number=23
    # circuit end

    # Measure every qubit into a single classical register named 'result'.
    c.append(cirq.measure(*input_qubit, key='result'))
    return c
def bitstring(bits):
    """Fold an iterable of bit values into a string of decimal digits."""
    digits = []
    for bit in bits:
        digits.append(str(int(bit)))
    return ''.join(digits)
if __name__ == '__main__':
    qubit_count = 5

    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
    circuit = make_circuit(qubit_count, input_qubits)
    # Re-target the circuit at Sycamore's sqrt-iswap native gate set.
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')

    circuit_sample_count = 2000
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)

    frequencies = result.histogram(key='result', fold_func=bitstring)
    # Dump the histogram, circuit length and circuit text for offline diffing.
    writefile = open("../data/startCirq1255.csv", "w+")
    print(format(frequencies), file=writefile)
    print("results end", file=writefile)

    print(circuit.__len__(), file=writefile)
    print(circuit, file=writefile)

    writefile.close()
|
[
"[email protected]"
] | |
5e504538adc68c06ea2082edf5674a0e82a28dc0
|
4f75cc33b4d65d5e4b054fc35b831a388a46c896
|
/.history/app_20210903181729.py
|
d0919a5fe032e1f8eaa9d4770a1d04d5bbe154c3
|
[] |
no_license
|
Lr-2002/newpage
|
c3fe2acc451e24f6408996ea1271c61c321de702
|
c589ad974e7100aa9b1c2ccc095a959ff68069b6
|
refs/heads/main
| 2023-09-03T06:13:53.428236 | 2021-11-23T10:41:21 | 2021-11-23T10:41:21 | 402,606,000 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,625 |
py
|
from flask import Flask ,render_template,url_for
from flask_sqlalchemy import SQLAlchemy
import os
import sys
import click
app = Flask(__name__)
# SQLite database file lives next to this module; app.root_path is the
# absolute path of the application package.
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(app.root_path, 'data.db')
# Disable the modification-tracking signal system to save memory.
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
@app.cli.command()
@click.option('--drop', is_flag=True, help='Create after drop.')
def initdb(drop):
    """Flask CLI command: create all tables; with --drop, recreate from scratch."""
    if drop:
        db.drop_all()
    db.create_all()
    click.echo('Initialize database.')
class User(db.Model):
    """A site user; only an auto-increment id and a display name are stored."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(20))
class Movie(db.Model):
    """A watch-list entry: movie title plus a 4-character release year."""
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(20))
    year = db.Column(db.String(4))
# name = 'Grey Li'
# movies = [
# {'title': 'My Neighbor Totoro', 'year': '1988'},
# {'title': 'Dead Poets Society', 'year': '1989'},
# {'title': 'A Perfect World', 'year': '1993'},
# {'title': 'Leon', 'year': '1994'},
# {'title': 'Mahjong', 'year': '1996'},
# {'title': 'Swallowtail Butterfly', 'year': '1996'},
# {'title': 'King of Comedy', 'year': '1999'},
# {'title': 'Devils on the Doorstep', 'year': '1999'},
# {'title': 'WALL-E', 'year': '2008'},
# {'title': 'The Pork of Music', 'year': '2012'},
# ]
# @app.route('/static/<name>')
# def static(name):
# # url_for('static')
# return name
@app.route('/')
def hello():
    """Render the index page for the first user with all stored movies.

    BUG FIX: the original body had an incomplete assignment (``user =``)
    and referenced undefined names ``name`` and ``movies``; load them from
    the database instead.
    """
    user = User.query.first()
    movies = Movie.query.all()
    return render_template('index.html', name=user.name, movies=movies)
# if __name__ == '__main__':
# app.run()
|
[
"[email protected]"
] | |
63616405b27720b76566b120e130bee0ac7bae8e
|
cfa464f5e4ec36b740d6e884f0ca1e170ebd2efb
|
/0x15-api/1-export_to_CSV.py
|
402fb19b0b58ff81079e112f6fdb96aead0b7b14
|
[] |
no_license
|
Immaannn2222/holberton-system_engineering-devops
|
6ea0c4f3af2943c242e1928a2b4e66932f193a34
|
bcf001f3693fc55d54842ad92848ee783edee37a
|
refs/heads/master
| 2020-12-22T21:12:22.507064 | 2020-10-12T19:46:37 | 2020-10-12T19:46:37 | 236,933,884 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 798 |
py
|
#!/usr/bin/python3
"""HTTP WITH PYTHON"""
import csv
import requests
from sys import argv
if __name__ == "__main__":
"""main"""
to_do = requests.get('https://jsonplaceholder.typicode.com/todos/',
params={"userId": argv[1]})
user = requests.get('https://jsonplaceholder.typicode.com/users',
params={"id": argv[1]})
list_json = to_do.json()
user_json = user.json()
for i in user_json:
name = i.get("username")
i_d = i.get('id')
with open(str(i_d) + '.csv', mode='w') as f:
csv = csv.writer(f, delimiter=',', quoting=csv.QUOTE_ALL)
for t in list_json:
status = t.get("completed")
task_title = t.get("title")
csv.writerow([i_d, name, status, task_title])
|
[
"[email protected]"
] | |
59a610eb83b8706f74f0002b97f722652d711751
|
83c57f25a1c8b29bb84078340efabaf527a9452e
|
/pytest/xiaoniu88/pipelines.py
|
bcf58746076d6fa5a9859ffc60911edbe065bfe3
|
[] |
no_license
|
ifzz/py
|
df06cf5da5920dae979c2c8454bfa02c36dfeeb1
|
2305e651613725ca51d6a87306f3ef83d6c51939
|
refs/heads/master
| 2021-01-18T08:32:21.256271 | 2016-03-11T10:30:28 | 2016-03-11T10:30:28 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 289 |
py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class Xiaoniu88Pipeline(object):
    """Pass-through Scrapy pipeline: hands every item back unchanged."""

    def process_item(self, item, spider):
        # No filtering or transformation is applied at this stage.
        return item
|
[
"[email protected]"
] | |
cca8684959f85d2e036f5c9887666fd2e912318b
|
5865cc1b70db72b7a9a9a07547f05a1f47959bb1
|
/supervised_learning/0x02-tensorflow/0-create_placeholders.py
|
030bfd54225f665c91036d412c47e2ec3b3197fd
|
[] |
no_license
|
nildiert/holbertonschool-machine_learning
|
c8cefc3a784348f09128c0f4d82d65b9d56000c5
|
273f81feaa14fe24ac4db5d82be0d13299e857b8
|
refs/heads/master
| 2020-12-21T12:27:48.280880 | 2020-09-25T17:58:33 | 2020-09-25T17:58:33 | 236,429,499 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 326 |
py
|
#!/usr/bin/env python3
""" This function creates two placeholders """
import tensorflow as tf
def create_placeholders(nx, classes):
    """ Method to create placeholders """
    # Batch dimension is left open (None) so any batch size can be fed.
    features = tf.placeholder(tf.float32, shape=(None, nx), name='x')
    labels = tf.placeholder(tf.float32, shape=(None, classes), name='y')
    return (features, labels)
|
[
"[email protected]"
] | |
179469489a69ca59e2930a07ad28fb243302e0f3
|
12c1b33e27d841bb25899d6601f1de75b522d88d
|
/python/udacity/draw_turtles.py
|
6d504799dbe85482a068035ecfff4a600108ee55
|
[] |
no_license
|
conflabermits/Scripts
|
ec27456b5b26ad7b1edaf30686addff2cacc6619
|
c91ef0594dda1228a523fcaccb4af3313d370718
|
refs/heads/main
| 2023-07-06T21:41:12.033118 | 2023-06-25T19:24:54 | 2023-06-25T19:24:54 | 66,151,253 | 4 | 0 | null | 2023-09-10T19:56:17 | 2016-08-20T14:35:34 |
HTML
|
UTF-8
|
Python
| false | false | 893 |
py
|
import turtle
def draw_square(a_turtle):
    """Trace a 100-unit square, turning clockwise at each corner."""
    for _ in range(4):
        a_turtle.forward(100)
        a_turtle.right(90)
def draw_circle(a_turtle):
    """Draw a radius-100 circle from the turtle's current position."""
    a_turtle.circle(100)
def draw_triangle(a_turtle):
    """Trace a 100-unit equilateral triangle, turning counter-clockwise."""
    for _ in range(3):
        a_turtle.forward(100)
        a_turtle.left(120)
def draw_turtles():
    """Open a window and spin one turtle through 72 rotated squares.

    Each square is rotated 95 degrees past the previous one, producing a
    spirograph-like pattern. (Removed the blocks of commented-out code for
    the unused `angie` and `charlie` turtles.)
    """
    window = turtle.Screen()
    window.bgcolor("red")

    brad = turtle.Turtle()
    brad.shape("turtle")
    brad.color("black", "green")
    brad.speed(10)

    for _ in range(72):
        draw_square(brad)
        brad.right(95)

    # Keep the window open until the user clicks it.
    window.exitonclick()


draw_turtles()
|
[
"[email protected]"
] | |
42835590fa2d772e8fd35ff631574e8c3dda8650
|
2f30cf20d58e2cde4037441e67213223c69a6998
|
/lesson19_接口2/d02request.py
|
34aa5e3f987860394f8ccb9da1afab99314bd07e
|
[] |
no_license
|
zengcong1314/python1205
|
b11db7de7d0ad1f8401b8b0c9b20024b4405ae6c
|
da800ed3374d1d43eb75485588ddb8c3a159bb41
|
refs/heads/master
| 2023-05-25T07:17:25.065004 | 2021-06-08T08:27:54 | 2021-06-08T08:27:54 | 318,685,835 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 618 |
py
|
"""
第三方库
pip install requests
python 用来发送HTTP 请求
"""
import requests
#发送get 请求
url = "http://www.keyou.site:8000/projects/"
p = {"a":"b"}
resp = requests.get(url,params=p)
print(resp)
print(resp.status_code)
# 字符串
print(resp.text)
# 字典
print(resp.json())
# post请求
url = "http://www.keyou.site:8000/user/login/"
# 请求参数:json 格式的body
data = {
"username":"lemon1",
"password":"123456"
}
header = {
"Authorization":"JWT fow"
}
resp2 = requests.post(url,json=data,headers=header)
print(resp2.json())
# query string: params={}
# json json={}
# headers
|
[
"[email protected]"
] | |
ef12a61a3f9668366b02a4f68d57fc5cb87247f6
|
f9d4eee81dda90e41ee755f333e0d787dab749db
|
/eth2/beacon/scripts/quickstart_state/generate_beacon_genesis.py
|
2ce8f76b744662551c3a38e4b0081d708144e70a
|
[
"MIT"
] |
permissive
|
Palem1988/trinity
|
f10f21119a7ea98a7fc9458e5ff05b1e4cf6a021
|
79c21f8ae90bc765a78cb8052af0e4271e4c25e1
|
refs/heads/master
| 2022-02-20T05:21:18.576796 | 2019-09-24T22:09:21 | 2019-09-24T22:40:24 | 210,869,348 | 0 | 1 |
MIT
| 2019-09-25T14:45:12 | 2019-09-25T14:45:04 | null |
UTF-8
|
Python
| false | false | 1,982 |
py
|
from pathlib import Path
import time
from eth2._utils.hash import hash_eth2
from eth2.beacon.genesis import initialize_beacon_state_from_eth1
from eth2.beacon.tools.builder.initializer import create_mock_deposits_and_root
from eth2.beacon.tools.fixtures.config_types import Minimal
from eth2.beacon.tools.fixtures.loading import load_config_at_path, load_yaml_at
from eth2.beacon.tools.misc.ssz_vector import override_lengths
ROOT_DIR = Path("eth2/beacon/scripts")
KEY_SET_FILE = Path("keygen_16_validators.yaml")
def _load_config(config):
    """Load the beacon-chain config YAML matching *config*'s name."""
    config_path = ROOT_DIR / Path(f"config_{config.name}.yaml")
    return load_config_at_path(config_path)
def _main():
    """Build a minimal-config genesis BeaconState from mock deposits and
    print its hash tree root (hex)."""
    config_type = Minimal
    config = _load_config(config_type)
    override_lengths(config)

    key_set = load_yaml_at(ROOT_DIR / KEY_SET_FILE)

    pubkeys = ()
    privkeys = ()
    withdrawal_credentials = ()
    keymap = {}
    for key_pair in key_set:
        pubkey = key_pair["pubkey"].to_bytes(48, byteorder="big")
        privkey = key_pair["privkey"]
        # Credential = one-byte BLS withdrawal prefix + hash(pubkey)[1:].
        withdrawal_credential = (
            config.BLS_WITHDRAWAL_PREFIX.to_bytes(1, byteorder="big")
            + hash_eth2(pubkey)[1:]
        )

        pubkeys += (pubkey,)
        privkeys += (privkey,)
        withdrawal_credentials += (withdrawal_credential,)
        keymap[pubkey] = privkey

    deposits, _ = create_mock_deposits_and_root(
        pubkeys, keymap, config, withdrawal_credentials
    )

    # The eth1 block hash is a fixed placeholder, not real chain data.
    eth1_block_hash = b"\x42" * 32
    # NOTE: this timestamp is a placeholder
    eth1_timestamp = 10000
    state = initialize_beacon_state_from_eth1(
        eth1_block_hash=eth1_block_hash,
        eth1_timestamp=eth1_timestamp,
        deposits=deposits,
        config=config,
    )

    # Stamp the genesis with the current wall-clock time.
    genesis_time = int(time.time())
    print(f"creating genesis at time {genesis_time}")
    genesis_state = state.copy(genesis_time=genesis_time)
    print(genesis_state.hash_tree_root.hex())
|
[
"[email protected]"
] | |
3ea5bee3bd4871ba78ed230af082be4efae65c9f
|
d76224386c2b359d6d3228567cbb274fea8fcaab
|
/final_back/asgi.py
|
3b0602bc765f04f8b1a90f1b18b5d63842de6062
|
[] |
no_license
|
SSAFY-5th-seungwoon/Moya_backend
|
2a270525dc3d0d53ee4b42274696d19f84edce9d
|
ac8d7004dafef9a4f9030dbe3a5762661f3f06ac
|
refs/heads/master
| 2023-05-22T20:45:07.230178 | 2021-06-16T07:44:05 | 2021-06-16T07:44:05 | 369,787,652 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 397 |
py
|
"""
ASGI config for final_back project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Fall back to the project settings module unless one is already configured.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'final_back.settings')

# The ASGI callable that servers (uvicorn/daphne/etc.) import and run.
application = get_asgi_application()
|
[
"[email protected]"
] | |
0f3aaea69808c239b235c44f472f9e05b0f6e1ab
|
63cf686bf970d28c045719de2f0e7e9dae5bed15
|
/Contains Duplicate .py
|
f7d07154cc28b4a5d52c30ce29ed8bc9695a4146
|
[] |
no_license
|
menard-noe/LeetCode
|
6461bda4a076849cf69f2cd87999275f141cc483
|
4e9c50d256c84d1b830a7642b265619a0b69d542
|
refs/heads/master
| 2022-12-13T09:41:41.682555 | 2020-09-14T12:46:53 | 2020-09-14T12:46:53 | 282,481,920 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 443 |
py
|
import math
from typing import List
class Solution:
    def containsDuplicate(self, nums: List[int]) -> bool:
        """Return True if any value appears at least twice in nums.

        Uses a set for O(1) membership tests (the original used a dict with
        dummy values where a set is the idiomatic structure) and exits early
        on the first duplicate. O(n) time, O(n) extra space.
        """
        seen = set()
        for num in nums:
            if num in seen:
                return True
            seen.add(num)
        return False
if __name__ == "__main__":
# execute only if run as a script
input = [1,2,3,3]
solution = Solution()
print(solution.containsDuplicate(input))
|
[
"[email protected]"
] | |
7ff1948228505514fa2fc18802fadd69dee1abbb
|
81cac5d646fc14e52b3941279d59fdd957b10f7e
|
/tests/components/homekit_controller/specific_devices/test_ecobee_occupancy.py
|
293ecd07dd2394ceabecc9061354e93c5bf4a172
|
[
"Apache-2.0"
] |
permissive
|
arsaboo/home-assistant
|
6b6617f296408a42874a67a71ad9bc6074acd000
|
554e51017e7b1b6949783d9684c4a0e8ca21e466
|
refs/heads/dev
| 2023-07-27T20:56:52.656891 | 2022-01-19T19:30:57 | 2022-01-19T19:30:57 | 207,046,472 | 2 | 0 |
Apache-2.0
| 2019-09-08T01:35:16 | 2019-09-08T01:35:16 | null |
UTF-8
|
Python
| false | false | 1,226 |
py
|
"""
Regression tests for Ecobee occupancy.
https://github.com/home-assistant/core/issues/31827
"""
from tests.components.homekit_controller.common import (
DeviceTestInfo,
EntityTestInfo,
assert_devices_and_entities_created,
setup_accessories_from_file,
setup_test_accessories,
)
async def test_ecobee_occupancy_setup(hass):
    """Test that an Ecbobee occupancy sensor be correctly setup in HA."""
    # Load the recorded HomeKit accessory database and pair it with HA.
    accessories = await setup_accessories_from_file(hass, "ecobee_occupancy.json")
    await setup_test_accessories(hass, accessories)

    # Assert the device registry entry and its single binary sensor entity
    # match the fixture's metadata exactly.
    await assert_devices_and_entities_created(
        hass,
        DeviceTestInfo(
            unique_id="00:00:00:00:00:00",
            name="Master Fan",
            model="ecobee Switch+",
            manufacturer="ecobee Inc.",
            sw_version="4.5.130201",
            hw_version="",
            serial_number="111111111111",
            devices=[],
            entities=[
                EntityTestInfo(
                    entity_id="binary_sensor.master_fan",
                    friendly_name="Master Fan",
                    unique_id="homekit-111111111111-56",
                    state="off",
                ),
            ],
        ),
    )
|
[
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.