blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
febb96c94ce8da0983d61e5f72ae3c876d19bac4 | a4e1093cd868cc16cf909b8f7b84832a823a97bf | /utils/criterion.py | 1d10cbdcb99de7980901e1cde9c148fb1d57396b | [] | no_license | hermanprawiro/gan-playground | 8fb7eed54314661d9d1b908fe2cb1695eb1e3881 | bf4c270ad4696d61df0dbe2afb8c9ebafb9c2ba3 | refs/heads/master | 2020-09-06T19:43:22.685332 | 2019-12-24T10:45:12 | 2019-12-24T10:45:12 | 220,529,636 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,740 | py | import torch
import torch.nn as nn
class GANLoss(nn.Module):
    """Adversarial loss supporting the 'vanilla' (BCE-with-logits),
    'lsgan' (MSE) and 'hinge' GAN objectives.

    Targets are registered as buffers so they follow the module across
    ``.to(device)`` calls.
    """

    def __init__(self, gan_mode, target_real_label=1.0, target_fake_label=0.0, target_fake_G_label=1.0):
        super().__init__()
        self.gan_mode = gan_mode
        for buf_name, buf_value in (('real_label', target_real_label),
                                    ('fake_label', target_fake_label),
                                    ('fake_G_label', target_fake_G_label)):
            self.register_buffer(buf_name, torch.tensor(buf_value))
        if gan_mode == 'vanilla':
            self.loss = nn.BCEWithLogitsLoss()
        elif gan_mode == 'lsgan':
            self.loss = nn.MSELoss()
        elif gan_mode == 'hinge':
            # Hinge loss is computed directly in forward(); no criterion needed.
            self.loss = None
        else:
            raise NotImplementedError('GAN mode %s is not implemented' % gan_mode)

    def _get_target_tensor(self, prediction, is_real, is_generator=False):
        """Broadcast the appropriate constant label to the prediction's shape."""
        if is_real:
            target = self.real_label
        else:
            target = self.fake_G_label if is_generator else self.fake_label
        return target.expand_as(prediction)

    def forward(self, prediction, is_real, is_generator=False):
        """Compute the GAN loss for discriminator outputs `prediction`.

        is_real selects the real/fake target; is_generator selects the
        generator-side objective for fake samples.
        """
        if self.gan_mode == 'hinge':
            if is_real:
                return nn.functional.relu(1. - prediction).mean()
            if is_generator:
                return - prediction.mean()
            return nn.functional.relu(1. + prediction).mean()
        target = self._get_target_tensor(prediction, is_real, is_generator)
        return self.loss(prediction, target)
class VAELoss(nn.Module):
    """VAE objective: per-sample reconstruction term plus beta-weighted KL divergence."""
    def __init__(self, recon_mode='l2', beta=1):
        super().__init__()
        # Elementwise criteria (reduction='none') so forward() can sum per sample.
        recon_dict = {
            'bce': nn.BCELoss(reduction='none'),
            'l1': nn.L1Loss(reduction='none'),
            'l2': nn.MSELoss(reduction='none'),
            'smoothl1': nn.SmoothL1Loss(reduction='none'),
            'none': None,
        }
        if recon_mode not in recon_dict.keys():
            raise NotImplementedError('Reconstruction mode %s is not implemented' % recon_mode)
        # Normalized beta = beta * latent_dim / input_dim
        self.beta = beta
        self.recon_criterion = recon_dict[recon_mode]
    def forward(self, mu, logvar, recon_x=None, target_x=None):
        # Reconstruction error summed over (C, H, W), then averaged over the batch.
        # Assumes recon_x/target_x are 4-D (N, C, H, W) tensors — TODO confirm with callers.
        if self.recon_criterion is not None:
            recon_loss = self.recon_criterion(recon_x, target_x).sum((1, 2, 3)).mean()
        else:
            recon_loss = 0
        # KL Divergence => 0.5 * sum(1 + log(sigma^2) - mu^2 - sigma^2)
        kld_loss = (-0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp(), 1)).mean()
        return recon_loss + self.beta * kld_loss | [
"[email protected]"
] | |
e185e0a8d0306c187a69813419c3f2bffe3f8ef2 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /pwAdsffHkxdhSwXKc_16.py | a9d24417389bf368548a6f4fbb949210895a81fd | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48 | py |
import re

# Captures a word (group 1) when it is followed by " = " and the value after
# the equals sign does NOT start with "yes" (negative lookahead).
# NOTE(review): prefer a raw string r"(\w+)(?: = )(?!yes)" so "\w" is not
# treated as a string escape.
pattern = "(\w+)(?: = )(?!yes)"
| [
"[email protected]"
] | |
ad1476c4e3d727a594748fbd46e8750466f874b9 | f6d7c30a7ed343e5fe4859ceaae1cc1965d904b7 | /htdocs/submissions/ad1476c4e3d727a594748fbd46e8750466f874b9.py | 21655121ccb580a53180397a9efc156aa7c8ae71 | [] | no_license | pycontest/pycontest.github.io | ed365ebafc5be5d610ff9d97001240289de697ad | 606015cad16170014c41e335b1f69dc86250fb24 | refs/heads/master | 2021-01-10T04:47:46.713713 | 2016-02-01T11:03:46 | 2016-02-01T11:03:46 | 50,828,627 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 124 | py | a=''.join
seven_seg=lambda x:a(a(' ||| _ ___ | |||'[ord('C0WY"KJaQR'[int(l)])/k%7::7]for l in x)+'\n'for k in(49,7,1))
| [
"[email protected]"
] | |
0f08a7de43e780c241ac24fe25e5d88f5c7bb850 | a46d135ba8fd7bd40f0b7d7a96c72be446025719 | /packages/python/plotly/plotly/validators/icicle/outsidetextfont/_family.py | 9f2822cabc11769355cd3ba0b30e64667849668e | [
"MIT"
] | permissive | hugovk/plotly.py | 5e763fe96f225d964c4fcd1dea79dbefa50b4692 | cfad7862594b35965c0e000813bd7805e8494a5b | refs/heads/master | 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 | MIT | 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null | UTF-8 | Python | false | false | 571 | py | import _plotly_utils.basevalidators
class FamilyValidator(_plotly_utils.basevalidators.StringValidator):
    """String validator for the ``icicle.outsidetextfont.family`` property."""

    def __init__(
        self, plotly_name="family", parent_name="icicle.outsidetextfont", **kwargs
    ):
        # Pop the validator options out of kwargs first so caller-supplied
        # overrides take precedence over these defaults.
        array_ok = kwargs.pop("array_ok", True)
        edit_type = kwargs.pop("edit_type", "plot")
        no_blank = kwargs.pop("no_blank", True)
        strict = kwargs.pop("strict", True)
        super(FamilyValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            array_ok=array_ok,
            edit_type=edit_type,
            no_blank=no_blank,
            strict=strict,
            **kwargs
        )
| [
"[email protected]"
] | |
17f6b3138f1968e2507a1f897c7af44cf61fed90 | 0981ddfcd812cdd66f3509b4a052d30e098d8f44 | /start-content/gerando_mensagens.py | af6f574bbb6c37ef8ad9963c8f71c3fa2e19a8c7 | [] | no_license | ricardofelixmont/Udacity-Fundamentos-IA-ML | 3c49532e0a3bd662d6b5262cab056281f0020689 | dda2afc0e31ab94fef937be31b1b12685f447a71 | refs/heads/master | 2020-04-10T13:44:19.472140 | 2019-04-16T21:24:22 | 2019-04-16T21:24:22 | 161,058,181 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 514 | py | names = input('digite os nomes: ').split().title()
assignments = input('Digite os assignments: ').split()
grades = input('Digite as notas: ').split()

# NOTE(review): the preceding `names = ...` line calls .title() on the list
# returned by .split(), which raises AttributeError; it should title-case
# each name individually instead.
message = 'Hi {},This is a reminder that you have {} assignments left to submit before you can graduate. Your current grade is {} and can increase to {} if you submit all assignments before the due date.'

# Iterate the three parallel lists in lockstep. The original line
# `for nome, tarefa, nota in names, assignments, grades:` unpacked the
# tuple of lists itself, pairing the values incorrectly.
for nome, tarefa, nota in zip(names, assignments, grades):
    # Each outstanding assignment can raise the grade by 2 points; convert
    # the string inputs to int before doing arithmetic.
    nota_potencial = int(nota) + 2 * int(tarefa)
    print(message.format(nome, tarefa, nota, nota_potencial))
| [
"[email protected]"
] | |
5a6434d9a3bdeaec69b62fd4db7eeaa128efe908 | f66da3c9293d9f680ab54cf3c562ef5dd661137c | /docs/source/examples/index.py | 731aa4fd9d3096b22bd8ae9a9f1393d90efd68fb | [
"MIT"
] | permissive | dvincentwest/gtimer | c49ef33ecdfeaf55298283cfd24953d372a3f4e5 | 2146dab459e5d959feb291821733d3d3ba7c523c | refs/heads/master | 2022-04-21T22:08:06.103426 | 2016-10-05T07:44:04 | 2016-10-05T07:44:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | import gtimer as gt
import time
@gt.wrap
def some_function():
    # Two timed phases inside the wrapped function, delimited by gtimer stamps.
    time.sleep(1)
    gt.stamp('some_method')
    time.sleep(2)
    gt.stamp('another_function')

# Top-level timing demo: call the wrapped function, then time one more sleep.
some_function()
gt.stamp('some_function')
time.sleep(1)
gt.stamp('another_method')

# Python 2 print statement: dump the collected timing report.
print gt.report()
"[email protected]"
] | |
4df46f23c7e369d02d8672af00dd27c657d333c3 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-cce/huaweicloudsdkcce/v3/model/nic_spec.py | b40e87c841b9dc1783c70313cbd1039699834425 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 5,520 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class NicSpec:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'subnet_id': 'str',
'fixed_ips': 'list[str]',
'ip_block': 'str'
}
attribute_map = {
'subnet_id': 'subnetId',
'fixed_ips': 'fixedIps',
'ip_block': 'ipBlock'
}
def __init__(self, subnet_id=None, fixed_ips=None, ip_block=None):
"""NicSpec
The model defined in huaweicloud sdk
:param subnet_id: 网卡所在子网的网络ID。主网卡创建时若未指定subnetId,将使用集群子网。拓展网卡创建时必须指定subnetId。
:type subnet_id: str
:param fixed_ips: 主网卡的IP将通过fixedIps指定,数量不得大于创建的节点数。fixedIps或ipBlock同时只能指定一个。
:type fixed_ips: list[str]
:param ip_block: 主网卡的IP段的CIDR格式,创建的节点IP将属于该IP段内。fixedIps或ipBlock同时只能指定一个。
:type ip_block: str
"""
self._subnet_id = None
self._fixed_ips = None
self._ip_block = None
self.discriminator = None
if subnet_id is not None:
self.subnet_id = subnet_id
if fixed_ips is not None:
self.fixed_ips = fixed_ips
if ip_block is not None:
self.ip_block = ip_block
@property
def subnet_id(self):
"""Gets the subnet_id of this NicSpec.
网卡所在子网的网络ID。主网卡创建时若未指定subnetId,将使用集群子网。拓展网卡创建时必须指定subnetId。
:return: The subnet_id of this NicSpec.
:rtype: str
"""
return self._subnet_id
@subnet_id.setter
def subnet_id(self, subnet_id):
"""Sets the subnet_id of this NicSpec.
网卡所在子网的网络ID。主网卡创建时若未指定subnetId,将使用集群子网。拓展网卡创建时必须指定subnetId。
:param subnet_id: The subnet_id of this NicSpec.
:type subnet_id: str
"""
self._subnet_id = subnet_id
@property
def fixed_ips(self):
"""Gets the fixed_ips of this NicSpec.
主网卡的IP将通过fixedIps指定,数量不得大于创建的节点数。fixedIps或ipBlock同时只能指定一个。
:return: The fixed_ips of this NicSpec.
:rtype: list[str]
"""
return self._fixed_ips
@fixed_ips.setter
def fixed_ips(self, fixed_ips):
"""Sets the fixed_ips of this NicSpec.
主网卡的IP将通过fixedIps指定,数量不得大于创建的节点数。fixedIps或ipBlock同时只能指定一个。
:param fixed_ips: The fixed_ips of this NicSpec.
:type fixed_ips: list[str]
"""
self._fixed_ips = fixed_ips
@property
def ip_block(self):
"""Gets the ip_block of this NicSpec.
主网卡的IP段的CIDR格式,创建的节点IP将属于该IP段内。fixedIps或ipBlock同时只能指定一个。
:return: The ip_block of this NicSpec.
:rtype: str
"""
return self._ip_block
@ip_block.setter
def ip_block(self, ip_block):
"""Sets the ip_block of this NicSpec.
主网卡的IP段的CIDR格式,创建的节点IP将属于该IP段内。fixedIps或ipBlock同时只能指定一个。
:param ip_block: The ip_block of this NicSpec.
:type ip_block: str
"""
self._ip_block = ip_block
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NicSpec):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
420fe9952d25789d462ba42a6cd0d82c5d3005e8 | 8a1686aeeefa80afeb0aa9f45ed72a75883458c4 | /dit/divergences/maximum_correlation.py | 80b8a60874ec9a78929efb6b7f35b7813c08a5da | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | heleibin/dit | 70afd57f31be346e48b9b28c67fd6e019132ac36 | ebd0c11600e559bf34cf12a6b4e451057838e324 | refs/heads/master | 2020-09-27T07:42:15.991500 | 2019-11-23T06:10:11 | 2019-11-23T06:10:11 | 226,466,522 | 1 | 0 | BSD-3-Clause | 2019-12-07T06:26:50 | 2019-12-07T06:26:50 | null | UTF-8 | Python | false | false | 3,451 | py | """
Compute the maximum correlation:
\rho(X:Y) = max_{f, g} E(f(X)g(Y))
"""
import numpy as np
from ..exceptions import ditException
from ..helpers import normalize_rvs
svdvals = lambda m: np.linalg.svd(m, compute_uv=False)
def conditional_maximum_correlation_pmf(pmf):
    """
    Compute the conditional maximum correlation from a 3-dimensional
    pmf. The maximum correlation is computed between the first two dimensions
    given the third.

    Parameters
    ----------
    pmf : np.ndarray
        The probability distribution.

    Returns
    -------
    rho_max : float
        The conditional maximum correlation.
    """
    # p(x,y|z) and its two conditional marginals p(x|z), p(y|z).
    p_xy_z = pmf / pmf.sum(axis=(0, 1), keepdims=True)
    p_x_z = p_xy_z.sum(axis=1, keepdims=True)
    p_y_z = p_xy_z.sum(axis=0, keepdims=True)

    # Normalized correlation tensor; zero-probability cells contribute 0,
    # and any residual NaNs are zeroed as well.
    Q = np.where(pmf, p_xy_z / (np.sqrt(p_x_z) * np.sqrt(p_y_z)), 0)
    Q[np.isnan(Q)] = 0

    # The conditional maximum correlation is the largest second singular
    # value over the per-z slices of Q (SVD inlined from the svdvals helper).
    rho_max = max(np.linalg.svd(np.squeeze(slab), compute_uv=False)[1]
                  for slab in np.dsplit(Q, Q.shape[2]))
    return rho_max
def maximum_correlation_pmf(pXY):
    """
    Compute the maximum correlation from a 2-dimensional
    pmf. The maximum correlation is computed between the two dimensions.

    Parameters
    ----------
    pmf : np.ndarray
        The probability distribution.

    Returns
    -------
    rho_max : float
        The maximum correlation.
    """
    # Marginals kept 2-D so broadcasting forms their outer product below.
    p_x = pXY.sum(axis=1, keepdims=True)
    p_y = pXY.sum(axis=0, keepdims=True)

    # Normalized joint; 0/0 cells become NaN and are zeroed out.
    Q = pXY / (np.sqrt(p_x) * np.sqrt(p_y))
    Q[np.isnan(Q)] = 0

    # The second-largest singular value is the maximum correlation
    # (SVD inlined from the svdvals helper).
    return np.linalg.svd(Q, compute_uv=False)[1]
def maximum_correlation(dist, rvs=None, crvs=None, rv_mode=None):
    """
    Compute the (conditional) maximum or Renyi correlation between two variables:

        rho_max = max_{f, g} rho(f(X,Z), g(Y,Z) | Z)

    Parameters
    ----------
    dist : Distribution
        The distribution for which the maximum correlation is to be computed.
    rvs : list, None; len(rvs) == 2
        A list of lists. Each inner list specifies the indexes of the random
        variables for which the maximum correlation is to be computed. If None,
        then all random variables are used, which is equivalent to passing
        `rvs=dist.rvs`.
    crvs : list, None
        A single list of indexes specifying the random variables to
        condition on. If None, then no variables are conditioned on.
    rv_mode : str, None
        Specifies how to interpret `rvs` and `crvs`. Valid options are:
        {'indices', 'names'}. If equal to 'indices', then the elements of
        `crvs` and `rvs` are interpreted as random variable indices. If
        equal to 'names', then the elements are interpreted as random
        variable names. If `None`, then the value of `dist._rv_mode` is
        consulted, which defaults to 'indices'.

    Returns
    -------
    rho_max : float; -1 <= rho_max <= 1
        The conditional maximum correlation between `rvs` given `crvs`.
    """
    rvs, crvs, rv_mode = normalize_rvs(dist, rvs, crvs, rv_mode)
    if len(rvs) != 2:
        msg = 'Maximum correlation can only be computed for 2 variables, not {}.'.format(len(rvs))
        raise ditException(msg)
    # Reorder/merge the variables to (X, Y[, Z]) so the pmf axes line up with
    # the helper functions' expectations.
    if crvs:
        dist = dist.copy().coalesce(rvs + [crvs])
    else:
        dist = dist.copy().coalesce(rvs)
    dist.make_dense()
    # One pmf axis per coalesced random variable.
    pmf = dist.pmf.reshape(list(map(len, dist.alphabet)))
    if crvs:
        rho_max = conditional_maximum_correlation_pmf(pmf)
    else:
        rho_max = maximum_correlation_pmf(pmf)
    return rho_max
| [
"[email protected]"
] | |
867a8ce5135318155e8e9463699c47f23b4859b6 | d7016f69993570a1c55974582cda899ff70907ec | /sdk/recoveryservices/azure-mgmt-recoveryservicesbackup/generated_samples/get_resource_guard_proxy.py | a085c61f352a1766d44f4096824fb137a79e154c | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 1,712 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.recoveryservicesbackup import RecoveryServicesBackupClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-recoveryservicesbackup
# USAGE
python get_resource_guard_proxy.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    # Authenticates with ambient Azure credentials (environment variables,
    # managed identity, CLI login, ...) via DefaultAzureCredential.
    client = RecoveryServicesBackupClient(
        credential=DefaultAzureCredential(),
        subscription_id="0b352192-dcac-4cc7-992e-a96190ccc68c",
    )
    # Fetch a single resource guard proxy from the sample vault and print it.
    response = client.resource_guard_proxy.get(
        vault_name="sampleVault",
        resource_group_name="SampleResourceGroup",
        resource_guard_proxy_name="swaggerExample",
    )
    print(response)


# x-ms-original-file: specification/recoveryservicesbackup/resource-manager/Microsoft.RecoveryServices/stable/2023-01-01/examples/ResourceGuardProxyCRUD/GetResourceGuardProxy.json
if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
51ec22fa4a3789ebde8822eb40cf8fb7046df5fc | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02646/s812265777.py | bf9bfeae85ecdf39cf4a7df800b93778ecaceb82 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 717 | py | A, V = map(int, input().split(" "))
B, W = map(int, input().split(" "))
T = int(input())

# Pursuit problem: the chaser at A with speed V tries to catch the runner at B
# with speed W within T seconds. (A and V are read on the preceding line.)
if W > V:
    # The runner is strictly faster: the gap can never shrink.
    print("NO")
elif abs(A - B) <= T * (V - W):
    # The relative speed (V - W) closes the gap within T seconds.
    # (When W == V this reduces to A == B, i.e. already caught.)
    print("YES")
else:
    print("NO")
| [
"[email protected]"
] | |
86b3f2d69deb2248d91d4be3c74f7213daf6915f | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_84/33.py | 57b6d1e3842473c752fac1c09401d02031ba1c9c | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 621 | py | def solve():
    r,c = map(int,raw_input().split())
    a = [list(raw_input()) for _ in xrange(r)]
    # Every '#' must be the top-left corner of a 2x2 block of '#'; each such
    # block is rewritten as a diamond of slashes. A '#' that cannot anchor a
    # full 2x2 block (including at the right/bottom edge) makes it impossible.
    for i in xrange(r):
        for j in xrange(c):
            if a[i][j]!='#': continue
            if i==r-1 or j==c-1: return None
            if a[i+1][j]!='#': return None
            if a[i+1][j+1]!='#': return None
            if a[i][j+1]!='#': return None
            a[i][j]=a[i+1][j+1]='/'
            a[i+1][j]=a[i][j+1]='\\'
    return a

# Python 2 driver: one grid per test case, printed in Code Jam format.
t = input()
for tn in xrange(t):
    r = solve()
    print "Case #%d:"%(tn+1)
    if r is None: print "Impossible"
    else: print '\n'.join(''.join(x) for x in r)
| [
"[email protected]"
] | |
f2c2740bf34812d6361ee4b4a9a39f9f1a9d983b | 86318359cde2629f68113f7fe097757ef8e4923a | /mep/books/migrations/0023_merge_20200406_1206.py | 507c1ce7a10f2e1cdb1c549e0585c76c6e7fc95f | [
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
] | permissive | Princeton-CDH/mep-django | 214f68a9c99b5025722c7301c976004edd277d9b | 6103855f07c2c0123ab21b93b794ea5d5ca39aa2 | refs/heads/main | 2023-08-03T10:15:02.287018 | 2022-09-06T21:16:01 | 2022-09-06T21:16:01 | 94,891,547 | 6 | 0 | Apache-2.0 | 2023-07-20T22:07:34 | 2017-06-20T13:00:00 | Python | UTF-8 | Python | false | false | 271 | py | # Generated by Django 2.2.11 on 2020-04-06 16:06
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated merge migration: unifies the two divergent 'books'
    # migration branches listed below; it performs no schema operations itself.

    dependencies = [
        ('books', '0021_creator_type_order'),
        ('books', '0022_generate_work_slugs'),
    ]

    operations = [
    ]
| [
"[email protected]"
] | |
97d1539bed50afcef0d7e0baa3186bd2c9887471 | 50d331aec35c1429e0d9b68822623ee9a45b251f | /IPTVPlayer/hosts/hostfilisertv.py | 14e90dce85575e0ca3901bcf74824d94a03c9321 | [] | no_license | openmb/iptvplayer | cd00c693adcac426214cc45d7ae5c97b9d7cbe91 | bbc3f5b6f445f83639cd1ebb5992dc737bc9023d | refs/heads/master | 2021-01-17T09:58:09.202306 | 2017-03-26T18:19:10 | 2017-03-26T18:19:10 | 83,997,131 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 19,625 | py | # -*- coding: utf-8 -*-
###################################################
# LOCAL import
###################################################
from Plugins.Extensions.IPTVPlayer.components.iptvplayerinit import TranslateTXT as _, SetIPTVPlayerLastHostError
from Plugins.Extensions.IPTVPlayer.components.ihost import CHostBase, CBaseHostClass, CDisplayListItem, RetHost, CUrlItem, ArticleContent
from Plugins.Extensions.IPTVPlayer.tools.iptvtools import printDBG, printExc, GetLogoDir, GetCookieDir, byteify
from Plugins.Extensions.IPTVPlayer.tools.iptvtypes import strwithmeta
###################################################
###################################################
# FOREIGN import
###################################################
import re
import urllib
import string
import base64
try: import json
except Exception: import simplejson as json
from random import randint
from datetime import datetime
from time import sleep
from copy import deepcopy
from Components.config import config, ConfigSelection, ConfigYesNo, ConfigText, getConfigListEntry
###################################################
###################################################
# E2 GUI COMMPONENTS
###################################################
from Plugins.Extensions.IPTVPlayer.components.asynccall import MainSessionWrapper
from Screens.MessageBox import MessageBox
###################################################
###################################################
# Config options for HOST
###################################################
def GetConfigList():
    """Return host-specific configuration entries; this host exposes none."""
    return []
###################################################
def gettytul():
    """Return the host's display title (its base URL)."""
    return 'http://filiser.tv/'
class FiliserTv(CBaseHostClass):
    def __init__(self):
        # Register the host's history key and per-host cookie file with the base class.
        CBaseHostClass.__init__(self, {'history':'FiliserTv.tv', 'cookie':'filisertv.cookie'})
        self.HEADER = {'User-Agent': 'Mozilla/5.0', 'Accept': 'text/html'}
        # AJAX requests reuse the base header plus the XHR marker.
        self.AJAX_HEADER = dict(self.HEADER)
        self.AJAX_HEADER.update( {'X-Requested-With': 'XMLHttpRequest'} )
        self.defaultParams = {'header':self.HEADER, 'use_cookie': True, 'load_cookie': True, 'save_cookie': True, 'cookiefile': self.COOKIE_FILE}
        self.MAIN_URL = 'http://filiser.tv/'
        self.DEFAULT_ICON_URL = self.MAIN_URL + 'assets/img/logo.png'
        # Top-level menu entries shown when the host is opened.
        self.MAIN_CAT_TAB = [{'category':'list_items', 'title': _('Movies'), 'url':self.getFullUrl('filmy') },
                             {'category':'list_items', 'title': _('Series'), 'url':self.getFullUrl('seriale') },
                             {'category':'search', 'title': _('Search'), 'search_item':True, },
                             {'category':'search_history', 'title': _('Search history'), }
                            ]
        # Per-session caches filled lazily while browsing.
        self.cacheFilters = {}
        self.cacheLinks = {}
        self.cacheSeasons = {}
        FiliserTv.SALT_CACHE = {}
        # Lazily-built header helper; see getHeaders().
        self.WaitALittleBit = None
def getStr(self, item, key):
if key not in item: return ''
if item[key] == None: return ''
return str(item[key])
    def fillFilters(self, cItem):
        """Scrape the listing page at cItem['url'] and populate self.cacheFilters
        with 'language', 'genres', 'year' and 'sort_by' filter option lists.
        """
        self.cacheFilters = {}
        sts, data = self.cm.getPage(cItem['url'])
        if not sts: return
        def addFilter(data, key, addAny, titleBase, marker):
            # Extract (title, value) pairs from HTML fragments; `marker` is the
            # attribute name carrying the filter value.
            self.cacheFilters[key] = []
            for item in data:
                value = self.cm.ph.getSearchGroups(item, '''%s=['"]([^'^"]+?)['"]''' % marker)[0]
                if value == '': continue
                title = self.cleanHtmlStr(item)
                if titleBase == '':
                    title = title.title()
                self.cacheFilters[key].append({'title':titleBase + title, key:value})
            if addAny and len(self.cacheFilters[key]):
                # "Wszystkie" == "All" (no-filter entry).
                self.cacheFilters[key].insert(0, {'title':'Wszystkie'})
        # language
        tmpData = self.cm.ph.getAllItemsBeetwenMarkers(data, '<div class="vBox"', '</div>', withMarkers=True)
        addFilter(tmpData, 'language', True, '', 'data-type')
        # genres
        tmpData = self.cm.ph.getAllItemsBeetwenMarkers(data, '<li data-gen', '</li>', withMarkers=True)
        addFilter(tmpData, 'genres', True, '', 'data-gen')
        # year: generated locally, current year back to 1978
        self.cacheFilters['year'] = [{'title':_('Year: ') + _('Any')}]
        year = datetime.now().year
        while year >= 1978:
            self.cacheFilters['year'].append({'title': _('Year: ') + str(year), 'year': year})
            year -= 1
        # sort: fixed keys with Polish display names
        self.cacheFilters['sort_by'] = []
        for item in [('date', 'data dodania/aktualizacji'), ('views', ' liczba wyświetleń'), ('rate', ' ocena')]:
            self.cacheFilters['sort_by'].append({'title': _('Sort by: ') + str(item[1]), 'sort_by': item[0]})
        # add order to sort_by filter: duplicate each entry into a descending
        # (down-arrow) and an ascending (up-arrow) variant
        orderLen = len(self.cacheFilters['sort_by'])
        for idx in range(orderLen):
            item = deepcopy(self.cacheFilters['sort_by'][idx])
            # desc
            self.cacheFilters['sort_by'][idx].update({'title':'\xe2\x86\x93 ' + self.cacheFilters['sort_by'][idx]['title'], 'order':'desc'})
            # asc
            item.update({'title': '\xe2\x86\x91 ' + item['title'], 'order':'asc'})
            self.cacheFilters['sort_by'].append(item)
    def listFilter(self, cItem, filters):
        """Present the filter wizard: one menu level per entry in `filters`.

        'f_idx' in the item tracks the current level; the filter options are
        scraped from the site only on the first level.
        """
        params = dict(cItem)
        idx = params.get('f_idx', 0)
        params['f_idx'] = idx + 1
        if idx == 0:
            self.fillFilters(cItem)
        tab = self.cacheFilters.get(filters[idx], [])
        self.listsTab(tab, params)
    def listItems(self, cItem, nextCategory):
        """List movies/series for the current category, applying any selected
        filters as query parameters, with pagination support.
        """
        printDBG("FiliserTv.listItems")
        # Build the query string from the filters chosen earlier.
        baseUrl = cItem['url']
        if '?' not in baseUrl:
            baseUrl += '?'
        else:
            baseUrl += '&'
        page = cItem.get('page', 1)
        if page > 1:
            baseUrl += 'page={0}&'.format(page)
        if cItem.get('genres', '') not in ['-', '']:
            baseUrl += 'kat={0}&'.format(cItem['genres'])
        if cItem.get('language', '') not in ['-', '']:
            baseUrl += 'ver={0}&'.format(cItem['language'])
        if cItem.get('year', '0') not in ['0', '-', '']:
            baseUrl += 'start_year={0}&end_year={1}&'.format(cItem['year'], cItem['year'])
        if cItem.get('sort_by', '0') not in ['0', '-', '']:
            baseUrl += 'sort_by={0}&'.format(cItem['sort_by'])
        if cItem.get('order', '0') not in ['0', '-', '']:
            baseUrl += 'type={0}&'.format(cItem['order'])
        sts, data = self.cm.getPage(self.getFullUrl(baseUrl), self.defaultParams)
        if not sts: return
        # '>Następna<' is the Polish "Next" pagination link.
        if '>Następna<' in data:
            nextPage = True
        else: nextPage = False
        data = self.cm.ph.getAllItemsBeetwenMarkers(data, '<section class="item"', '</section>', withMarkers=True)
        for item in data:
            url = self.getFullUrl(self.cm.ph.getSearchGroups(item, '''href=['"]([^'^"]+?)['"]''')[0])
            icon = self.getFullIconUrl(self.cm.ph.getSearchGroups(item, '''src=['"]([^'^"]+?)['"]''')[0].strip())
            title = self.cleanHtmlStr(self.cm.ph.getSearchGroups(item, '''alt=['"]([^'^"]+?)['"]''')[0])
            if title == '': title = self.cleanHtmlStr(self.cm.ph.getSearchGroups(item, '''title=['"]([^'^"]+?)['"]''')[0])
            # NOTE(review): title1/title2 are extracted but never used below.
            title1 = self.cleanHtmlStr(self.cm.ph.getDataBeetwenMarkers(item, '<h3', '</h3>')[1])
            title2 = self.cleanHtmlStr(self.cm.ph.getDataBeetwenMarkers(item, '<h4', '</h4>')[1])
            desc = self.cleanHtmlStr(item.split('<div class="block2">')[-1].replace('<p class="desc">', '[/br]'))
            params = {'good_for_fav': True, 'title':title, 'url':url, 'icon':icon, 'desc':desc}
            # Movies play directly; series open a season listing.
            if '/film/' in url:
                self.addVideo(params)
            elif '/serial/' in url:
                params['category'] = nextCategory
                self.addDir(params)
        if nextPage:
            params = dict(cItem)
            params.update({'title':_('Next page'), 'page':page + 1})
            self.addDir(params)
    def listSeasons(self, cItem, nextCategory):
        """Scrape a series page, cache its season/episode tree in
        self.cacheSeasons, and add one directory entry per season.
        """
        printDBG("FiliserTv.listSeasons")
        self.cacheSeasons = {'keys':[], 'dict':{}}
        sts, data = self.cm.getPage(cItem['url'], self.defaultParams)
        if not sts: return
        data = data.split('<div id="episodes">')
        if 2 != len(data): return
        # Season list: (number, display title) pairs before the episodes div.
        tmp = self.cm.ph.getDataBeetwenMarkers(data[0], '<div id="seasons_list">', '<div class="clear">')[1]
        tmp = re.compile('<[^>]+?num\="([0-9]+?)"[^>]*?>([^<]+?)<').findall(tmp)
        for item in tmp:
            self.cacheSeasons['keys'].append({'key':item[0], 'title':self.cleanHtmlStr(item[1])})
        del data[0]
        # fill episodes
        for season in self.cacheSeasons['keys']:
            tmp = self.cm.ph.getDataBeetwenMarkers(data[0], 'data-season-num="%s"' % season['key'], '</ul>')[1]
            tmp = self.cm.ph.getAllItemsBeetwenMarkers(tmp, '<li', '</li>', withMarkers=True)
            self.cacheSeasons['dict'][season['key']] = []
            for item in tmp:
                url = self.getFullUrl(self.cm.ph.getSearchGroups(item, '''href=['"]([^'^"]+?)['"]''')[0])
                title = self.cleanHtmlStr(self.cm.ph.getDataBeetwenMarkers(item, '<a class="episodeName"', '</a>')[1])
                # sNNeNN marker taken from the episode URL.
                es = self.cm.ph.getSearchGroups(url, '''/(s[0-9]+?e[0-9]+?)/''')[0]
                self.cacheSeasons['dict'][season['key']].append({'good_for_fav': True, 'title': '%s: %s %s' % (cItem['title'], es, title), 'url':url})
        for season in self.cacheSeasons['keys']:
            params = dict(cItem)
            params.update({'good_for_fav': False, 'category':nextCategory, 'title':season['title'], 's_key':season['key']})
            self.addDir(params)
def listEpisodes(self, cItem):
printDBG("FiliserTv.listEpisodes")
tab = self.cacheSeasons.get('dict', {}).get(cItem['s_key'], [])
for item in tab:
params = dict(cItem)
params.update(item)
self.addVideo(params)
    def listSearchResult(self, cItem, searchPattern, searchType):
        """Query the site's search endpoint and list matching movies/series."""
        printDBG("FiliserTv.listSearchResult cItem[%s], searchPattern[%s] searchType[%s]" % (cItem, searchPattern, searchType))
        # 'szukaj' == 'search'.
        baseUrl = self.getFullUrl('szukaj?q=' + urllib.quote_plus(searchPattern))
        sts, data = self.cm.getPage(baseUrl)
        if not sts: return
        data = self.cm.ph.getDataBeetwenMarkers(data, '<ul id="resultList2">', '</ul>', withMarkers=False)[1]
        data = self.cm.ph.getAllItemsBeetwenMarkers(data, '<li', '</li>', withMarkers=True)
        for item in data:
            tmp = item.split('<div class="info">')
            url = self.getFullUrl(self.cm.ph.getSearchGroups(item, '''href=['"]([^'^"]+?)['"]''')[0])
            # Join the display title and the original title with a slash.
            title = self.cleanHtmlStr(tmp[0].replace('<div class="title_org">', '/'))
            icon = self.getFullIconUrl(self.cm.ph.getSearchGroups(item, '''src=['"]([^'^"]+?)['"]''')[0].strip())
            desc = self.cleanHtmlStr(tmp[-1])
            params = {'good_for_fav': True, 'title':title, 'url':url, 'icon':icon, 'desc':desc}
            # Movies play directly; series open the season listing.
            if '/film/' in url:
                self.addVideo(params)
            elif '/serial/' in url:
                params['category'] = 'list_seasons'
                self.addDir(params)
    def getLinksForVideo(self, cItem):
        """Scrape the item's page for hosting links, grouped by version tab
        (e.g. language/quality). Results are cached per URL; each link still
        needs to be resolved via getVideoLinks ('need_resolve': 1).
        """
        printDBG("FiliserTv.getLinksForVideo [%s]" % cItem)
        urlTab = []
        # Serve from cache when this URL was already scraped in this session.
        if len(self.cacheLinks.get(cItem['url'], [])):
            return self.cacheLinks[cItem['url']]
        sts, data = self.cm.getPage(cItem['url'], self.defaultParams)
        if not sts: return []
        # Surface any site-provided error message to the user.
        errorMessage = self.cleanHtmlStr(self.cm.ph.getDataBeetwenMarkers(data, '<h2 class="title_block">', '</section>')[1])
        if '' != errorMessage: SetIPTVPlayerLastHostError(errorMessage)
        data = data.split('<div id="links">')
        if 2 != len(data): return []
        # Collect the version tabs (data-type attribute + display title).
        tabs = []
        tmp = self.cm.ph.getDataBeetwenMarkers(data[0], '<div id="video_links">', '<div class="clear">')[1]
        tmp = re.compile('<[^>]+?data-type\="([^"]+?)"[^>]*?>([^<]+?)<').findall(tmp)
        for item in tmp:
            tabs.append({'key':item[0], 'title':self.cleanHtmlStr(item[1])})
        del data[0]
        # For every tab, collect its <li> link entries (data-ref carries the salt/url).
        for tab in tabs:
            tmp = self.cm.ph.getDataBeetwenMarkers(data[0], 'data-type="%s"' % tab['key'], '</ul>')[1]
            tmp = self.cm.ph.getAllItemsBeetwenMarkers(tmp, '<li', '</li>', withMarkers=True)
            for item in tmp:
                url = self.cm.ph.getSearchGroups(item, '''data-ref=['"]([^'^"]+?)['"]''')[0]
                title = self.cleanHtmlStr(item.split('<div class="rightSide">')[0])
                urlTab.append({'name': '%s: %s' % (tab['title'], title), 'url':url, 'need_resolve':1})
        self.cacheLinks[cItem['url']] = urlTab
        return urlTab
def getHeaders(self, tries):
    """Build HTTP headers for the embed request; retries get spoofed headers.

    On the first attempt the plain default headers are returned.  On later
    attempts an obfuscated helper (base64-encoded Python 2 source, compiled
    and exec'd once, then cached on ``self.WaitALittleBit``) is invoked;
    judging by the ``randint`` builtin it is handed, it presumably fabricates
    a randomized header set (e.g. X-Forwarded-For) to dodge rate limiting —
    TODO confirm by decoding the blob.
    """
    header = dict(self.HEADER)
    if tries == 1:
        return header
    if self.WaitALittleBit == None:
        try:
            tmp = 'ZGVmIHphcmF6YShpbl9hYmMpOg0KICAgIGRlZiByaGV4KGEpOg0KICAgICAgICBoZXhfY2hyID0gJzAxMjM0NTY3ODlhYmNkZWYnDQogICAgICABiID0gZmYoYiwgYywgZCwgYSwgdGFiQlszXSwgMjIsIC0xMDQ0NTI1MzMwKTsN\rZGVmIFdhaXRBTGl0dGxlQml0KHRyaWVzKToNCiAgICBmaXJzdEJ5dGUgPSBbODUsMTA5LDg5LDkxLDQ2LDE3OCwyMTcsMjEzXQ0KICAgIGlwID0gJyVzLiVzLiVzLiVzJyAlIChmaXJzdEJ5dGVbcmFuZGludCgwLCBsZW4oZmlyc3RCeXRlKSldLCByYW5kaW50KDAsIDI0NiksICByYW5kaW50KDAsIDI0NiksICByYW5kaW50KDAsIDI0NikpDQogICAgcmV0dXJuIHsnVXNlci1BZ2VudCc6J01vemlsbGEvNS4wJywnQWNjZXB0JzondGV4dC9odG1sJywnWC1Gb3J3YXJkZWQtRm9yJzppcH0NCg0K'
            # Only the part after '\r' is used; decode and compile it once.
            tmp = base64.b64decode(tmp.split('\r')[-1]).replace('\r', '')
            WaitALittleBit = compile(tmp, '', 'exec')
            # Restricted globals: no builtins beyond the few names listed.
            vGlobals = {"__builtins__": None, 'len': len, 'list': list, 'dict':dict, 'randint':randint}
            vLocals = { 'WaitALittleBit': '' }
            # NOTE: Python 2 'exec ... in' statement syntax.
            exec WaitALittleBit in vGlobals, vLocals
            self.WaitALittleBit = vLocals['WaitALittleBit']
        except Exception:
            printExc()
    try:
        # Merge the generated headers over the defaults; best-effort.
        header.update(self.WaitALittleBit(tries))
    except Exception:
        printExc()
    return header
def getVideoLinks(self, videoUrl):
    """Resolve a salt token (or direct URL) into playable video links.

    Salt tokens are exchanged for real URLs via the site's /embed endpoint,
    retrying up to 6 times with randomized headers when a captcha wall is
    detected.  Resolved URLs are cached (base64-encoded) in
    ``FiliserTv.SALT_CACHE`` keyed by the salt.
    """
    printDBG("FiliserTv.getVideoLinks [%s]" % videoUrl)
    urlTab = []
    # mark requested link as used one
    if len(self.cacheLinks.keys()):
        key = self.cacheLinks.keys()[0]
        for idx in range(len(self.cacheLinks[key])):
            if videoUrl in self.cacheLinks[key][idx]['url']:
                if not self.cacheLinks[key][idx]['name'].startswith('*'):
                    self.cacheLinks[key][idx]['name'] = '*' + self.cacheLinks[key][idx]['name']
                break
    reCaptcha = False
    # Not a URL yet -> treat it as a salt token to be exchanged.
    if not self.cm.isValidUrl(videoUrl):
        salt = videoUrl
        if salt not in FiliserTv.SALT_CACHE:
            httpParams = dict(self.defaultParams)
            tries = 0
            while tries < 6:
                tries += 1
                url = 'http://filiser.tv/embed?salt=' + videoUrl
                # Later tries use spoofed headers; see getHeaders().
                httpParams['header'] = self.getHeaders(tries)
                sts, data = self.cm.getPage(url, httpParams)
                if not sts: return urlTab
                if '/captchaResponse' in data:
                    # Captcha wall - back off briefly and retry.
                    reCaptcha = True
                    sleep(1)
                    continue
                reCaptcha = False
                videoUrl = self.cm.ph.getSearchGroups(data, '''var\s*url\s*=\s*['"](http[^'^"]+?)['"]''')[0]
                videoUrl = videoUrl.replace('#WIDTH', '800').replace('#HEIGHT', '600')
                if self.cm.isValidUrl(videoUrl):
                    FiliserTv.SALT_CACHE[salt] = base64.b64encode(videoUrl)
                break
        else:
            # Cache hit: restore the previously resolved URL.
            videoUrl = base64.b64decode(FiliserTv.SALT_CACHE[salt])
    if self.cm.isValidUrl(videoUrl):
        urlTab = self.up.getVideoLinkExt(videoUrl)
    if reCaptcha:
        # Polish message asking the user to solve the captcha in a browser.
        self.sessionEx.open(MessageBox, 'Otwórz stronę http://filiser.tv/ w przeglądarce i odtwórz dowolny film potwierdzając, że jesteś człowiekiem.', type = MessageBox.TYPE_ERROR, timeout = 10 )
    return urlTab
def getFavouriteData(self, cItem):
    # Favourites are persisted as the JSON-serialized item dict; decoded
    # later by getLinksForFavourite / setInitListFromFavouriteItem.
    printDBG('FiliserTv.getFavouriteData')
    return json.dumps(cItem)
def getLinksForFavourite(self, fav_data):
    """Decode a favourite payload and fetch its playable links.

    Returns an empty list if the stored JSON is malformed or scraping fails.
    """
    printDBG('FiliserTv.getLinksForFavourite')
    try:
        item = byteify(json.loads(fav_data))
        return self.getLinksForVideo(item)
    except Exception:
        printExc()
        return []
def setInitListFromFavouriteItem(self, fav_data):
    """Rebuild the navigation entry for a stored favourite.

    Falls back to an empty params dict when the payload cannot be parsed,
    so a corrupt favourite still produces a (blank) directory entry.
    """
    printDBG('FiliserTv.setInitListFromFavouriteItem')
    try:
        params = byteify(json.loads(fav_data))
    except Exception:
        printExc()
        params = {}
    self.addDir(params)
    return True
def handleService(self, index, refresh = 0, searchPattern = '', searchType = ''):
    """Dispatch one navigation step based on the current item's category.

    Called by the host framework; populates ``self.currList`` with the next
    menu level (categories, seasons, episodes, search results, ...).
    """
    printDBG('handleService start')
    CBaseHostClass.handleService(self, index, refresh, searchPattern, searchType)
    name = self.currItem.get("name", '')
    category = self.currItem.get("category", '')
    mode = self.currItem.get("mode", '')
    printDBG( "handleService: |||||||||||||||||||||||||||||||||||| name[%s], category[%s] " % (name, category) )
    self.currList = []
    #MAIN MENU
    if name == None:
        self.listsTab(self.MAIN_CAT_TAB, {'name':'category'})
    elif 'list_items' == category:
        # Walk the filter chain first (one filter per step, tracked by
        # 'f_idx'); only after all filters are chosen list the titles.
        filtersTab = ['language', 'genres', 'year', 'sort_by']
        idx = self.currItem.get('f_idx', 0)
        if idx < len(filtersTab):
            self.listFilter(self.currItem, filtersTab)
        else:
            self.listItems(self.currItem, 'list_seasons')
    elif category == 'list_seasons':
        self.listSeasons(self.currItem, 'list_episodes')
    elif category == 'list_episodes':
        self.listEpisodes(self.currItem)
    #SEARCH
    elif category in ["search", "search_next_page"]:
        cItem = dict(self.currItem)
        cItem.update({'search_item':False, 'name':'category'})
        self.listSearchResult(cItem, searchPattern, searchType)
    # SEARCH HISTORY
    elif category == "search_history":
        self.listsHistory({'name':'history', 'category': 'search'}, 'desc', _("Type: "))
    else:
        # Unknown category - log a traceback to aid debugging.
        printExc()
    CBaseHostClass.endHandleService(self, index, refresh)
class IPTVHost(CHostBase):
    # Thin adapter that plugs the FiliserTv scraper into the IPTV player's
    # generic host interface (second arg enables search, third is the
    # list of extra converters).
    def __init__(self):
        CHostBase.__init__(self, FiliserTv(), True, [])
| [
"[email protected]"
] | |
b18beae3273c85da402f38b9ec0bc6ec5fee5418 | c53b3e120c59557daaa2fa5b7626413105eb5965 | /tendenci/apps/corporate_memberships/templatetags/corporate_memberships_tags.py | 541c5e793ea1ad4b6f702a52f3951f2a8cd441e2 | [] | no_license | chendong0444/ams | 8483334d9b687708d533190b62c1fa4fd4690f2c | f2ac4ecc076b223c262f2cde4fa3b35b4a5cd54e | refs/heads/master | 2021-05-01T03:59:18.682836 | 2018-07-23T06:33:41 | 2018-07-23T06:33:41 | 121,194,728 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,075 | py | import random
from django.contrib.auth.models import AnonymousUser, User
from django.db import models
from django.template import Node, Library, TemplateSyntaxError, Variable
from django.utils.translation import ugettext_lazy as _
from tendenci.apps.corporate_memberships.models import CorpMembership
from tendenci.apps.base.template_tags import ListNode, parse_tag_kwargs
from tendenci.apps.site_settings.utils import get_setting
from tendenci.apps.base.utils import tcurrency
register = Library()
@register.inclusion_tag(
    "memberships/applications/render_membership_field.html")
def render_corpmembership_field(request, field_obj,
                                corpprofile_form,
                                corpmembership_form):
    """Render one application field, pulling the bound form field from
    whichever of the two forms owns it.

    Section breaks and display-only fields render with no bound field.
    """
    field = None
    if field_obj.field_type != "section_break" and not field_obj.display_only:
        name = field_obj.field_name
        if name in corpprofile_form.field_names:
            field = corpprofile_form[name]
        elif name in corpmembership_form.field_names:
            field = corpmembership_form[name]
    return {'request': request, 'field_obj': field_obj,
            'field': field, 'field_pwd': None}
@register.assignment_tag
def individual_pricing_desp(corp_membership):
    """
    Return the pricing description for individual memberships joining
    under this corp_membership ('' when no membership is given).
    """
    if not corp_membership:
        return ''
    membership_type = corp_membership.corporate_membership_type.membership_type
    fee = membership_type.admin_fee or 0
    # Zero total (price plus admin fee) is advertised as free.
    if not (membership_type.price + fee):
        return 'free'
    price_label = tcurrency(membership_type.price)
    if membership_type.admin_fee:
        price_label = '%s + %s' % (
            price_label,
            tcurrency(membership_type.admin_fee))
    return price_label
@register.inclusion_tag("corporate_memberships/nav.html", takes_context=True)
def corpmemb_nav(context, user, corp_memb=None):
    # Expose the membership as the nav object alongside the acting user.
    context.update({'nav_object': corp_memb, "user": user})
    return context
@register.inclusion_tag("corporate_memberships/top_nav_items.html", takes_context=True)
def corpmemb_current_app(context, user, corp_memb=None):
    # Top navigation needs the membership as the current app object.
    context.update({'app_object': corp_memb, "user": user})
    return context
@register.inclusion_tag("corporate_memberships/options.html", takes_context=True)
def corpmemb_options(context, user, corp_memb):
    # Options menu for a specific corporate membership.
    context.update({"opt_object": corp_memb, "user": user})
    return context
@register.inclusion_tag("corporate_memberships/applications/search_form.html", takes_context=True)
def corpmembership_search(context):
    # The search form template only needs the existing context; pass it
    # through unchanged.
    return context
class AllowViewCorpNode(Node):
    """Template node evaluating ``corp_memb.allow_view_by(user)``.

    When a target variable name was given the boolean is stored in the
    context and nothing is rendered; otherwise the boolean itself is
    returned as the node's output.
    """

    def __init__(self, corp_memb, user, context_var):
        self.corp_memb = corp_memb
        self.user = user
        self.var_name = context_var

    def resolve(self, var, context):
        return Variable(var).resolve(context)

    def render(self, context):
        membership = self.resolve(self.corp_memb, context)
        viewer = self.resolve(self.user, context)
        allowed = membership.allow_view_by(viewer)
        if not self.var_name:
            return allowed
        context[self.var_name] = allowed
        return ""
@register.tag
def allow_view_corp(parser, token):
    """
    Check whether ``user`` may view ``corp_memb``::

        {% allow_view_corp corp_memb user as allow_view %}
    """
    bits = token.split_contents()
    # Explicit length checks replace the original bare ``except:`` clauses,
    # which silently swallowed *every* exception (not just the IndexError
    # from a missing argument).  split_contents() returns a list of strings,
    # so missing positions can only mean "argument not supplied".
    corp_memb = bits[1] if len(bits) > 1 else None
    user = bits[2] if len(bits) > 2 else None
    context_var = bits[4] if len(bits) >= 5 else None
    return AllowViewCorpNode(corp_memb, user, context_var=context_var)
class AllowEditCorpNode(Node):
    """Template node evaluating ``corp_memb.allow_edit_by(user)``.

    Stores the boolean in the context under the requested name, or returns
    it directly when no name was supplied.
    """

    def __init__(self, corp_memb, user, context_var):
        self.corp_memb = corp_memb
        self.user = user
        self.var_name = context_var

    def resolve(self, var, context):
        return Variable(var).resolve(context)

    def render(self, context):
        membership = self.resolve(self.corp_memb, context)
        editor = self.resolve(self.user, context)
        allowed = membership.allow_edit_by(editor)
        if not self.var_name:
            return allowed
        context[self.var_name] = allowed
        return ""
@register.tag
def allow_edit_corp(parser, token):
    """
    Check whether ``user`` may edit ``corp_memb``::

        {% allow_edit_corp corp_memb user as allow_edit %}
    """
    bits = token.split_contents()
    # Explicit length checks replace the original bare ``except:`` clauses,
    # which hid every kind of failure instead of just a missing argument.
    corp_memb = bits[1] if len(bits) > 1 else None
    user = bits[2] if len(bits) > 2 else None
    context_var = bits[4] if len(bits) >= 5 else None
    return AllowEditCorpNode(corp_memb, user, context_var=context_var)
class ListCorpMembershipNode(ListNode):
    """Node behind the ``list_corporate_memberships`` tag.

    Resolves tag kwargs against the template context, applies the site's
    visibility settings, and stores the resulting list of CorpMembership
    objects in the context under ``context_var``.
    """
    model = CorpMembership

    def __init__(self, context_var, *args, **kwargs):
        self.context_var = context_var
        self.kwargs = kwargs

        # Defensive checks inherited from ListNode's contract.
        if not self.model:
            raise AttributeError(_('Model attribute must be set'))
        if not issubclass(self.model, models.Model):
            raise AttributeError(_('Model attribute must derive from Model'))
        if not hasattr(self.model.objects, 'search'):
            raise AttributeError(_('Model.objects does not have a search method'))

    def render(self, context):
        # NOTE(review): ``tags`` is never used, and ``query`` is resolved
        # below but never applied to the queryset - the ``query`` tag option
        # appears to be a no-op here; confirm against ListNode siblings.
        tags = u''
        query = u''
        user = AnonymousUser()
        limit = 3
        order = '-join_dt'
        randomize = False

        # Site-level visibility switches; member search implies at least
        # anonymous-search permission.
        allow_anonymous_search = get_setting('module',
                                     'corporate_memberships',
                                     'anonymoussearchcorporatemembers')
        allow_member_search = get_setting('module',
                                  'corporate_memberships',
                                  'membersearchcorporatemembers')
        allow_member_search = allow_member_search or allow_anonymous_search

        if 'random' in self.kwargs:
            randomize = bool(self.kwargs['random'])

        if 'user' in self.kwargs:
            # Try to resolve as a template variable first; fall back to the
            # literal kwarg value (e.g. the string "anon").
            try:
                user = Variable(self.kwargs['user'])
                user = user.resolve(context)
            except:
                user = self.kwargs['user']
            if user == "anon" or user == "anonymous":
                user = AnonymousUser()
        else:
            # check the context for an already existing user
            # and see if it is really a user object
            if 'user' in context:
                if isinstance(context['user'], User):
                    user = context['user']

        if 'limit' in self.kwargs:
            try:
                limit = Variable(self.kwargs['limit'])
                limit = limit.resolve(context)
            except:
                limit = self.kwargs['limit']
        limit = int(limit)

        if 'query' in self.kwargs:
            try:
                query = Variable(self.kwargs['query'])
                query = query.resolve(context)
            except:
                query = self.kwargs['query']  # context string

        if 'order' in self.kwargs:
            try:
                order = Variable(self.kwargs['order'])
                order = order.resolve(context)
            except:
                order = self.kwargs['order']

        items = CorpMembership.objects.all()
        # Visibility filtering: superusers see all; members see all only
        # when member search is enabled; anonymous users only when
        # anonymous search is enabled.
        if user.is_authenticated():
            if not user.profile.is_superuser:
                if user.profile.is_member and allow_member_search:
                    items = items.distinct()
                else:
                    items = items.none()
        else:
            if not allow_anonymous_search:
                items = items.none()

        objects = []

        # if order is not specified it sorts by relevance
        if order:
            items = items.order_by(order)

        if randomize:
            # random.sample materializes the queryset to shuffle it.
            objects = [item for item in random.sample(items, items.count())][:limit]
        else:
            objects = [item for item in items[:limit]]

        context[self.context_var] = objects
        return ""
@register.tag
def list_corporate_memberships(parser, token):
    """
    Used to pull a list of :model:`corporate_memberships.CorpMembership` items.

    Usage::

        {% list_corporate_memberships as [varname] [options] %}

    Be sure the [varname] has a specific name like ``corpmembership_sidebar`` or
    ``corpmembership_list``. Options can be used as [option]=[value]. Wrap text values
    in quotes like ``query="cool"``. Options include:

        ``limit``
           The number of items that are shown. **Default: 3**
        ``order``
           The order of the items. **Default: Newest Approved**
        ``user``
           Specify a user to only show public items to all. **Default: Viewing user**
        ``query``
           The text to search for items. Will not affect order.
        ``random``
           Use this with a value of true to randomize the items included.

    Example::

        {% list_corporate_memberships as corpmembership_list limit=5 %}
        {% for corpmembership in corpmembership_list %}
            {{ corpmembership.corp_profile.name }}
        {% endfor %}
    """
    args, kwargs = [], {}
    bits = token.split_contents()

    # Validate the argument count *before* indexing into ``bits``.  The
    # original read ``bits[2]`` first, so `{% list_corporate_memberships %}`
    # crashed with an IndexError instead of raising the intended
    # TemplateSyntaxError below.
    if len(bits) < 3:
        message = "'%s' tag requires at least 2 parameters" % bits[0]
        raise TemplateSyntaxError(_(message))

    if bits[1] != "as":
        message = "'%s' second argument must be 'as'" % bits[0]
        raise TemplateSyntaxError(_(message))

    context_var = bits[2]

    kwargs = parse_tag_kwargs(bits)

    if 'order' not in kwargs:
        kwargs['order'] = '-join_dt'

    return ListCorpMembershipNode(context_var, *args, **kwargs)
| [
"[email protected]"
] | |
d893501915448c66c6bda1af06cc5994eb4911be | 3428950daafacec9539a83809cf9752000508f63 | /data_struct/10_combination.py | 136a9f2a426e90a1e6863a66530fc3a6d61f865a | [] | no_license | HyunAm0225/Python_Algorithm | 759b91743abf2605dfd996ecf7791267b0b5979a | 99fb79001d4ee584a9c2d70f45644e9101317764 | refs/heads/master | 2023-05-24T05:29:12.838390 | 2021-06-15T16:36:33 | 2021-06-15T16:36:33 | 274,587,523 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 304 | py | def combination(s):
if len(s) < 2:
return s
res = []
for i, c in enumerate(s):
res.append(c) # 추가된 부분
for j in combination(s[:i]+s[i+1:]):
res.append(c+j)
return res
if __name__ == "__main__":
    # Demo: print every partial permutation of "abc".
    data = "abc"
    print(combination(data))
| [
"[email protected]"
] | |
9c99e9c602c04babded94612e54aa4147608301e | 265403cd620f1729176343626db909ccecc67243 | /runserver.py | c80b199e83158ccb7fb8c091ddaef40bbfda8d39 | [
"MIT"
] | permissive | djpnewton/beerme | a5486efe8f3a632ff479f75fde825264f8c45d42 | b06564f897c5e63a6af6229aa5234ef5de4820a7 | refs/heads/master | 2020-04-05T23:21:15.375723 | 2015-07-01T07:37:02 | 2015-07-01T07:37:02 | 37,122,757 | 1 | 1 | null | 2015-07-05T12:26:55 | 2015-06-09T09:25:34 | Python | UTF-8 | Python | false | false | 2,544 | py | #!/usr/bin/python
from beerme import app
import os
host = os.getenv('HOST', '127.0.0.1')
port = int(os.getenv('PORT', 5000))
# get filenames before daemonizing
app_log_filename = os.path.realpath('log/beerme.log')
access_log_filename = os.path.realpath('log/access.log')
def log_app():
    """Wire rotating file logging for the Flask app and WSGI access log.

    Returns the TransLogger-wrapped WSGI callable to be grafted into
    CherryPy.  CherryPy's own file logging is redirected to the rotating
    handlers so everything ends up under log/.
    """
    import cherrypy
    from paste.translogger import TransLogger
    import logging
    from logging.handlers import RotatingFileHandler

    # Enable app/error logging
    handler = RotatingFileHandler(app_log_filename, maxBytes=10000000, backupCount=5)
    handler.setLevel(logging.DEBUG)
    app.logger.addHandler(handler)
    cherrypy.log.error_file = ''
    cherrypy.log.error_log.addHandler(handler)

    # Enable WSGI access logging access via Paste
    app_logged = TransLogger(app)
    handler = RotatingFileHandler(access_log_filename, maxBytes=10000000, backupCount=5)
    logger = logging.getLogger('wsgi')
    logger.addHandler(handler)
    cherrypy.log.access_file = ''

    return app_logged
def start_cherrypy():
    """Serve the logged Flask app under CherryPy's WSGI server.

    Blocks until the CherryPy engine is stopped.
    """
    import cherrypy
    # create cherrypy logged app
    app_logged = log_app()
    # Mount the WSGI callable object (app) on the root directory
    cherrypy.tree.graft(app_logged, '/')
    # Set the configuration of the web server
    cherrypy.config.update({
        'engine.autoreload.on': True,
        'log.screen': True,
        'server.socket_port': port,
        # Listen on all interfaces, not just localhost.  The original dict
        # literal assigned 'server.socket_host' twice (first the HOST env
        # value, then '0.0.0.0'); the first entry was silently shadowed, so
        # the duplicate key is removed here without changing behaviour.
        'server.socket_host': '0.0.0.0'
    })
    # Start the CherryPy WSGI web server
    cherrypy.engine.start()
    cherrypy.engine.block()
def start_debug():
    # Flask development server with the interactive debugger enabled;
    # for local development only.
    app.debug = True
    app.run(host, port)
if __name__ == '__main__':
    import sys
    # 'debug' runs Flask's built-in dev server in the foreground; every
    # other command controls a daemonized CherryPy server.
    # NOTE: Python 2 print statements - this script targets Python 2.
    if len(sys.argv) == 2 and 'debug' == sys.argv[1]:
        start_debug()
    else:
        from beerme.daemon import Daemon

        class BitriskDaemon(Daemon):
            # Daemon entry point: blocks inside the CherryPy engine.
            def run(self):
                start_cherrypy()

        daemon = BitriskDaemon('/tmp/beerme-daemon.pid')
        if len(sys.argv) == 2:
            if 'start' == sys.argv[1]:
                daemon.start()
            elif 'stop' == sys.argv[1]:
                daemon.stop()
            elif 'restart' == sys.argv[1]:
                daemon.restart()
            elif 'foreground' == sys.argv[1]:
                # Run without forking (useful under a process supervisor).
                daemon.run()
            else:
                print "Unknown command"
                sys.exit(2)
            sys.exit(0)
        else:
            print "usage: %s start|stop|restart|foreground|debug" % sys.argv[0]
            sys.exit(2)
| [
"[email protected]"
] | |
1a7a6d6691359a38be4991eefc4ff9dd19b73511 | 4edb067c8c748e503e154bb2b9190843f6f1684a | /tests/test_utils/test_decorators.py | 99e6a477f5b2abc16262433e3225e0fbb78a035e | [
"Apache-2.0"
] | permissive | DistrictDataLabs/yellowbrick-docs-zh | 5ecbdccfaff4a6822d60250719b37af9b8d37f61 | 3118e67f2bed561a00885e6edb2cabb3520ad66b | refs/heads/master | 2021-04-09T11:00:29.709555 | 2019-04-06T15:23:55 | 2019-04-06T15:23:55 | 125,447,764 | 22 | 5 | Apache-2.0 | 2019-04-06T14:52:40 | 2018-03-16T01:37:09 | Python | UTF-8 | Python | false | false | 2,788 | py | # tests.test_utils.test_decorators
# Tests for the decorators module in Yellowbrick utils.
#
# Author: Benjamin Bengfort <[email protected]>
# Created: Thu May 18 15:14:34 2017 -0400
#
# Copyright (C) 2017 District Data Labs
# For license information, see LICENSE.txt
#
# ID: test_decorators.py [79cd8cf] [email protected] $
"""
Tests for the decorators module in Yellowbrick utils.
"""
##########################################################################
## Imports
##########################################################################
import unittest
from yellowbrick.utils.decorators import *
##########################################################################
## Decorator Tests
##########################################################################
class DecoratorTests(unittest.TestCase):
    """
    Exercises the decorator utilities from yellowbrick.utils.decorators.
    """

    def test_memoization(self):
        """
        The memoized property caches its value on a backing attribute.
        """
        class Visualizer(object):
            @memoized
            def foo(self):
                return "bar"

        instance = Visualizer()
        # No backing slot exists until the property is first read.
        self.assertFalse(hasattr(instance, "_foo"))
        self.assertEqual(instance.foo, "bar")
        # After access the computed value is cached on ``_foo``.
        self.assertEqual(instance._foo, "bar")

    def test_docutil(self):
        """
        docutil copies a reference docstring onto the decorated function.
        """
        class Visualizer(object):
            def __init__(self):
                """
                This is the correct docstring.
                """
                pass

        def undecorated(*args, **kwargs):
            """
            This is an undecorated function string.
            """
            pass

        # Sanity check the starting docstring before any decoration magic.
        self.assertEqual(
            undecorated.__doc__.strip(),
            "This is an undecorated function string."
        )

        # Apply docutil by hand and inspect the function it returns.
        wrapped = docutil(Visualizer.__init__)(undecorated)
        self.assertEqual(
            wrapped.__doc__.strip(), "This is the correct docstring."
        )

        # Decoration also mutates the original function object in place.
        self.assertEqual(
            undecorated.__doc__.strip(), "This is the correct docstring."
        )

        @docutil(Visualizer.__init__)
        def sugar(*args, **kwargs):
            pass

        # The decorator syntax behaves the same way.
        self.assertEqual(
            sugar.__doc__.strip(), "This is the correct docstring."
        )
##########################################################################
## Execute Tests
##########################################################################
# Run the suite when this module is executed directly.
if __name__ == "__main__":
    unittest.main()
| [
"[email protected]"
] | |
397243dd9250d0356dc9e27a00dfd091286e41c7 | 7233716fbf9fff94240d14770b3fc3f3ada10d9b | /devel/lib/python2.7/dist-packages/gazebo_logical_camera/msg/_LogicalCameraImage.py | 2d13505c04971b0a5fc8394f5ba3c1a9f34572c3 | [] | no_license | shashankseth01/E-yantra | 58d42dce90667ca37f31f2cf111ee98c39468617 | 23432e058fce7733bd1a8399fd6edc20967fa6a3 | refs/heads/main | 2023-02-04T00:36:57.230996 | 2020-12-21T09:55:23 | 2020-12-21T09:55:23 | 316,716,460 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 137 | py | /home/shashank/ws_task3/devel/.private/gazebo_logical_camera/lib/python2.7/dist-packages/gazebo_logical_camera/msg/_LogicalCameraImage.py | [
"[email protected]"
] | |
1808985b4ac9d43d43c5b8cc0ebbc2b85b51a3ec | 51f887286aa3bd2c3dbe4c616ad306ce08976441 | /pybind/nos/v7_2_0/interface/hundredgigabitethernet/storm_control/__init__.py | 129d9c09e4a7d15c7a89324f305901c6ecfdcf8a | [
"Apache-2.0"
] | permissive | b2220333/pybind | a8c06460fd66a97a78c243bf144488eb88d7732a | 44c467e71b2b425be63867aba6e6fa28b2cfe7fb | refs/heads/master | 2020-03-18T09:09:29.574226 | 2018-04-03T20:09:50 | 2018-04-03T20:09:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,095 | py |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import ingress
class storm_control(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-interface - based on the path /interface/hundredgigabitethernet/storm-control. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__ingress',)
_yang_name = 'storm-control'
_rest_name = 'storm-control'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__ingress = YANGDynClass(base=YANGListType("protocol_type",ingress.ingress, yang_name="ingress", rest_name="ingress", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='protocol-type', extensions={u'tailf-common': {u'info': u'Ingress Direction', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'cli-full-no': None}}), is_container='list', yang_name="ingress", rest_name="ingress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Ingress Direction', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bum-storm-control', defining_module='brocade-bum-storm-control', yang_type='list', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
    # YANG schema path of this container: delegate to the parent when
    # attached to a tree, otherwise return the static absolute path.
    if hasattr(self, "_parent"):
        return self._parent._path()+[self._yang_name]
    else:
        return [u'interface', u'hundredgigabitethernet', u'storm-control']
def _rest_path(self):
    # REST URI path segments; nodes without a rest name contribute nothing
    # to the path.
    if hasattr(self, "_parent"):
        if self._rest_name:
            return self._parent._rest_path()+[self._rest_name]
        else:
            return self._parent._rest_path()
    else:
        return [u'interface', u'HundredGigabitEthernet', u'storm-control']
def _get_ingress(self):
    """
    Getter method for ingress, mapped from YANG variable /interface/hundredgigabitethernet/storm_control/ingress (list)
    """
    # Auto-generated accessor; returns the YANGDynClass list wrapper.
    return self.__ingress
def _set_ingress(self, v, load=False):
"""
Setter method for ingress, mapped from YANG variable /interface/hundredgigabitethernet/storm_control/ingress (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_ingress is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ingress() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("protocol_type",ingress.ingress, yang_name="ingress", rest_name="ingress", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='protocol-type', extensions={u'tailf-common': {u'info': u'Ingress Direction', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'cli-full-no': None}}), is_container='list', yang_name="ingress", rest_name="ingress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Ingress Direction', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bum-storm-control', defining_module='brocade-bum-storm-control', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ingress must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("protocol_type",ingress.ingress, yang_name="ingress", rest_name="ingress", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='protocol-type', extensions={u'tailf-common': {u'info': u'Ingress Direction', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'cli-full-no': None}}), is_container='list', yang_name="ingress", rest_name="ingress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Ingress Direction', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bum-storm-control', defining_module='brocade-bum-storm-control', yang_type='list', is_config=True)""",
})
self.__ingress = t
if hasattr(self, '_set'):
self._set()
def _unset_ingress(self):
self.__ingress = YANGDynClass(base=YANGListType("protocol_type",ingress.ingress, yang_name="ingress", rest_name="ingress", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='protocol-type', extensions={u'tailf-common': {u'info': u'Ingress Direction', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'cli-full-no': None}}), is_container='list', yang_name="ingress", rest_name="ingress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Ingress Direction', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bum-storm-control', defining_module='brocade-bum-storm-control', yang_type='list', is_config=True)
ingress = __builtin__.property(_get_ingress, _set_ingress)
_pyangbind_elements = {'ingress': ingress, }
| [
"[email protected]"
] | |
63546a63ea14ce3fc5fadc04ebc9988761286182 | d1ddb9e9e75d42986eba239550364cff3d8f5203 | /google-cloud-sdk/lib/surface/compute/instance_groups/unmanaged/delete.py | 955dc6c5dfd2b844563c81ffbc6b0310d1d95757 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | bopopescu/searchparty | 8ecd702af0d610a7ad3a8df9c4d448f76f46c450 | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | refs/heads/master | 2022-11-19T14:44:55.421926 | 2017-07-28T14:55:43 | 2017-07-28T14:55:43 | 282,495,798 | 0 | 0 | Apache-2.0 | 2020-07-25T17:48:53 | 2020-07-25T17:48:52 | null | UTF-8 | Python | false | false | 2,378 | py | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for deleting unmanaged instance groups."""
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import utils
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute import flags as compute_flags
from googlecloudsdk.command_lib.compute.instance_groups import flags
class Delete(base.DeleteCommand):
  r"""Delete Google Compute Engine unmanaged instance groups.

    *{command}* deletes one or more Google Compute Engine unmanaged
  instance groups. This command just deletes the instance group and does
  not delete the individual virtual machine instances
  in the instance group.
  For example:

    $ {command} example-instance-group-1 example-instance-group-2 \
        --zone us-central1-a

  The above example deletes two instance groups, example-instance-group-1
  and example-instance-group-2, in the ``us-central1-a'' zone.
  """

  @staticmethod
  def Args(parser):
    # Register the (plural) zonal instance group positional argument.
    Delete.ZonalInstanceGroupArg = flags.MakeZonalInstanceGroupArg(plural=True)
    Delete.ZonalInstanceGroupArg.AddArgument(parser, operation_type='delete')

  def Run(self, args):
    holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
    client = holder.client

    group_refs = Delete.ZonalInstanceGroupArg.ResolveAsResource(
        args,
        holder.resources,
        scope_lister=compute_flags.GetDefaultScopeLister(client))

    # Interactive confirmation before issuing any delete request.
    utils.PromptForDeletion(group_refs, 'zone')

    # One Delete request per resolved group reference.
    requests = [
        (client.apitools_client.instanceGroups, 'Delete',
         client.messages.ComputeInstanceGroupsDeleteRequest(
             **group_ref.AsDict()))
        for group_ref in group_refs
    ]

    return client.MakeRequests(requests)
| [
"[email protected]"
] | |
8b37cfa66542bbb293a615730337867c31ead081 | bb932a93d3face2458dc00329e3811664baa6b52 | /unittest/asyncio_queue.py | f02aba922836f18e965d7c7bc443e2e333a97c6c | [
"MIT"
] | permissive | Emptyset110/Morphe | 5dfc5d836b13d8edd074a231faa923add6b98bbe | 762a031298addf9a34425dbecaa793727aeed4c2 | refs/heads/master | 2021-01-19T09:21:11.323822 | 2017-04-15T05:16:05 | 2017-04-15T05:16:05 | 87,750,665 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,435 | py | # coding: utf-8
"""
决定将queue.Queue换成asyncio.Queue前试写的demo
"""
import asyncio
from asyncio import Queue
import queue
import random
import threading
import functools
async def queue_feeder(q):
    """Produce the integers 0..99 into *q*, pausing a random 0-3s between puts.

    Rewritten from the legacy ``@asyncio.coroutine`` / ``yield from`` style
    (deprecated in 3.8, removed in Python 3.11) to native ``async def``;
    the call sites are unchanged since both forms return an awaitable.
    """
    for i in range(0, 100):
        print("Put: ", i)
        await q.put(i)
        await asyncio.sleep(random.random() * 3)
async def async_get(q):
    """Forever consume items from the async queue *q*, printing each one.

    Rewritten from ``@asyncio.coroutine`` / ``yield from`` (removed in
    Python 3.11) to native ``async def``.  Never returns; cancel the task
    to stop it.
    """
    while True:
        print("Start get: ...")
        data = await q.get()
        print("Get: ", data)
async def async_get_sync(q, out):
    """Bridge: forever move items from async queue *q* into sync queue *out*.

    Rewritten from ``@asyncio.coroutine`` / ``yield from`` (removed in
    Python 3.11) to native ``async def``.  Never returns; cancel the task
    to stop it.
    """
    while True:
        print("Start get: ...")
        data = await q.get()
        out.put(data)
def get(q):
    """Blocking consumer: forever pull items from the synchronous queue *q*.

    Intended to run on a daemon thread (see the __main__ block), so it never
    returns.  The original body contained an unused ``import time``; it has
    been removed.
    """
    while True:
        print("Start get from sync queue: ...")
        data = q.get()
        print("Sync get", data)
if __name__ == "__main__":
# q1, q2是两个异步队列
q1 = Queue()
q2 = Queue()
# 做一个同步队列用于将上游所有的异步消息同步化
out = queue.Queue()
t = threading.Thread(target=functools.partial(get, out), daemon=True)
t.start()
# 开启一个协程池, 异步执行q1, q2的异步生产和消费,消费过程是将数据丢进out这个同步队列以同步化
loop = asyncio.get_event_loop()
asyncio.set_event_loop(loop)
loop.run_until_complete(asyncio.gather(*[queue_feeder(q1), queue_feeder(q2), async_get_sync(q1, out), async_get_sync(q2, out)]))
| [
"[email protected]"
] | |
169e9831bcc3567be42765c6f6d3db237d70ede8 | 49e3663fea29ae7fabec581ebae1fda5361effcd | /events/migrations/0003_auto__add_field_event_short_body.py | ff8f2567acac4b37f3f29ef89ba73caeb15a2d8f | [] | no_license | ikonitas/old_causenaffect | 8fdabdc3108efa16046f6bdfa542f71d58a4e6eb | 145f7ae82d02e39bda17aad380dac0f190f1882c | refs/heads/master | 2021-05-28T08:46:49.663299 | 2014-02-17T21:42:36 | 2014-02-17T21:42:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,212 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the ``short_body`` text column to Event."""
    def forwards(self, orm):
        # Adding field 'Event.short_body'
        # ``default=1`` backfills existing rows so the NOT NULL column can be
        # created; keep_default=False drops the default from the schema again.
        db.add_column('events_event', 'short_body', self.gf('django.db.models.fields.TextField')(default=1), keep_default=False)
    def backwards(self, orm):
        # Deleting field 'Event.short_body'
        db.delete_column('events_event', 'short_body')
    # Frozen ORM snapshot used by South to reconstruct the models as they
    # existed at migration time.  Do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'events.event': {
            'Meta': {'ordering': "['-event_date']", 'object_name': 'Event'},
            'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'body': ('django.db.models.fields.TextField', [], {}),
            'enable_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'event_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 1, 6, 13, 53, 36, 581221)'}),
            'flayer': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'flayer_thumb': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'price': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            'pub_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 1, 6, 13, 53, 36, 581195)', 'auto_now_add': 'True', 'blank': 'True'}),
            'short_body': ('django.db.models.fields.TextField', [], {}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'time': ('django.db.models.fields.TimeField', [], {}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }
    complete_apps = ['events']
"[email protected]"
] | |
302f65518292a696e7f129cd6da7782047494676 | d7e41fee3b404cc30d93da72eb92ac466c4fe3ae | /aplpy/colorbar.py | c8a2d6c107a27ccf8a8f1624d08df868e8ed978c | [
"MIT"
] | permissive | lpsinger/aplpy | ca257d0da48aac9e29e0e71470e90147c710cecf | a59ec76d1f0b04ece7a78accd9d6672d5def5a0a | refs/heads/master | 2020-04-08T22:43:37.778800 | 2012-12-21T14:27:56 | 2012-12-21T14:27:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,469 | py | import warnings
import matplotlib.axes as maxes
from mpl_toolkits.axes_grid import make_axes_locatable
from matplotlib.font_manager import FontProperties
from .decorators import auto_refresh, fixdocstring
# As of matplotlib 0.99.1.1, any time a colorbar property is updated, the axes
# need to be removed and re-created. This has been fixed in svn r8213 but we
# should wait until we up the required version of matplotlib before changing the
# code here
class Colorbar(object):
    """Manages a colorbar attached to a parent APLpy figure.
    Most setters cache their arguments in ``_base_settings`` and re-invoke
    show(), because (per the module note above) older matplotlib versions
    require the colorbar axes to be destroyed and re-created on update.
    """
    def __init__(self, parent):
        # parent is the APLpy figure wrapper; reuse its matplotlib figure.
        self._figure = parent._figure
        self._colorbar_axes = None  # created lazily by show(); None when hidden
        self._parent = parent
        # Save plotting parameters (required for @auto_refresh)
        self._parameters = parent._parameters
        # Keyword arguments of the last show() call, reused by the set_* methods.
        self._base_settings = {}
        # Tick-label font; preserved across axes re-creation.
        self._label_fontproperties = FontProperties()
    @auto_refresh
    def show(self, location='right', width=0.2, pad=0.05, ticks=None, labels=True, box=None, box_orientation='vertical'):
        '''
        Show a colorbar on the side of the image.
        Optional Keyword Arguments:
            *location*: [ string ]
                Where to place the colorbar. Should be one of 'left', 'right', 'top', 'bottom'.
            *width*: [ float ]
                The width of the colorbar relative to the canvas size.
            *pad*: [ float ]
                The spacing between the colorbar and the image relative to the canvas size.
            *ticks*: [ None or list ]
                The position of the ticks on the colorbar.
            *labels*: [ True or False ]
                Whether to show numerical labels.
            *box*: [ list ]
                A custom box within which to place the colorbar. This should
                be in the form [xmin, ymin, dx, dy] and be in relative figure
                units. This overrides the location argument.
            *box_orientation* [ str ]
                The orientation of the colorbar within the box. Can be
                'horizontal' or 'vertical'
        '''
        # Remember the settings so the set_*() methods can re-render later.
        self._base_settings['location'] = location
        self._base_settings['width'] = width
        self._base_settings['pad'] = pad
        self._base_settings['ticks'] = ticks
        self._base_settings['labels'] = labels
        self._base_settings['box'] = box
        self._base_settings['box_orientation'] = box_orientation
        if self._parent.image:
            # Discard any previously created colorbar axes before re-creating.
            if self._colorbar_axes:
                self._parent._figure.delaxes(self._colorbar_axes)
            if box is None:
                # Carve space off the requested side of the parent axes.
                divider = make_axes_locatable(self._parent._ax1)
                if location == 'right':
                    self._colorbar_axes = divider.new_horizontal(size=width, pad=pad, axes_class=maxes.Axes)
                    orientation = 'vertical'
                elif location == 'top':
                    self._colorbar_axes = divider.new_vertical(size=width, pad=pad, axes_class=maxes.Axes)
                    orientation = 'horizontal'
                elif location == 'left':
                    warnings.warn("Left colorbar not fully implemented")
                    self._colorbar_axes = divider.new_horizontal(size=width, pad=pad, pack_start=True, axes_class=maxes.Axes)
                    locator = divider.new_locator(nx=0, ny=0)
                    self._colorbar_axes.set_axes_locator(locator)
                    orientation = 'vertical'
                elif location == 'bottom':
                    warnings.warn("Bottom colorbar not fully implemented")
                    self._colorbar_axes = divider.new_vertical(size=width, pad=pad, pack_start=True, axes_class=maxes.Axes)
                    locator = divider.new_locator(nx=0, ny=0)
                    self._colorbar_axes.set_axes_locator(locator)
                    orientation = 'horizontal'
                else:
                    raise Exception("location should be one of: right/top")
                self._parent._figure.add_axes(self._colorbar_axes)
            else:
                # An explicit box overrides the location logic entirely.
                self._colorbar_axes = self._parent._figure.add_axes(box)
                orientation = box_orientation
            self._colorbar = self._parent._figure.colorbar(self._parent.image, cax=self._colorbar_axes, orientation=orientation, ticks=ticks)
            # Draw tick marks on both sides but only label the outward-facing
            # side of the colorbar.
            if location == 'right':
                for tick in self._colorbar_axes.yaxis.get_major_ticks():
                    tick.tick1On = True
                    tick.tick2On = True
                    tick.label1On = False
                    tick.label2On = labels
            elif location == 'top':
                for tick in self._colorbar_axes.xaxis.get_major_ticks():
                    tick.tick1On = True
                    tick.tick2On = True
                    tick.label1On = False
                    tick.label2On = labels
            elif location == 'left':
                for tick in self._colorbar_axes.yaxis.get_major_ticks():
                    tick.tick1On = True
                    tick.tick2On = True
                    tick.label1On = labels
                    tick.label2On = False
            elif location == 'bottom':
                for tick in self._colorbar_axes.xaxis.get_major_ticks():
                    tick.tick1On = True
                    tick.tick2On = True
                    tick.label1On = labels
                    tick.label2On = False
        else:
            warnings.warn("No image is shown, therefore, no colorbar will be plotted")
    @auto_refresh
    def update(self):
        """Re-render the colorbar with the cached settings, if one is shown."""
        if self._colorbar_axes:
            self.show(**self._base_settings)
    @auto_refresh
    def hide(self):
        """Remove the colorbar axes from the figure and forget them."""
        self._parent._figure.delaxes(self._colorbar_axes)
        self._colorbar_axes = None
    @auto_refresh
    def _remove(self):
        # Internal teardown: drops the axes without clearing the reference,
        # unlike hide().
        self._parent._figure.delaxes(self._colorbar_axes)
    # LOCATION AND SIZE
    @auto_refresh
    def set_location(self, location):
        '''
        Set the location of the colorbar. Should be one of 'left', 'right', 'top', 'bottom'.
        '''
        self._base_settings['location'] = location
        self.show(**self._base_settings)
        # Re-apply the fonts: show() rebuilt the axes from scratch.
        self.set_font(fontproperties=self._label_fontproperties)
    @auto_refresh
    def set_width(self, width):
        '''
        Set the width of the colorbar relative to the canvas size.
        '''
        self._base_settings['width'] = width
        self.show(**self._base_settings)
        # Re-apply the fonts: show() rebuilt the axes from scratch.
        self.set_font(fontproperties=self._label_fontproperties)
    @auto_refresh
    def set_pad(self, pad):
        '''
        Set the spacing between the colorbar and the image relative to the canvas size.
        '''
        self._base_settings['pad'] = pad
        self.show(**self._base_settings)
        # Re-apply the fonts: show() rebuilt the axes from scratch.
        self.set_font(fontproperties=self._label_fontproperties)
    @auto_refresh
    def set_ticks(self, ticks):
        '''
        Set the position of the ticks on the colorbar.
        '''
        self._base_settings['ticks'] = ticks
        self.show(**self._base_settings)
        # Re-apply the fonts: show() rebuilt the axes from scratch.
        self.set_font(fontproperties=self._label_fontproperties)
    @auto_refresh
    def set_labels(self, labels):
        '''
        Set whether to show numerical labels.
        '''
        self._base_settings['labels'] = labels
        self.show(**self._base_settings)
        # Re-apply the fonts: show() rebuilt the axes from scratch.
        self.set_font(fontproperties=self._label_fontproperties)
    @auto_refresh
    def set_box(self, box, box_orientation='vertical'):
        '''
        Set the box within which to place the colorbar. This should be in the
        form [xmin, ymin, dx, dy] and be in relative figure units. The
        orientation of the colorbar within the box can be controlled with the
        box_orientation argument.
        '''
        self._base_settings['box'] = box
        self._base_settings['box_orientation'] = box_orientation
        self.show(**self._base_settings)
        # Re-apply the fonts: show() rebuilt the axes from scratch.
        self.set_font(fontproperties=self._label_fontproperties)
    # FONT PROPERTIES
    @auto_refresh
    def set_label_properties(self, *args, **kwargs):
        # Deprecated alias kept for backward compatibility; forwards to set_font().
        warnings.warn("set_label_properties is deprecated - use set_font instead", DeprecationWarning)
        self.set_font(*args, **kwargs)
    @auto_refresh
    @fixdocstring
    def set_font(self, family=None, style=None, variant=None, stretch=None, weight=None, size=None, fontproperties=None):
        '''
        Set the font of the tick labels
        Optional Keyword Arguments:
        common: family, style, variant, stretch, weight, size, fontproperties
        Default values are set by matplotlib or previously set values if
        set_font has already been called. Global default values can be set by
        editing the matplotlibrc file.
        '''
        # Only overwrite the properties that were explicitly provided.
        if family:
            self._label_fontproperties.set_family(family)
        if style:
            self._label_fontproperties.set_style(style)
        if variant:
            self._label_fontproperties.set_variant(variant)
        if stretch:
            self._label_fontproperties.set_stretch(stretch)
        if weight:
            self._label_fontproperties.set_weight(weight)
        if size:
            self._label_fontproperties.set_size(size)
        if fontproperties:
            self._label_fontproperties = fontproperties
        # Update the tick label font properties
        for label in self._colorbar_axes.get_xticklabels():
            label.set_fontproperties(self._label_fontproperties)
        for label in self._colorbar_axes.get_yticklabels():
            label.set_fontproperties(self._label_fontproperties)
        # Also update the offset text font properties
        label = self._colorbar_axes.xaxis.get_offset_text()
        label.set_fontproperties(self._label_fontproperties)
        label = self._colorbar_axes.yaxis.get_offset_text()
        label.set_fontproperties(self._label_fontproperties)
    # FRAME PROPERTIES
    @auto_refresh
    def set_frame_linewidth(self, linewidth):
        '''
        Set the linewidth of the colorbar frame, in points
        '''
        warnings.warn("This method is not functional at this time")
        for key in self._colorbar_axes.spines:
            self._colorbar_axes.spines[key].set_linewidth(linewidth)
    @auto_refresh
    def set_frame_color(self, color):
        '''
        Set the color of the colorbar frame, in points
        '''
        warnings.warn("This method is not functional at this time")
        for key in self._colorbar_axes.spines:
            self._colorbar_axes.spines[key].set_edgecolor(color)
| [
"[email protected]"
] | |
1e4797753fd3e0fcebccf3be0b971ddb534b7452 | 9129a791f45cd3b25d8a5da57ee6936bfe4e73a2 | /learn-django/db_ORM2/one/migrations/0002_article_username.py | 36eb6412db4e75d303331bc845e1d0aeff1d4ef2 | [] | no_license | Sunsetjue/Django2.0 | 94be49ed9d65dab6398ab8f0ddd02bb1871afb6b | 102bf0f2bd2d309b76f3247e396b7e83c5f6c2f8 | refs/heads/master | 2020-04-22T03:25:24.014196 | 2019-02-15T16:18:23 | 2019-02-15T16:18:23 | 170,086,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 494 | py | # Generated by Django 2.0 on 2018-12-10 08:02
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('two', '0001_initial'),
('one', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='article',
name='username',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='two.Front'),
),
]
| [
"10073631822qq.com"
] | 10073631822qq.com |
9c70343fb4ede60d25a4826018132491b6ce3728 | 2daa3894e6d6929fd04145100d8a3be5eedbe21c | /tests/artificial/transf_/trend_constant/cycle_12/ar_/test_artificial_128__constant_12__20.py | 53da275217edee4022050200a1f5d3f55f20f941 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Henri-Lo/pyaf | a1f73a0cc807873bd7b79648fe51de9cfd6c126a | 08c968425d85dcace974d90db7f07c845a0fe914 | refs/heads/master | 2021-07-01T12:27:31.600232 | 2017-09-21T11:19:04 | 2017-09-21T11:19:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
dataset = tsds.generate_random_TS(N = 128 , FREQ = 'D', seed = 0, trendtype = "constant", cycle_length = 12, transform = "", sigma = 0.0, exog_count = 20, ar_order = 0);
art.process_dataset(dataset); | [
"[email protected]"
] | |
ea41284abeaed9935fe87782c57638229ca46372 | 73832d62f4e982517f36265617c17bea3b715719 | /04cem/Programming_Software_Tools/PYTHON/untitled/untitled/urls.py | 675eecba0135ea31bb1e5a83c85b278aeb2fd7a1 | [] | no_license | keipa/bsuir-labs | fa9018a121ec1b040bae27326607cd82ac7652a4 | a7a3eca34d095568c7f8cbbe976cc91e515dc756 | refs/heads/master | 2021-06-15T23:37:27.249024 | 2020-06-09T09:33:32 | 2020-06-09T09:33:32 | 34,018,391 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 764 | py | """untitled URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
| [
"[email protected]"
] | |
5e70ad84a6cda2ce2c916562e762ea79f62edafc | 1c0e889b5f216549160eba5fa6960078926b6f27 | /flaskServer.py | a9e1fe60fe430ae1ecbd3b75de3e2629082eb37f | [
"Artistic-2.0"
] | permissive | ttm/aars | 477d42f7f4627d0178ca5fabf1445d7c1819d2ef | 481ea568321255ddc60b167325c545f84f3284ff | refs/heads/master | 2021-01-01T17:32:55.940835 | 2014-12-16T06:09:05 | 2014-12-16T06:09:05 | 21,177,370 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,102 | py | #-*- coding: utf-8 -*-
from flask import Flask, render_template, make_response, session, redirect, url_for, escape, request,jsonify,Response
import pymongo, __builtin__, datetime, string
from dateutil import parser
import time as T, networkx as x, json # json.dumps
#import MySQLdb, cPickle, numpy as n
from maccess import mdc as U
HTAG="#testeteste"
HTAG_=HTAG.replace("#","NEW")
U=U.u1
app = Flask(__name__)
@app.route("/")
def hello_world():
client=pymongo.MongoClient(U)
db = client[U.split("/")[-1]]
C = db[HTAG_] #collection
msgs=[i for i in C.find()]
info=[(mm["user"]["screen_name"],mm["text"],mm["created_at"]) for mm in msgs]
text_block=string.join([str(ii) for ii in info],"<br />")
return text_block
@app.route("/jsonMe/")
def jsonMe():
client=pymongo.MongoClient(U)
db = client[U.split("/")[-1]]
C = db[HTAG_] #collection
msgs=[i for i in C.find()]
info=[[mm["user"]["screen_name"],mm["text"],mm["created_at"]] for mm in msgs]
return jsonify(info=info)
if __name__ == "__main__":
app.debug = True
app.run(host='0.0.0.0')
| [
"[email protected]"
] | |
e7f67e86dc59350d2e03716109cfd53bf3497274 | 9ee5e51e96fe8baff2ccc7a03981e453b60e3bed | /test/technical_analysis/stock_analysis.py | 2b69e76bf748ca07296f428bfbecce36e24c8a7e | [] | no_license | marciopocebon/myInvestor | 0b97a065e9f5dd9beaefca99955d4fb31a619c03 | f4861b462b1161fb49534ec85ce259026fa20fd3 | refs/heads/master | 2021-12-24T03:21:05.829680 | 2017-12-05T15:45:04 | 2017-12-05T15:45:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,055 | py | # must run using ipython console
# or jupyter-console
import pandas as pd
import pandas_datareader as web # Package and modules for importing data; this code may change depending on pandas version
import datetime
import matplotlib
import numpy as np
import matplotlib.pyplot as plt # Import matplotlib
# We will look at stock prices over the past year, starting at January 1, 2016
start = datetime.datetime(2016,1,1)
end = datetime.date.today()
# Let's get Apple stock data; Apple's ticker symbol is AAPL
# First argument is the series we want, second is the source ("yahoo" for Yahoo! Finance), third is the start date, fourth is the end date
apple = web.DataReader("AAPL", "yahoo", start, end)
type(apple)
apple.head()
# This line is necessary for the plot to appear in a Jupyter notebook
%matplotlib inline
# Control the default size of figures in this Jupyter notebook
%pylab inline
pylab.rcParams['figure.figsize'] = (15, 9) # Change the size of plots
apple["Adj Close"].plot(grid = True) # Plot the adjusted closing price of AAPL
| [
"[email protected]"
] | |
cbe8aff3d8002af75c4697557ef1744bc3204f6d | ab825ee0326e98d115b6dc02bbda02b302787d46 | /基礎編/パスの結合・連結/モジュール/パスの結合・連結.py | dacca7b719dece6f3e5051edcf80d20667fd444b | [] | no_license | holothuria/python_study | 295dd7c30a566b5a9688b9196e25bf6e065401a0 | 7e98090e64d646d23a4189e0efd68c2905b78d04 | refs/heads/master | 2020-03-23T20:04:38.900368 | 2019-03-05T12:47:53 | 2019-03-05T12:47:53 | 142,019,995 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py |
import os
# Demo of os.path.join behavior with Windows-style paths.
# NOTE(review): '\p' is not a recognized escape sequence, so the backslash in
# 'C:\python-izm' survives literally here; a raw string would be safer.
PROJECT_DIR = 'C:\python-izm'
SETTINGS_FILE = 'settings.ini'
print(os.path.join(PROJECT_DIR, SETTINGS_FILE))
print(os.path.join(PROJECT_DIR, 'settings_dir', SETTINGS_FILE))
PROJECT_DIR = 'python-izm'
print(os.path.join('C:', PROJECT_DIR, 'settings_dir', SETTINGS_FILE))
# ^ With just "<single drive letter>:" alone, os.path.join does not insert a
# separator after the drive component? (translated from Japanese)
"[email protected]"
] | |
34483c2ca07b40eaabcb779de868fcf8094cdac5 | ebf3e0fdb7ca2c19e04f893b49120cd4fdf10649 | /django/models_and_database/manage.py | b86d7b4ab80a51e710716e63299291a21b5f82f7 | [] | no_license | Sem31/Django-Basics | 19f8528f683c1bbc67a4d5d01988fa8d64d251c2 | 3219143d135a1d918e0994b61128375b01b35a5d | refs/heads/master | 2022-12-12T00:01:04.518559 | 2019-09-29T18:16:57 | 2019-09-29T18:16:57 | 190,061,340 | 0 | 0 | null | 2022-12-08T05:22:25 | 2019-06-03T18:37:25 | Python | UTF-8 | Python | false | false | 639 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Django's command-line entry point: configure settings and dispatch."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'models_and_database.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint; the original error stays chained.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
f0002acb5507cad1be812360cbda655027b4a4ee | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03836/s741210941.py | 115e3d1b0023264fc7ddea4090c60e0bdb2df46d | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py | sx, sy, tx, ty = map(int, input().split())
X, Y = tx - sx, ty - sy
for d, n in zip("RULD", [X, Y, X, Y]):
print(d * n, end="")
for d, n in zip("DRULULDR", [1, X + 1, Y + 1, 1, 1, X + 1, Y + 1, 1]):
print(d * n, end="")
print() | [
"[email protected]"
] | |
9c4c6adaab2653c87efba4f9e32323052ec9309e | aa0270b351402e421631ebc8b51e528448302fab | /sdk/servicebus/azure-servicebus/tests/async_tests/test_sb_client_async.py | d42e1dfd218fd8cc898361b417cdf68817ff1533 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | fangchen0601/azure-sdk-for-python | d04a22109d0ff8ff209c82e4154b7169b6cb2e53 | c2e11d6682e368b2f062e714490d2de42e1fed36 | refs/heads/master | 2023-05-11T16:53:26.317418 | 2023-05-04T20:02:16 | 2023-05-04T20:02:16 | 300,440,803 | 0 | 0 | MIT | 2020-10-16T18:45:29 | 2020-10-01T22:27:56 | null | UTF-8 | Python | false | false | 29,537 | py | #--------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import logging
import time
import pytest
from azure.core.credentials import AzureSasCredential, AzureNamedKeyCredential
from azure.mgmt.servicebus.models import AccessRights
from azure.servicebus.aio import ServiceBusClient, ServiceBusSender, ServiceBusReceiver
from azure.servicebus import ServiceBusMessage
from azure.servicebus.aio._base_handler_async import ServiceBusSharedKeyCredential
from azure.servicebus.exceptions import (
ServiceBusError,
ServiceBusAuthenticationError,
ServiceBusAuthorizationError
)
from devtools_testutils import AzureMgmtTestCase
from servicebus_preparer import (
CachedServiceBusNamespacePreparer,
ServiceBusTopicPreparer,
ServiceBusQueuePreparer,
ServiceBusNamespaceAuthorizationRulePreparer,
ServiceBusQueueAuthorizationRulePreparer,
CachedServiceBusQueuePreparer,
CachedServiceBusTopicPreparer,
CachedServiceBusSubscriptionPreparer,
CachedServiceBusResourceGroupPreparer,
SERVICEBUS_ENDPOINT_SUFFIX
)
from utilities import get_logger
_logger = get_logger(logging.DEBUG)
class ServiceBusClientAsyncTests(AzureMgmtTestCase):
    @pytest.mark.asyncio
    @pytest.mark.liveTest
    @pytest.mark.live_test_only
    @CachedServiceBusResourceGroupPreparer(name_prefix='servicebustest')
    @CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
    @CachedServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
    async def test_sb_client_bad_credentials_async(self, servicebus_namespace, servicebus_queue, **kwargs):
        """Sending with an invalid shared key against a real namespace must
        raise ServiceBusAuthenticationError."""
        client = ServiceBusClient(
            fully_qualified_namespace=f"{servicebus_namespace.name}{SERVICEBUS_ENDPOINT_SUFFIX}",
            credential=ServiceBusSharedKeyCredential('invalid', 'invalid'),
            logging_enable=False)
        async with client:
            with pytest.raises(ServiceBusAuthenticationError):
                async with client.get_queue_sender(servicebus_queue.name) as sender:
                    await sender.send_messages(ServiceBusMessage("test"))
@pytest.mark.asyncio
@pytest.mark.liveTest
@pytest.mark.live_test_only
async def test_sb_client_bad_namespace_async(self, **kwargs):
client = ServiceBusClient(
fully_qualified_namespace=f'invalid{SERVICEBUS_ENDPOINT_SUFFIX}',
credential=ServiceBusSharedKeyCredential('invalid', 'invalid'),
logging_enable=False)
async with client:
with pytest.raises(ServiceBusError):
async with client.get_queue_sender('invalidqueue') as sender:
await sender.send_messages(ServiceBusMessage("test"))
@pytest.mark.asyncio
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedServiceBusResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
async def test_sb_client_bad_entity_async(self):
fake_str = f"Endpoint=sb://mock{SERVICEBUS_ENDPOINT_SUFFIX}/;" \
f"SharedAccessKeyName=mock;SharedAccessKey=mock;EntityPath=mockentity"
fake_client = ServiceBusClient.from_connection_string(fake_str)
with pytest.raises(ValueError):
fake_client.get_queue_sender('queue')
with pytest.raises(ValueError):
fake_client.get_queue_receiver('queue')
with pytest.raises(ValueError):
fake_client.get_topic_sender('topic')
with pytest.raises(ValueError):
fake_client.get_subscription_receiver('topic', 'subscription')
fake_client.get_queue_sender('mockentity')
fake_client.get_queue_receiver('mockentity')
fake_client.get_topic_sender('mockentity')
fake_client.get_subscription_receiver('mockentity', 'subscription')
fake_str = f"Endpoint=sb://mock{SERVICEBUS_ENDPOINT_SUFFIX}/;" \
f"SharedAccessKeyName=mock;SharedAccessKey=mock"
fake_client = ServiceBusClient.from_connection_string(fake_str)
fake_client.get_queue_sender('queue')
fake_client.get_queue_receiver('queue')
fake_client.get_topic_sender('topic')
fake_client.get_subscription_receiver('topic', 'subscription')
    @pytest.mark.asyncio
    @pytest.mark.liveTest
    @pytest.mark.live_test_only
    @CachedServiceBusResourceGroupPreparer(name_prefix='servicebustest')
    @CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
    @CachedServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
    @ServiceBusNamespaceAuthorizationRulePreparer(name_prefix='servicebustest', access_rights=[AccessRights.listen])
    async def test_sb_client_readonly_credentials(self, servicebus_authorization_rule_connection_string, servicebus_queue, **kwargs):
        """With a listen-only authorization rule, receiving succeeds while
        sending raises ServiceBusAuthorizationError."""
        client = ServiceBusClient.from_connection_string(servicebus_authorization_rule_connection_string)
        async with client:
            # Receiving is permitted by the listen right.
            async with client.get_queue_receiver(servicebus_queue.name) as receiver:
                messages = await receiver.receive_messages(max_message_count=1, max_wait_time=1)
            # Sending is not.
            with pytest.raises(ServiceBusAuthorizationError):
                async with client.get_queue_sender(servicebus_queue.name) as sender:
                    await sender.send_messages(ServiceBusMessage("test"))
    @pytest.mark.asyncio
    @pytest.mark.liveTest
    @pytest.mark.live_test_only
    @CachedServiceBusResourceGroupPreparer(name_prefix='servicebustest')
    @CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
    @CachedServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
    @ServiceBusNamespaceAuthorizationRulePreparer(name_prefix='servicebustest', access_rights=[AccessRights.send])
    async def test_sb_client_writeonly_credentials_async(self, servicebus_authorization_rule_connection_string, servicebus_queue, **kwargs):
        """With a send-only authorization rule, receiving raises
        ServiceBusError, sending succeeds, and a non-message payload is
        rejected with TypeError."""
        client = ServiceBusClient.from_connection_string(servicebus_authorization_rule_connection_string)
        async with client:
            # Receiving is not permitted by a send-only rule.
            with pytest.raises(ServiceBusError):
                async with client.get_queue_receiver(servicebus_queue.name) as receiver:
                    messages = await receiver.receive_messages(max_message_count=1, max_wait_time=1)
            async with client.get_queue_sender(servicebus_queue.name) as sender:
                await sender.send_messages(ServiceBusMessage("test"))
                # Payloads must be ServiceBusMessage (or supported types).
                with pytest.raises(TypeError):
                    await sender.send_messages("cat")
    @pytest.mark.asyncio
    @pytest.mark.liveTest
    @pytest.mark.live_test_only
    @CachedServiceBusResourceGroupPreparer()
    @CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
    @CachedServiceBusQueuePreparer(name_prefix='servicebustest', dead_lettering_on_message_expiration=True)
    @CachedServiceBusTopicPreparer(name_prefix='servicebustest')
    @CachedServiceBusSubscriptionPreparer(name_prefix='servicebustest')
    async def test_async_sb_client_close_spawned_handlers(self, servicebus_namespace_connection_string, servicebus_queue, servicebus_topic, servicebus_subscription, **kwargs):
        """Verify that ServiceBusClient tracks spawned sender/receiver handlers
        in ``_handlers`` and tears them down on close()/context exit, and that
        dereferenced handlers eventually drop out of the tracking set."""
        client = ServiceBusClient.from_connection_string(servicebus_namespace_connection_string)
        await client.close()
        # context manager
        async with client:
            assert len(client._handlers) == 0
            sender = client.get_queue_sender(servicebus_queue.name)
            receiver = client.get_queue_receiver(servicebus_queue.name)
            await sender._open()
            await receiver._open()
            assert sender._handler and sender._running
            assert receiver._handler and receiver._running
            assert len(client._handlers) == 2
        # Leaving the context closes every spawned handler.
        assert not sender._handler and not sender._running
        assert not receiver._handler and not receiver._running
        assert len(client._handlers) == 0
        # close operation
        sender = client.get_queue_sender(servicebus_queue.name)
        receiver = client.get_queue_receiver(servicebus_queue.name)
        await sender._open()
        await receiver._open()
        assert sender._handler and sender._running
        assert receiver._handler and receiver._running
        assert len(client._handlers) == 2
        await client.close()
        assert not sender._handler and not sender._running
        assert not receiver._handler and not receiver._running
        assert len(client._handlers) == 0
        queue_sender = client.get_queue_sender(servicebus_queue.name)
        queue_receiver = client.get_queue_receiver(servicebus_queue.name)
        assert len(client._handlers) == 2
        queue_sender = client.get_queue_sender(servicebus_queue.name)
        queue_receiver = client.get_queue_receiver(servicebus_queue.name)
        # the previous sender/receiver can not longer be referenced, there might be a delay in CPython
        # to remove the reference, so len of handlers should be less than 4
        assert len(client._handlers) < 4
        await client.close()
        queue_sender = client.get_queue_sender(servicebus_queue.name)
        queue_receiver = client.get_queue_receiver(servicebus_queue.name)
        assert len(client._handlers) == 2
        queue_sender = None
        queue_receiver = None
        # Rebinding to None releases the handlers; the tracking set shrinks.
        assert len(client._handlers) < 2
        await client.close()
        topic_sender = client.get_topic_sender(servicebus_topic.name)
        subscription_receiver = client.get_subscription_receiver(servicebus_topic.name, servicebus_subscription.name)
        assert len(client._handlers) == 2
        topic_sender = None
        subscription_receiver = None
        # the previous sender/receiver can not longer be referenced, so len of handlers should just be 2 instead of 4
        assert len(client._handlers) < 4
        await client.close()
        topic_sender = client.get_topic_sender(servicebus_topic.name)
        subscription_receiver = client.get_subscription_receiver(servicebus_topic.name, servicebus_subscription.name)
        assert len(client._handlers) == 2
        topic_sender = client.get_topic_sender(servicebus_topic.name)
        subscription_receiver = client.get_subscription_receiver(servicebus_topic.name, servicebus_subscription.name)
        # the previous sender/receiver can not longer be referenced, so len of handlers should just be 2 instead of 4
        assert len(client._handlers) < 4
        await client.close()
        # Repeated spawning without explicit close must not grow unboundedly.
        for _ in range(5):
            queue_sender = client.get_queue_sender(servicebus_queue.name)
            queue_receiver = client.get_queue_receiver(servicebus_queue.name)
            topic_sender = client.get_topic_sender(servicebus_topic.name)
            subscription_receiver = client.get_subscription_receiver(servicebus_topic.name,
                                                                     servicebus_subscription.name)
        assert len(client._handlers) < 15
        await client.close()
@pytest.mark.asyncio
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedServiceBusResourceGroupPreparer(name_prefix='servicebustest')
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@ServiceBusNamespaceAuthorizationRulePreparer(name_prefix='servicebustest')
@ServiceBusQueuePreparer(name_prefix='servicebustest_qone', parameter_name='wrong_queue', dead_lettering_on_message_expiration=True)
@ServiceBusQueuePreparer(name_prefix='servicebustest_qtwo', dead_lettering_on_message_expiration=True)
@ServiceBusQueuePreparer(name_prefix='servicebustest_qtwo', dead_lettering_on_message_expiration=True) if False else ServiceBusQueueAuthorizationRulePreparer(name_prefix='servicebustest_qtwo')
async def test_sb_client_incorrect_queue_conn_str_async(self, servicebus_queue_authorization_rule_connection_string, servicebus_queue, wrong_queue, **kwargs):
    """A queue-scoped connection string must only grant access to its own queue.

    Using the queue-level authorization rule of ``servicebus_queue`` against a
    different queue (``wrong_queue``) must raise ``ValueError`` — both via
    ``ServiceBusClient`` factories and via direct ``ServiceBusSender`` /
    ``ServiceBusReceiver`` connection-string construction — while the matching
    queue keeps working.
    """
    client = ServiceBusClient.from_connection_string(servicebus_queue_authorization_rule_connection_string)
    async with client:
        # Validate that the wrong sender/receiver queues with the right credentials fail.
        with pytest.raises(ValueError):
            async with client.get_queue_sender(wrong_queue.name) as sender:
                await sender.send_messages(ServiceBusMessage("test"))

        with pytest.raises(ValueError):
            async with client.get_queue_receiver(wrong_queue.name) as receiver:
                messages = await receiver.receive_messages(max_message_count=1, max_wait_time=1)

        # But that the correct ones work.
        async with client.get_queue_sender(servicebus_queue.name) as sender:
            await sender.send_messages(ServiceBusMessage("test"))

        async with client.get_queue_receiver(servicebus_queue.name) as receiver:
            messages = await receiver.receive_messages(max_message_count=1, max_wait_time=1)

        # Now do the same but with direct connstr initialization.
        with pytest.raises(ValueError):
            async with ServiceBusSender._from_connection_string(
                servicebus_queue_authorization_rule_connection_string,
                queue_name=wrong_queue.name,
            ) as sender:
                await sender.send_messages(ServiceBusMessage("test"))

        with pytest.raises(ValueError):
            async with ServiceBusReceiver._from_connection_string(
                servicebus_queue_authorization_rule_connection_string,
                queue_name=wrong_queue.name,
            ) as receiver:
                messages = await receiver.receive_messages(max_message_count=1, max_wait_time=1)

        async with ServiceBusSender._from_connection_string(
            servicebus_queue_authorization_rule_connection_string,
            queue_name=servicebus_queue.name,
        ) as sender:
            await sender.send_messages(ServiceBusMessage("test"))

        async with ServiceBusReceiver._from_connection_string(
            servicebus_queue_authorization_rule_connection_string,
            queue_name=servicebus_queue.name,
        ) as receiver:
            messages = await receiver.receive_messages(max_message_count=1, max_wait_time=1)
@pytest.mark.asyncio
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedServiceBusResourceGroupPreparer()
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@CachedServiceBusQueuePreparer(name_prefix='servicebustest')
async def test_client_sas_credential_async(self,
                                           servicebus_queue,
                                           servicebus_namespace,
                                           servicebus_namespace_key_name,
                                           servicebus_namespace_primary_key,
                                           servicebus_namespace_connection_string,
                                           **kwargs):
    """Authenticate via a SAS token embedded in a connection string."""
    # This should "just work" to validate known-good.
    credential = ServiceBusSharedKeyCredential(servicebus_namespace_key_name, servicebus_namespace_primary_key)
    hostname = f"{servicebus_namespace.name}{SERVICEBUS_ENDPOINT_SUFFIX}"
    auth_uri = "sb://{}/{}".format(hostname, servicebus_queue.name)
    # Mint a queue-scoped SAS token from the shared-key credential
    # (token is bytes here, hence the decode() below).
    token = (await credential.get_token(auth_uri)).token

    # Finally let's do it with SAS token + conn str
    token_conn_str = "Endpoint=sb://{}/;SharedAccessSignature={};".format(hostname, token.decode())

    client = ServiceBusClient.from_connection_string(token_conn_str)
    async with client:
        # No handlers are spawned until a sender/receiver is requested.
        assert len(client._handlers) == 0
        async with client.get_queue_sender(servicebus_queue.name) as sender:
            await sender.send_messages(ServiceBusMessage("foo"))
@pytest.mark.asyncio
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedServiceBusResourceGroupPreparer()
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@CachedServiceBusQueuePreparer(name_prefix='servicebustest')
async def test_client_credential_async(self,
                                       servicebus_queue,
                                       servicebus_namespace,
                                       servicebus_namespace_key_name,
                                       servicebus_namespace_primary_key,
                                       servicebus_namespace_connection_string,
                                       **kwargs):
    """Client accepts the namespace host bare and with 'sb://' / 'https://' prefixes."""
    # This should "just work" to validate known-good.
    credential = ServiceBusSharedKeyCredential(servicebus_namespace_key_name, servicebus_namespace_primary_key)
    hostname = f"{servicebus_namespace.name}{SERVICEBUS_ENDPOINT_SUFFIX}"

    client = ServiceBusClient(hostname, credential)
    async with client:
        assert len(client._handlers) == 0
        async with client.get_queue_sender(servicebus_queue.name) as sender:
            await sender.send_messages(ServiceBusMessage("foo"))

    # Same namespace addressed with an explicit 'sb://' scheme.
    hostname = f"sb://{servicebus_namespace.name}{SERVICEBUS_ENDPOINT_SUFFIX}"
    client = ServiceBusClient(hostname, credential)
    async with client:
        assert len(client._handlers) == 0
        async with client.get_queue_sender(servicebus_queue.name) as sender:
            await sender.send_messages(ServiceBusMessage("foo"))

    # And with an 'https://' scheme.
    hostname = f"https://{servicebus_namespace.name}{SERVICEBUS_ENDPOINT_SUFFIX}"
    client = ServiceBusClient(hostname, credential)
    async with client:
        assert len(client._handlers) == 0
        async with client.get_queue_sender(servicebus_queue.name) as sender:
            await sender.send_messages(ServiceBusMessage("foo"))
@pytest.mark.asyncio
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedServiceBusResourceGroupPreparer()
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@CachedServiceBusQueuePreparer(name_prefix='servicebustest')
async def test_client_azure_sas_credential_async(self,
                                                 servicebus_queue,
                                                 servicebus_namespace,
                                                 servicebus_namespace_key_name,
                                                 servicebus_namespace_primary_key,
                                                 servicebus_namespace_connection_string,
                                                 **kwargs):
    """Authenticate the client with an azure.core AzureSasCredential."""
    # This should "just work" to validate known-good.
    credential = ServiceBusSharedKeyCredential(servicebus_namespace_key_name, servicebus_namespace_primary_key)
    hostname = f"{servicebus_namespace.name}{SERVICEBUS_ENDPOINT_SUFFIX}"
    auth_uri = "sb://{}/{}".format(hostname, servicebus_queue.name)
    # Mint a SAS token from the shared-key credential, then wrap it in the
    # generic azure.core SAS credential type.
    token = (await credential.get_token(auth_uri)).token.decode()
    credential = AzureSasCredential(token)

    client = ServiceBusClient(hostname, credential)
    async with client:
        assert len(client._handlers) == 0
        async with client.get_queue_sender(servicebus_queue.name) as sender:
            await sender.send_messages(ServiceBusMessage("foo"))
@pytest.mark.asyncio
@pytest.mark.liveTest
@pytest.mark.live_test_only
@CachedServiceBusResourceGroupPreparer()
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@CachedServiceBusQueuePreparer(name_prefix='servicebustest')
async def test_client_named_key_credential_async(self,
                                                 servicebus_queue,
                                                 servicebus_namespace,
                                                 servicebus_namespace_key_name,
                                                 servicebus_namespace_primary_key,
                                                 servicebus_namespace_connection_string,
                                                 **kwargs):
    """AzureNamedKeyCredential auth, including in-place key rotation."""
    hostname = f"{servicebus_namespace.name}{SERVICEBUS_ENDPOINT_SUFFIX}"
    credential = AzureNamedKeyCredential(servicebus_namespace_key_name, servicebus_namespace_primary_key)

    client = ServiceBusClient(hostname, credential)
    async with client:
        async with client.get_queue_sender(servicebus_queue.name) as sender:
            await sender.send_messages(ServiceBusMessage("foo"))

    # Rotating to bogus key material should make the next send fail.
    credential.update("foo", "bar")
    with pytest.raises(Exception):
        async with client:
            async with client.get_queue_sender(servicebus_queue.name) as sender:
                await sender.send_messages(ServiceBusMessage("foo"))

    # update back to the right key again
    credential.update(servicebus_namespace_key_name, servicebus_namespace_primary_key)
    async with client:
        async with client.get_queue_sender(servicebus_queue.name) as sender:
            await sender.send_messages(ServiceBusMessage("foo"))
async def test_backoff_fixed_retry(self):
    """retry_mode='fixed' must sleep the flat backoff factor, not exponential.

    For retried_times=1 the exponential delay would be
    retry_backoff_factor * (2 ** 1); fixed-mode backoff must complete in
    strictly less time for every handler type (queue/topic sender,
    queue/subscription receiver).
    """
    client = ServiceBusClient(
        'fake.host.com',
        'fake_eh',
        retry_mode='fixed'
    )
    # queue sender
    # Fix: the get_* factories on the async ServiceBusClient are plain
    # synchronous methods and must not be awaited, while _backoff() on the
    # async handlers is a coroutine and must be awaited or it never sleeps
    # (which would make these timing assertions pass vacuously).
    sender = client.get_queue_sender('fake_name')
    backoff = client._config.retry_backoff_factor
    start_time = time.time()
    await sender._backoff(retried_times=1, last_exception=Exception('fake'), abs_timeout_time=None)
    sleep_time_fixed = time.time() - start_time
    # exp = 0.8 * (2 ** 1) = 1.6
    # time.sleep() in _backoff will take AT LEAST time 'exp' for retry_mode='exponential'
    # check that fixed is less than 'exp'
    assert sleep_time_fixed < backoff * (2 ** 1)

    # topic sender
    sender = client.get_topic_sender('fake_name')
    backoff = client._config.retry_backoff_factor
    start_time = time.time()
    await sender._backoff(retried_times=1, last_exception=Exception('fake'), abs_timeout_time=None)
    sleep_time_fixed = time.time() - start_time
    assert sleep_time_fixed < backoff * (2 ** 1)

    # queue receiver
    receiver = client.get_queue_receiver('fake_name')
    backoff = client._config.retry_backoff_factor
    start_time = time.time()
    await receiver._backoff(retried_times=1, last_exception=Exception('fake'), abs_timeout_time=None)
    sleep_time_fixed = time.time() - start_time
    assert sleep_time_fixed < backoff * (2 ** 1)

    # subscription receiver
    receiver = client.get_subscription_receiver('fake_topic', 'fake_sub')
    backoff = client._config.retry_backoff_factor
    start_time = time.time()
    await receiver._backoff(retried_times=1, last_exception=Exception('fake'), abs_timeout_time=None)
    sleep_time_fixed = time.time() - start_time
    assert sleep_time_fixed < backoff * (2 ** 1)
async def test_custom_client_id_queue_sender_async(self, **kwargs):
    # A caller-supplied client_identifier must be used verbatim on the sender.
    conn_str = f'Endpoint=sb://resourcename{SERVICEBUS_ENDPOINT_SUFFIX}/;SharedAccessSignature=THISISATESTKEYXXXXXXXXXXXXXXXXXXXXXXXXXXXX=;'
    expected_id = "my_custom_id"
    client = ServiceBusClient.from_connection_string(conn_str=conn_str)
    async with client:
        sender = client.get_queue_sender(queue_name="queue_name", client_identifier=expected_id)
        assert sender.client_identifier is not None
        assert sender.client_identifier == expected_id
async def test_default_client_id_queue_sender(self, **kwargs):
    # Without an explicit client_identifier, an "SBSender"-tagged one is generated.
    conn_str = f'Endpoint=sb://resourcename{SERVICEBUS_ENDPOINT_SUFFIX}/;SharedAccessSignature=THISISATESTKEYXXXXXXXXXXXXXXXXXXXXXXXXXXXX=;'
    client = ServiceBusClient.from_connection_string(conn_str=conn_str)
    async with client:
        sender = client.get_queue_sender(queue_name="queue_name")
        assert sender.client_identifier is not None
        assert "SBSender" in sender.client_identifier
async def test_custom_client_id_queue_receiver(self, **kwargs):
    # A caller-supplied client_identifier must be used verbatim on the receiver.
    conn_str = f'Endpoint=sb://resourcename{SERVICEBUS_ENDPOINT_SUFFIX}/;SharedAccessSignature=THISISATESTKEYXXXXXXXXXXXXXXXXXXXXXXXXXXXX=;'
    expected_id = "my_custom_id"
    client = ServiceBusClient.from_connection_string(conn_str=conn_str)
    async with client:
        receiver = client.get_queue_receiver(queue_name="queue_name", client_identifier=expected_id)
        assert receiver.client_identifier is not None
        assert receiver.client_identifier == expected_id
async def test_default_client_id_queue_receiver(self, **kwargs):
    # Without an explicit client_identifier, an "SBReceiver"-tagged one is generated.
    conn_str = f'Endpoint=sb://resourcename{SERVICEBUS_ENDPOINT_SUFFIX}/;SharedAccessSignature=THISISATESTKEYXXXXXXXXXXXXXXXXXXXXXXXXXXXX=;'
    client = ServiceBusClient.from_connection_string(conn_str=conn_str)
    async with client:
        receiver = client.get_queue_receiver(queue_name="queue_name")
        assert receiver.client_identifier is not None
        assert "SBReceiver" in receiver.client_identifier
async def test_custom_client_id_topic_sender(self, **kwargs):
    # A caller-supplied client_identifier must be used verbatim on the topic sender.
    conn_str = f'Endpoint=sb://resourcename{SERVICEBUS_ENDPOINT_SUFFIX}/;SharedAccessSignature=THISISATESTKEYXXXXXXXXXXXXXXXXXXXXXXXXXXXX=;'
    expected_id = "my_custom_id"
    client = ServiceBusClient.from_connection_string(conn_str=conn_str)
    async with client:
        sender = client.get_topic_sender(topic_name="topic_name", client_identifier=expected_id)
        assert sender.client_identifier is not None
        assert sender.client_identifier == expected_id
async def test_default_client_id_topic_sender(self, **kwargs):
    # Without an explicit client_identifier, an "SBSender"-tagged one is generated.
    conn_str = f'Endpoint=sb://resourcename{SERVICEBUS_ENDPOINT_SUFFIX}/;SharedAccessSignature=THISISATESTKEYXXXXXXXXXXXXXXXXXXXXXXXXXXXX=;'
    client = ServiceBusClient.from_connection_string(conn_str=conn_str)
    async with client:
        sender = client.get_topic_sender(topic_name="topic_name")
        assert sender.client_identifier is not None
        assert "SBSender" in sender.client_identifier
async def test_default_client_id_subscription_receiver(self, **kwargs):
    # Without an explicit client_identifier, an "SBReceiver"-tagged one is generated.
    conn_str = f'Endpoint=sb://resourcename{SERVICEBUS_ENDPOINT_SUFFIX}/;SharedAccessSignature=THISISATESTKEYXXXXXXXXXXXXXXXXXXXXXXXXXXXX=;'
    client = ServiceBusClient.from_connection_string(conn_str=conn_str)
    async with client:
        receiver = client.get_subscription_receiver("topic_name", "sub_name")
        assert receiver.client_identifier is not None
        assert "SBReceiver" in receiver.client_identifier
async def test_custom_client_id_subscription_receiver(self, **kwargs):
    # A caller-supplied client_identifier must be used verbatim on the
    # subscription receiver.
    conn_str = f'Endpoint=sb://resourcename{SERVICEBUS_ENDPOINT_SUFFIX}/;SharedAccessSignature=THISISATESTKEYXXXXXXXXXXXXXXXXXXXXXXXXXXXX=;'
    expected_id = "my_custom_id"
    client = ServiceBusClient.from_connection_string(conn_str=conn_str)
    async with client:
        receiver = client.get_subscription_receiver("topic_name", "sub_name", client_identifier=expected_id)
        assert receiver.client_identifier is not None
        assert receiver.client_identifier == expected_id
@pytest.mark.asyncio
@pytest.mark.liveTest
@CachedServiceBusResourceGroupPreparer()
@CachedServiceBusNamespacePreparer(name_prefix='servicebustest')
@CachedServiceBusQueuePreparer(name_prefix='servicebustest')
async def test_connection_verify_exception_async(self,
                                                 servicebus_queue,
                                                 servicebus_namespace,
                                                 servicebus_namespace_key_name,
                                                 servicebus_namespace_primary_key,
                                                 servicebus_namespace_connection_string,
                                                 **kwargs):
    """An invalid connection_verify CA bundle must surface as ServiceBusError."""
    hostname = f"{servicebus_namespace.name}{SERVICEBUS_ENDPOINT_SUFFIX}"
    credential = AzureNamedKeyCredential(servicebus_namespace_key_name, servicebus_namespace_primary_key)
    # NOTE(review): "cacert.pem" is presumably a nonexistent/invalid CA bundle
    # path, so the TLS handshake on first use is expected to fail — confirm.
    client = ServiceBusClient(hostname, credential, connection_verify="cacert.pem")
    async with client:
        with pytest.raises(ServiceBusError):
            async with client.get_queue_sender(servicebus_queue.name) as sender:
                await sender.send_messages(ServiceBusMessage("foo"))
| [
"[email protected]"
] | |
57791e12aecfb2440e48964dc6d5a0c4e3c27d94 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-6589.py | aad7ca56d310f77854a1ba7b3ae9903b4a5cb6d7 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,741 | py | # A resizable list of integers
# A resizable list of integers (ChocoPy benchmark fixture).
class Vector(object):
    # Backing storage; slots [0:size] hold live elements, the rest are spare.
    items: [int] = None
    # Number of elements currently stored.
    size: int = 0
    def __init__(self:"Vector"):
        # Start with a single zeroed slot (capacity 1).
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector", idx: int) -> object:
        # Negative indices are ignored; no upper-bound check is performed.
        if idx < 0:
            return
        # Shift every later element one slot to the left, then shrink.
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector", idx: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector") -> int:
        return self.size
# A resizable list of integers
# Benchmark variant of Vector: every member is duplicated with a "2" suffix;
# the extra parameters of the suffixed methods (item2, idx2, ...) are
# accepted but deliberately unused.
class Vector2(object):
    items: [int] = None
    items2: [int] = None  # unused duplicate field
    size: int = 0
    size2: int = 0  # unused duplicate field
    def __init__(self:"Vector2"):
        # Start with a single zeroed slot (capacity 1).
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector2") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector2") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector2", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2 is ignored)
    def append2(self:"Vector2", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector2", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2 is ignored)
    def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector2", idx: int) -> object:
        # Negative indices are ignored; elements after idx shift left by one.
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2 is ignored)
    def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector2", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2 is ignored)
    def get2(self:"Vector2", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector2") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector2") -> int:
        return self.size
# A resizable list of integers
# Benchmark variant of Vector: every member is triplicated with numeric
# suffixes; the extra parameters of the suffixed methods (item2/item3,
# idx2/idx3, ...) are accepted but deliberately unused.
class Vector3(object):
    items: [int] = None
    items2: [int] = None  # unused duplicate field
    items3: [int] = None  # unused duplicate field
    size: int = 0
    size2: int = 0  # unused duplicate field
    size3: int = 0  # unused duplicate field
    def __init__(self:"Vector3"):
        # Start with a single zeroed slot (capacity 1).
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector3") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector3") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity3(self:"Vector3") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector3", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2 is ignored)
    def append2(self:"Vector3", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2/item3 are ignored)
    def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector3", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2 is ignored)
    def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2/new_items3 are ignored)
    def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector3", idx: int) -> object:
        # Negative indices are ignored; elements after idx shift left by one.
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2 is ignored)
    def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2/idx3 are ignored)
    def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector3", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2 is ignored)
    def get2(self:"Vector3", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2/idx3 are ignored)
    def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector3") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector3") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length3(self:"Vector3") -> int:
        return self.size
# A resizable list of integers
# Benchmark variant of Vector: every member is quadruplicated with numeric
# suffixes; the extra parameters of the suffixed methods are accepted but
# deliberately unused.
class Vector4(object):
    items: [int] = None
    items2: [int] = None  # unused duplicate field
    items3: [int] = None  # unused duplicate field
    items4: [int] = None  # unused duplicate field
    size: int = 0
    size2: int = 0  # unused duplicate field
    size3: int = 0  # unused duplicate field
    size4: int = 0  # unused duplicate field
    def __init__(self:"Vector4"):
        # Start with a single zeroed slot (capacity 1).
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity3(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity4(self:"Vector4") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity4(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector4", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2 is ignored)
    def append2(self:"Vector4", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2/item3 are ignored)
    def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2..item4 are ignored)
    def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector4", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2 is ignored)
    def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2/new_items3 are ignored)
    def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2..new_items4 are ignored)
    def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector4", idx: int) -> object:
        # Negative indices are ignored; elements after idx shift left by one.
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2 is ignored)
    def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2/idx3 are ignored)
    def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2..idx4 are ignored)
    def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector4", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2 is ignored)
    def get2(self:"Vector4", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2/idx3 are ignored)
    def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2..idx4 are ignored)
    def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length3(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length4(self:"Vector4") -> int:
        return self.size
# A resizable list of integers
class Vector5(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
items5: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
size5: int = 0
def __init__(self:"Vector5"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity5(self:"Vector5") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity5(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector5", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector5", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector5", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
item5:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector5", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector5", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector5", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
    # idx2..idx4 are unused benchmark padding; only idx is read.
    return self.items[idx]
# Retrieves an item at a given index
def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
    # idx2..idx5 are unused benchmark padding; only idx is read.
    return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector5") -> int:
    # Logical element count, not the capacity of the backing list.
    return self.size
# Retrieves the current size of the vector
def length2(self:"Vector5") -> int:
    # Benchmark clone of length(); same logical element count.
    return self.size
# Retrieves the current size of the vector
def length3(self:"Vector5") -> int:
    # Benchmark clone of length(); same logical element count.
    return self.size
# Retrieves the current size of the vector
def length4(self:"Vector5") -> int:
    # Benchmark clone of length(); same logical element count.
    return self.size
# Retrieves the current size of the vector
def length5(self:"Vector5") -> int:
    # Benchmark clone of length(); same logical element count.
    return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
    # Capacity above which growth falls back to one-slot increments.
    doubling_limit:int = 1000
    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector") -> int:
        # Double the backing storage while capacity is at most half the
        # limit (list + list concatenates a copy of itself).
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    # Overriding to do fewer resizes
    # Fix: this method's name had been corrupted to the template
    # placeholder "$ID" (a syntax error).  Restored to increase_capacity
    # so it overrides Vector.increase_capacity exactly like the sibling
    # classes DoublingVector / DoublingVector3 / 4 / 5 do.
    def increase_capacity(self:"DoublingVector2") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity2(self:"DoublingVector2") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
    # Benchmark clone of DoublingVector with padded attributes/methods.
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector3") -> int:
        # Double the backing storage up to half the limit, then grow
        # one slot at a time.
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity2(self:"DoublingVector3") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity3(self:"DoublingVector3") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
    # Benchmark clone of DoublingVector with padded attributes/methods.
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    doubling_limit4:int = 1000
    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector4") -> int:
        # Double the backing storage up to half the limit, then grow
        # one slot at a time.
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity2(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity3(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity4(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
    # Benchmark clone of DoublingVector with padded attributes/methods.
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    doubling_limit4:int = 1000
    doubling_limit5:int = 1000
    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector5") -> int:
        # Double the backing storage up to half the limit, then grow
        # one slot at a time.
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity2(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity3(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity4(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes
    def increase_capacity5(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
    v:Vector = None
    v = DoublingVector()
    # Append i, i+1, ..., j-1 (yields an empty vector when i >= j).
    while i < j:
        v.append(i)
        i = i + 1
    return v
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
    # Benchmark clone of vrange(); i2/j2 and v2 are unused padding.
    v:Vector = None
    v2:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
    # Benchmark clone of vrange(); the extra parameters/locals are
    # unused padding.
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
    # Benchmark clone of vrange(); the extra parameters/locals are
    # unused padding.
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v4:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
    # Benchmark clone of vrange(); the extra parameters/locals are
    # unused padding.
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v4:Vector = None
    v5:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
    # Removes, in place, every element divisible by an earlier element.
    # Quadratic "trial removal" rather than a true sieve, hence the
    # "(not really)" above.
    i:int = 0
    j:int = 0
    k:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                # remove_at shifts later elements left, so j deliberately
                # stays put to examine the element that slid into slot j.
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
def sieve2(v:Vector, v2:Vector) -> object:
    # Benchmark clone of sieve(); v2 and the *2 locals are unused padding.
    i:int = 0
    i2:int = 0
    j:int = 0
    j2:int = 0
    k:int = 0
    k2:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
    # Benchmark clone of sieve(); the extra vectors/locals are unused
    # padding.
    i:int = 0
    i2:int = 0
    i3:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
    # Benchmark clone of sieve(); the extra vectors/locals are unused
    # padding.
    i:int = 0
    i2:int = 0
    i3:int = 0
    i4:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    j4:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    k4:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
    # Benchmark clone of sieve(); the extra vectors/locals are unused
    # padding.
    i:int = 0
    i2:int = 0
    i3:int = 0
    i4:int = 0
    i5:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    j4:int = 0
    j5:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    k4:int = 0
    k5:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Input parameter
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
# Build [2, n) five times (v2..v5 are benchmark padding) and filter v.
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
# After sieve(v) the surviving elements are printed one per line.
while i < v.length():
    print(v.get(i))
    i = i + 1
| [
"[email protected]"
] | |
f472bebf8fd8f21a87e1b9ff1e7bf3e60577b9b8 | 1c58aef845d5dc1398249d784450c3825a1a75a5 | /Dynamic_Programming/speed_and_spikes.py | a9cbecd5027d6f47406253b683546dacd977f77a | [] | no_license | AmitKulkarni23/Leet_HackerRank | b1c1d7e5915397fd971d777baf75bb0f6fd27c78 | 047b167311d2fb93a53998a20d73533a4cae2ab8 | refs/heads/master | 2021-06-01T20:24:40.659530 | 2020-02-06T22:10:14 | 2020-02-06T22:10:14 | 123,007,444 | 0 | 0 | null | 2018-07-12T18:42:40 | 2018-02-26T17:58:28 | Python | UTF-8 | Python | false | false | 2,069 | py | # Speed and spikes problem
# Source -> http://blog.refdash.com/dynamic-programming-tutorial-example/
def canStopOnRunway(init_speed, runway, init_position):
    """
    Return True when a hopper moving with init_speed can eventually come
    to rest (speed 0) on a spike-free cell of the runway.

    type: runway - List of bool (True = safe cell, False = spike)
    type: init_speed - int
    type: init_position - int

    Each hop covers adjusted_speed cells, where adjusted_speed may be
    one less than, equal to, or one more than the current speed.  This
    is the plain recursive (exponential-time) version; the memoised
    variant is canStopIterative below.
    """
    # Base cases - all negative ones: off the runway, negative speed, or
    # standing on a spike.  Fix: the original spread this condition over
    # two lines with no continuation character, which was a syntax
    # error; parenthesising the expression restores it.
    if (init_position >= len(runway) or init_position < 0 or
            init_speed < 0 or not runway[init_position]):
        return False
    # Positive base case: stopped on a safe cell.
    if init_speed == 0:
        return True
    # Try decelerating, holding speed, and accelerating.
    for adjusted_speed in (init_speed - 1, init_speed, init_speed + 1):
        if canStopOnRunway(adjusted_speed, runway, init_position + adjusted_speed):
            return True
    return False
# Iterative solution
def canStopIterative(runway, initSpeed, startIndex = 0):
    # Bottom-up dynamic-programming version of canStopOnRunway above.
    # maximum speed cannot be larger than length of the runway. We will talk about
    # making this bound tighter later on.
    maxSpeed = len(runway)
    # Invalid start position/speed, or starting on a spike: unreachable.
    if (startIndex >= len(runway) or startIndex < 0 or initSpeed < 0 or initSpeed > maxSpeed or not runway[startIndex]):
        return False
    # {position i : set of speeds for which we can stop from position i}
    memo = {}
    # Base cases, we can stop when a position is not a spike and speed is zero.
    # (Spike positions deliberately get no entry at all.)
    for position in range(len(runway)):
        if runway[position]:
            memo[position] = set([0])
    # Outer loop to go over positions from the last one to the first one
    for position in reversed(range(len(runway))):
        # Skip positions which contain spikes
        if not runway[position]:
            continue
        # For each position, go over all possible speeds
        for speed in range(1, maxSpeed + 1):
            # Recurrence relation is the same as in the recursive version.
            # One feasible adjusted hop suffices, hence the break.
            for adjustedSpeed in [speed, speed - 1, speed + 1]:
                if (position + adjustedSpeed in memo and
                        adjustedSpeed in memo[position + adjustedSpeed]):
                    memo[position].add(speed)
                    break
    # startIndex is guaranteed to be in memo by the guard above.
    return initSpeed in memo[startIndex]
| [
"[email protected]"
] | |
2df8b170c2e69db0ef937e0f736d0c7f99e10784 | 3d962145fe6f6b118e059af8728ec4d302593101 | /dbaas/dbaas/middleware.py | 2e93c087ba2aca960ed1b7a8b12a213aa456d2de | [
"BSD-3-Clause"
] | permissive | perry-contribs/database-as-a-service | f015008a29600ce128c15686c2ff00fa360600f9 | db8fbee3982bbc32abf32aa86f7387de01a243be | refs/heads/master | 2022-12-18T12:17:40.496898 | 2020-10-01T09:12:54 | 2020-10-01T09:12:54 | 300,216,997 | 0 | 0 | BSD-3-Clause | 2020-10-01T09:11:19 | 2020-10-01T09:11:18 | null | UTF-8 | Python | false | false | 1,390 | py | from datetime import datetime, timedelta
from threading import current_thread
from django.conf import settings
from django.contrib import auth
class AutoLogout:
    """Django middleware: log a user out once their session has been idle
    for more than settings.AUTO_LOGOUT_DELAY minutes."""

    def process_request(self, request):
        # Anonymous visitors have nothing to time out.
        if not request.user.is_authenticated():
            return
        if 'last_touch' in request.session:
            idle_allowed = timedelta(seconds=settings.AUTO_LOGOUT_DELAY * 60)
            idle_so_far = datetime.now() - request.session['last_touch']
            if idle_so_far > idle_allowed:
                auth.logout(request)
                return
        # Still active (or first authenticated request): stamp the session.
        request.session['last_touch'] = datetime.now()
class UserMiddleware(object):
    """Middleware that remembers each thread's active request so the
    authenticated user can be fetched from anywhere via current_user().

    The registry maps thread ident -> request and is cleaned up in
    process_response / process_exception.
    """

    _requests = {}

    @classmethod
    def current_user(cls):
        # Look up the request registered for the calling thread, if any.
        request = cls._requests.get(current_thread().ident, None)
        if not request:
            return
        return request.user

    @classmethod
    def set_current_user(cls, user):
        # Rebind the user on the calling thread's registered request.
        cls._requests[current_thread().ident].user = user

    def process_request(self, request):
        self._requests[current_thread().ident] = request

    def process_response(self, request, response):
        self._requests.pop(current_thread().ident, None)
        return response

    def process_exception(self, request, exception):
        self._requests.pop(current_thread().ident, None)
| [
"[email protected]"
] | |
d2d0f0e9540ff72210ef46f197d0fd14ea054dae | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03546/s313305577.py | d170aacbf34a2d5379c9166b8773528b739fcec0 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,352 | py | # G[i][j]: 頂点v_iから頂点v_jへ到達するための辺コストの和
# Note from the original author (translated): wrapping this in a class is
# slow in Python; a plain function version would be faster.
class Warshall_Floyd:
    """All-pairs shortest paths on V vertices via Floyd-Warshall.

    G[i][j] holds the cheapest known cost to travel from vertex i to
    vertex j (float('inf') when unreachable).
    """

    def __init__(self, V):
        self.V = V
        self.G = [[float("inf")] * V for _ in range(V)]

    def add_edge(self, fr, to, cost):
        # Directed edge fr -> to.
        self.G[fr][to] = cost

    def add_multi_edge(self, v1, v2, cost):
        # Undirected edge: set both directions.
        for a, b in ((v1, v2), (v2, v1)):
            self.G[a][b] = cost

    def solve(self):
        # Zero the diagonal, then relax through every intermediate k.
        inf = float("inf")
        for v in range(self.V):
            self.G[v][v] = 0
        for k in range(self.V):
            for i in range(self.V):
                for j in range(self.V):
                    if self.G[i][k] != inf and self.G[k][j] != inf:
                        candidate = self.G[i][k] + self.G[k][j]
                        if candidate < self.G[i][j]:
                            self.G[i][j] = candidate

    def if_negative(self):
        # A negative diagonal entry after solve() signals a negative cycle.
        return any(self.G[v][v] < 0 for v in range(self.V))

    def dist(self, fr, to):
        return self.G[fr][to]
# Digits 0-9 form the vertices; the input is a 10x10 matrix whose (i, j)
# entry is the cost of rewriting digit i into digit j.
G = Warshall_Floyd(10)
H, W = map(int, input().split())
for i in range(10):
    for j, c in enumerate(input().split()):
        G.add_edge(i, j, int(c))
G.solve()
ans = 0
# Sum, over every grid cell that holds a digit (cells holding -1 are
# skipped), the cheapest total cost of turning that digit into 1.
for i in range(H):
    for j, a in enumerate(input().split()):
        if a != "-1":
            ans += G.dist(int(a), 1)
print(ans) | [
"[email protected]"
] | |
7325dcc029209e10b76f417754456c26de5f846e | 30099a17539959fd445a412c09cf5d5369971dc4 | /test/test.py | 2ed51d8a5db2198d4f3c88cd8b31e78cc63c9a7e | [] | no_license | Kmmanki/bit_seoul | 7e16d6d51c21c449dd5150a64da3233bc562dcb7 | 8d1e86a84e218c7d96d6c6f9b2b5cd69765ebb9c | refs/heads/main | 2023-04-08T02:16:10.372497 | 2021-04-15T01:35:09 | 2021-04-15T01:35:09 | 311,284,716 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,536 | py | import numpy as np
from tensorflow.keras.models import Sequential,Model
from tensorflow.keras.layers import Dense, LSTM, Concatenate, Input
from tensorflow.keras.callbacks import EarlyStopping
#1.data
# Two parallel integer sequences and their targets (13 samples of 3 steps).
x1 = np.array([[1,2,3],[2,3,4],[3,4,5],[4,5,6]
    ,[5,6,7],[6,7,8],[7,8,9,],[8,9,10]
    ,[9,10,11],[10,11,12]
    ,[20,30,40],[30,40,50],[40,50,60]
    ])
x2 = np.array([[10,20,30],[20,30,40],[30,40,50],[40,50,60]
    ,[50,60,70],[60,70,80],[70,80,90,],[80,90,100]
    ,[90,100,110],[100,110,120]
    ,[2,3,4],[3,4,5],[4,5,6]
    ])
x1_input = np.array([55,65,75])
x2_input = np.array([65,75,85])
y = np.array([4,5,6,7,8,9,10,11,12,13,50,60,70])
# LSTM layers expect (samples, timesteps, features).
x1 = x1.reshape(13, 3, 1)
x2 = x2.reshape(13, 3, 1)

# #2. model: two stacked-LSTM branches merged by concatenation.
input1 = Input(shape=(3, 1))
# Fix: stacking a second LSTM requires the first one to emit the full
# sequence (return_sequences=True); without it Keras raises a shape error
# because LSTM(5) outputs a 2-D (batch, units) tensor.
lstm_d = LSTM(5, return_sequences=True)(input1)
input1_d = LSTM(5)(lstm_d)
input2 = Input(shape=(3, 1))
lstm_d2 = LSTM(5, return_sequences=True)(input2)
input2_d = LSTM(5)(lstm_d2)
concat_model = Concatenate()([input1_d, input2_d])
model_concat = Dense(1)(concat_model)
model = Model(inputs=[input1, input2], outputs=[model_concat])
model.summary()

#Compile
model.compile(loss='mse', metrics=['mse'], optimizer='adam')
earlyStopping = EarlyStopping(monitor='loss', patience=125, mode='min')
model.fit([x1, x2], y, batch_size=3, epochs=10000, verbose=1, callbacks=[earlyStopping])

# #predict
# Fix: prediction inputs must match the model's (timesteps, features)
# input shape, i.e. (1, 3, 1) rather than (1, 3).
x1_input = x1_input.reshape(1, 3, 1)
x2_input = x2_input.reshape(1, 3, 1)
y_predict = model.predict([x1_input, x2_input])
print(y_predict)
# print(loss) | [
"[email protected]"
] | |
ff0bc8e563418f52c188920b6d7c290630091b55 | 4742f8b06c0e2d537cd76d0b05664be09f2a08fb | /Python Scripts/_Mono_Framework/MonoButtonElement.py | de9bbe6e71029607e51f7408b3c822e234ac1ebe | [] | no_license | robjac/m4m7 | bf6414bd0dd3ebfd7da72adecb761a85bfa79ce6 | 0dc6043594effd7375e7fb893321109f85288b46 | refs/heads/master | 2021-01-17T23:35:34.276099 | 2015-12-06T08:17:06 | 2015-12-06T08:17:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,989 | py | # by amounra 0413 : http://www.aumhaa.com
import Live
from _Framework.ButtonElement import ButtonElement, ON_VALUE, OFF_VALUE
from _Framework.Skin import Skin, SkinColorMissingError
from MonoBridgeElement import MonoBridgeProxy
from Debug import *
# Message-type discriminators used by the button element below.
MIDI_NOTE_TYPE = 0
MIDI_CC_TYPE = 1
MIDI_PB_TYPE = 2
MIDI_MSG_TYPES = (MIDI_NOTE_TYPE,
 MIDI_CC_TYPE,
 MIDI_PB_TYPE)
# Channel-0 status bytes for each MIDI message class.
MIDI_NOTE_ON_STATUS = 144
MIDI_NOTE_OFF_STATUS = 128
MIDI_CC_STATUS = 176
MIDI_PB_STATUS = 224
# Shared logger supplied by the Debug module (imported star above).
debug = initialize_debug()
class MonoButtonElement(ButtonElement):
    __module__ = __name__
    __doc__ = ' Special button class that can be configured with custom on- and off-values, some of which flash at specified intervals called by _Update_Display'

    def __init__(self, is_momentary, msg_type, channel, identifier, name = 'Button', script = None, color_map = None, *a, **k):
        super(MonoButtonElement, self).__init__(is_momentary, msg_type, channel, identifier, name = name, *a, **k)
        self._script = script
        # Palette of MIDI data bytes used as LED colors; send_value()
        # indexes it with (value - 1) % num_colors.
        self._color_map = color_map or [2, 64, 4, 8, 16, 127, 32]
        self._num_colors = 7
        self._num_flash_states = 18
        # Flash period derived from the last sent value (0 = no flashing).
        self._flash_state = 0
        # Last color byte actually transmitted.
        self._color = 0
        self._on_value = 127
        self._off_value = 0
        # Data byte sent when the logical value is 0 ("off" appearance).
        self._darkened = 0
        self._is_enabled = True
        self._force_forwarding = False
        self._is_notifying = False
        self._force_next_value = False
        self._parameter = None
        # NOTE(review): only _report_input is set here while send_value()
        # reads self._report_output -- presumably initialised by the
        # _Framework base class; confirm.
        self._report_input = True

    def set_color_map(self, color_map):
        # Replace the palette; flash granularity follows the palette size.
        assert isinstance(color_map, tuple)
        assert len(color_map) > 0
        self._num_colors = len(color_map)
        self._num_flash_states = int(127/len(color_map))
        self._color_map = color_map

    def set_on_off_values(self, on_value, off_value):
        # Resetting _last_sent_message forces the next send to go out.
        self._last_sent_message = None
        self._on_value = on_value
        self._off_value = off_value

    def set_on_value(self, value):
        self._last_sent_message = None
        self._on_value = value

    def set_off_value(self, value):
        self._last_sent_message = None
        self._off_value = value

    def set_darkened_value(self, value = 0):
        #debug('setting darkened:', value)
        # A nonzero argument is a 1-based index into the color map.
        if value:
            value = self._color_map[value-1]
        self._darkened = value

    def set_force_next_value(self):
        self._last_sent_message = None
        self._force_next_value = True

    def set_enabled(self, enabled):
        self._is_enabled = enabled
        self._request_rebuild()

    def turn_on(self, force = False):
        # Integer on-values are sent raw; anything else is treated as a
        # skin color key, falling back to full velocity if the skin
        # lacks an entry.
        self.force_next_send()
        if self._on_value in range(0, 128):
            self.send_value(self._on_value)
        else:
            try:
                color = self._skin[self._on_value]
                color.draw(self)
            except SkinColorMissingError:
                #super(MonoButtonElement, self).turn_on()
                debug('skin color missing', self._on_value)
                self.send_value(127)

    def turn_off(self, force = False):
        # Mirror of turn_on() for the off-value (fallback sends 0).
        self.force_next_send()
        #debug('turn off:', self._off_value)
        if self._off_value in range(0, 128):
            self.send_value(self._off_value)
        else:
            try:
                color = self._skin[self._off_value]
                color.draw(self)
            except SkinColorMissingError:
                #super(MonoButtonElement, self).turn_off()
                debug('skin color missing', self._off_value)
                self.send_value(0)

    def reset(self, force = False):
        # Clear the dimmed color and blank the LED.
        self._darkened = 0;
        self.force_next_send()
        self.send_value(0)

    def set_light(self, value, *a, **k):
        # Probe the skin first purely to swallow missing-color errors,
        # then defer to the framework implementation.
        try:
            self._skin[value]
        except SkinColorMissingError:
            #debug('skin missing for', value)
            pass
        #debug('skin value:', value)
        super(MonoButtonElement, self).set_light(value, *a, **k)

    def send_value(self, value, force = False):
        # Translate a logical value (0..127) into a palette color and
        # emit the matching MIDI note/CC message.
        if (value != None) and isinstance(value, int) and (value in range(128)):
            if (force or self._force_next_send or ((value != self._last_sent_value) and self._is_being_forwarded)):
                data_byte1 = self._original_identifier
                # 1..126 cycle through the palette; 127 always maps to the
                # last palette entry; 0 sends the darkened color.
                if value in range(1, 127):
                    data_byte2 = self._color_map[(value - 1) % (self._num_colors)]
                elif value == 127:
                    data_byte2 = self._color_map[self._num_colors-1]
                else:
                    data_byte2 = self._darkened
                self._color = data_byte2
                status_byte = self._original_channel
                if (self._msg_type == MIDI_NOTE_TYPE):
                    status_byte += MIDI_NOTE_ON_STATUS
                elif (self._msg_type == MIDI_CC_TYPE):
                    status_byte += MIDI_CC_STATUS
                else:
                    # Pitch-bend buttons are unsupported here.
                    assert False
                self.send_midi(tuple([status_byte,
                 data_byte1,
                 data_byte2]))
                self._last_sent_message = [value]
                if self._report_output:
                    is_input = True
                    self._report_value(value, (not is_input))
                # Derive the flash period from the value's palette "page".
                self._flash_state = round((value -1)/self._num_colors)
                self._force_next_value = False
        else:
            debug('Button bad send value:', value)

    def script_wants_forwarding(self):
        # Disabled buttons stop forwarding unless explicitly forced.
        if not self._is_enabled and not self._force_forwarding:
            return False
        else:
            return super(MonoButtonElement, self).script_wants_forwarding()

    def flash(self, timer):
        # Called periodically with a timer tick; alternates between the
        # current color and the darkened color at a rate set by
        # _flash_state (0 disables flashing).
        if (self._is_being_forwarded and self._flash_state in range(1, self._num_flash_states) and (timer % self._flash_state) == 0):
            data_byte1 = self._original_identifier
            data_byte2 = self._color if int((timer % (self._flash_state * 2)) > 0) else self._darkened
            status_byte = self._original_channel
            if (self._msg_type == MIDI_NOTE_TYPE):
                status_byte += MIDI_NOTE_ON_STATUS
            elif (self._msg_type == MIDI_CC_TYPE):
                status_byte += MIDI_CC_STATUS
            else:
                assert False
            self.send_midi((status_byte,
             data_byte1,
             data_byte2))

    def release_parameter(self):
        # Forget the dimmed color along with the mapped parameter.
        self._darkened = 0
        super(MonoButtonElement, self).release_parameter()
class DescriptiveMonoButtonElement(MonoButtonElement):
    # Variant that additionally reports a textual description of the
    # button's current function/state to a MonoBridge endpoint.

    def __init__(self, *a, **k):
        super(DescriptiveMonoButtonElement, self).__init__(*a, **k)
        self._descriptor = None
        self._last_reported_descriptor = None
        # Bridge resolution order: explicit kwarg, then the owning script
        # (if it can forward notifications), then an inert proxy.
        monobridge = k['monobridge'] if 'monobridge' in k else None
        if not monobridge is None:
            self._monobridge = monobridge
        elif hasattr(self._script, 'notification_to_bridge'):
            self._monobridge = self._script
        else:
            self._monobridge = MonoBridgeProxy()

    def set_descriptor(self, descriptor):
        # Stored with a leading dot so it concatenates cleanly after the
        # reported state string.
        self._descriptor = '.' + str(descriptor) if descriptor else ''

    def _set_descriptor(self, descriptor):
        #debug('_set_descriptor:', descriptor)
        self.set_descriptor(descriptor)

    def _get_descriptor(self):
        #debug('_get_descriptor:', '' if self._descriptor is None else str(self._descriptor))
        return '' if self._descriptor is None else str(self._descriptor)

    descriptor = property(_get_descriptor, _set_descriptor)

    def report_descriptor(self, descriptor = None, force = False):
        # Forward "<state>.<descriptor>" to the bridge, suppressing
        # duplicates unless forced.
        if force or (descriptor != self._last_reported_descriptor):
            self._monobridge._send(self.name, 'button_function', str(descriptor) + self.descriptor)
            self._last_reported_descriptor = descriptor

    def set_light(self, value, *a, **k):
        try:
            self._skin[value]
        except SkinColorMissingError:
            pass
        # NOTE(review): super(MonoButtonElement, ...) deliberately skips
        # MonoButtonElement.set_light and calls the framework base --
        # looks intentional (avoids double skin probing) but confirm.
        super(MonoButtonElement, self).set_light(value, *a, **k)
        self.report_descriptor(value)

    def turn_on(self, force = False):
        # Same as MonoButtonElement.turn_on, plus descriptor reporting.
        self.force_next_send()
        if self._on_value in range(0, 128):
            self.send_value(self._on_value)
            self.report_descriptor('on')
        else:
            try:
                color = self._skin[self._on_value]
                color.draw(self)
            except SkinColorMissingError:
                #super(MonoButtonElement, self).turn_on()
                debug('skin color missing', self._on_value)
                self.send_value(127)
            self.report_descriptor(self._on_value)

    def turn_off(self, force = False):
        # Same as MonoButtonElement.turn_off, plus descriptor reporting.
        self.force_next_send()
        #debug('turn off:', self._off_value)
        if self._off_value in range(0, 128):
            self.send_value(self._off_value)
            self.report_descriptor('off')
        else:
            try:
                color = self._skin[self._off_value]
                color.draw(self)
            except SkinColorMissingError:
                #super(MonoButtonElement, self).turn_off()
                debug('skin color missing', self._off_value)
                self.send_value(0)
            self.report_descriptor(self._off_value)
| [
"[email protected]"
] | |
0f8e38c0a2f1ee16f600bf711b04253540799ebb | c6292c1dd68f0c4dd3389628de0d2b786fa0ee64 | /0x11-python-network_1/5-hbtn_header.py | b8d01504078e77816dad1f5eb961ca606bc0bf7a | [] | no_license | mj31508/holbertonschool-higher_level_programming2 | 835be695b568cd189c1448c54218a0201830005f | 3fa47001c041cd0c74f88c3a19677e126bee37b4 | refs/heads/master | 2021-07-06T22:31:05.040354 | 2017-09-29T05:28:45 | 2017-09-29T05:28:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 279 | py | #!/usr/bin/python3
# Taking in a URL and sends a request
import requests
import sys
if __name__ == "__main__":
    # Fetch the URL given as the first CLI argument and print its
    # X-Request-Id response header, if present.
    url_arg = sys.argv[1]
    req_arg = requests.get(url_arg)
    try:
        store = req_arg.headers["X-Request-Id"]
        print(store)
    except KeyError:
        # Fix: the original bare `except:` swallowed every exception
        # (including SystemExit/KeyboardInterrupt).  A missing header is
        # the only expected, ignorable condition here.
        pass
| [
"[email protected]"
] | |
0a086ba5ffae00e39e59426d3cc287e047cf73a7 | 4d71eb6075ac388a67370fddc1ae0579575b304f | /examples/ssd/train_multi.py | 8fdd055743af395d22b217e7d335ca7d5e61771e | [
"MIT"
] | permissive | apple2373/chainercv | 1b4a6ad80648e848c7108e53fbb3e3fd44aa5c3e | 1442eac6a316c31eab029c156b6d6e151553be2a | refs/heads/master | 2020-03-18T20:53:21.473474 | 2018-05-28T09:37:14 | 2018-05-28T09:37:14 | 135,245,197 | 1 | 0 | MIT | 2018-05-29T05:27:15 | 2018-05-29T05:27:15 | null | UTF-8 | Python | false | false | 5,279 | py | import argparse
import multiprocessing
import numpy as np
import chainer
from chainer.optimizer_hooks import WeightDecay
from chainer import serializers
from chainer import training
from chainer.training import extensions
from chainer.training import triggers
import chainermn
from chainercv.chainer_experimental.datasets.sliceable \
import ConcatenatedDataset
from chainercv.chainer_experimental.datasets.sliceable import TransformDataset
from chainercv.datasets import voc_bbox_label_names
from chainercv.datasets import VOCBboxDataset
from chainercv.extensions import DetectionVOCEvaluator
from chainercv.links.model.ssd import GradientScaling
from chainercv.links.model.ssd import multibox_loss
from chainercv.links import SSD300
from chainercv.links import SSD512
from train import Transform
class MultiboxTrainChain(chainer.Chain):
    # Training wrapper: runs the SSD model and reduces its raw multibox
    # outputs to a single scalar loss for the optimizer.

    def __init__(self, model, alpha=1, k=3, comm=None):
        # model: SSD300/SSD512 link; alpha: weight of the localisation
        # loss; k: hard-negative mining ratio forwarded to multibox_loss;
        # comm: ChainerMN communicator (None for single-process training).
        super(MultiboxTrainChain, self).__init__()
        with self.init_scope():
            self.model = model
        self.alpha = alpha
        self.k = k
        self.comm = comm

    def __call__(self, imgs, gt_mb_locs, gt_mb_labels):
        mb_locs, mb_confs = self.model(imgs)
        loc_loss, conf_loss = multibox_loss(
            mb_locs, mb_confs, gt_mb_locs, gt_mb_labels, self.k, self.comm)
        loss = loc_loss * self.alpha + conf_loss
        # Publish the loss components so LogReport/PrintReport can show them.
        chainer.reporter.report(
            {'loss': loss, 'loss/loc': loc_loss, 'loss/conf': conf_loss},
            self)
        return loss
def main():
    # Multi-process (ChainerMN) SSD training on PASCAL VOC.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--model', choices=('ssd300', 'ssd512'), default='ssd300')
    parser.add_argument('--batchsize', type=int, default=32)
    parser.add_argument('--test-batchsize', type=int, default=16)
    parser.add_argument('--out', default='result')
    parser.add_argument('--resume')
    args = parser.parse_args()

    # One worker per process; intra_rank selects the local GPU.
    comm = chainermn.create_communicator()
    device = comm.intra_rank

    if args.model == 'ssd300':
        model = SSD300(
            n_fg_class=len(voc_bbox_label_names),
            pretrained_model='imagenet')
    elif args.model == 'ssd512':
        model = SSD512(
            n_fg_class=len(voc_bbox_label_names),
            pretrained_model='imagenet')

    model.use_preset('evaluate')
    train_chain = MultiboxTrainChain(model)
    chainer.cuda.get_device_from_id(device).use()
    model.to_gpu()

    # VOC2007+VOC2012 trainval, encoded into multibox targets on the fly.
    train = TransformDataset(
        ConcatenatedDataset(
            VOCBboxDataset(year='2007', split='trainval'),
            VOCBboxDataset(year='2012', split='trainval')
        ),
        ('img', 'mb_loc', 'mb_label'),
        Transform(model.coder, model.insize, model.mean))
    # Rank 0 builds the index array; scatter_dataset shuffles and hands
    # each worker its shard.
    if comm.rank == 0:
        indices = np.arange(len(train))
    else:
        indices = None
    indices = chainermn.scatter_dataset(indices, comm, shuffle=True)
    train = train.slice[indices]

    # http://chainermn.readthedocs.io/en/latest/tutorial/tips_faqs.html#using-multiprocessiterator
    if hasattr(multiprocessing, 'set_start_method'):
        multiprocessing.set_start_method('forkserver')
    train_iter = chainer.iterators.MultiprocessIterator(
        train, args.batchsize // comm.size, n_processes=2)

    # Evaluation data is only needed on rank 0 (see the evaluator below).
    if comm.rank == 0:
        test = VOCBboxDataset(
            year='2007', split='test',
            use_difficult=True, return_difficult=True)
        test_iter = chainer.iterators.SerialIterator(
            test, args.test_batchsize, repeat=False, shuffle=False)

    # initial lr is set to 1e-3 by ExponentialShift
    optimizer = chainermn.create_multi_node_optimizer(
        chainer.optimizers.MomentumSGD(), comm)
    optimizer.setup(train_chain)
    # SSD recipe: biases get doubled gradients, weights get weight decay.
    for param in train_chain.params():
        if param.name == 'b':
            param.update_rule.add_hook(GradientScaling(2))
        else:
            param.update_rule.add_hook(WeightDecay(0.0005))

    updater = training.updaters.StandardUpdater(
        train_iter, optimizer, device=device)
    trainer = training.Trainer(updater, (120000, 'iteration'), args.out)
    # Step the learning rate down (x0.1) at 80k and 100k iterations.
    trainer.extend(
        extensions.ExponentialShift('lr', 0.1, init=1e-3),
        trigger=triggers.ManualScheduleTrigger([80000, 100000], 'iteration'))

    # Evaluation, logging, and snapshotting run on rank 0 only.
    if comm.rank == 0:
        trainer.extend(
            DetectionVOCEvaluator(
                test_iter, model, use_07_metric=True,
                label_names=voc_bbox_label_names),
            trigger=(10000, 'iteration'))

        log_interval = 10, 'iteration'
        trainer.extend(extensions.LogReport(trigger=log_interval))
        trainer.extend(extensions.observe_lr(), trigger=log_interval)
        trainer.extend(extensions.PrintReport(
            ['epoch', 'iteration', 'lr',
             'main/loss', 'main/loss/loc', 'main/loss/conf',
             'validation/main/map']),
            trigger=log_interval)
        trainer.extend(extensions.ProgressBar(update_interval=10))

        trainer.extend(extensions.snapshot(), trigger=(10000, 'iteration'))
        trainer.extend(
            extensions.snapshot_object(
                model, 'model_iter_{.updater.iteration}'),
            trigger=(120000, 'iteration'))

    if args.resume:
        serializers.load_npz(args.resume, trainer)

    trainer.run()
# Script entry point: kick off multi-process SSD training.
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
0f1b7cefd176b3539ee02af4cd1b8710084794b8 | 4e7797d520a68ded9bf77be555c13aff643389f1 | /tests/test_helper.py | e448b376e57a6f296b955df3b01a3e0df4fcf3f7 | [] | no_license | JBorrow/seagen | 654cf06b3191fbd00faaa313afb2822b7ea396f2 | fb1dbc0736593cc2cd6364812e1fcc0fabe3eb32 | refs/heads/master | 2021-04-27T00:20:41.761132 | 2018-03-06T15:23:18 | 2018-03-06T15:23:18 | 123,797,126 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 743 | py | """
Tests the helper functions
"""
import numpy as np
import matplotlib.pyplot as plt
import mpl_toolkits.mplot3d.axes3d as axes3d
from seagen.helper import polar_to_cartesian
def test_polar_to_cartesian():
    """
    Tests the polar to cartesian conversion by ensuring
    that r is conserved.
    """
    # Random angular samples (unseeded; NOTE(review): consider seeding
    # the RNG for reproducible test runs).
    phi = np.random.rand(1000) * np.pi
    theta = np.random.rand(1000) * np.pi * 2
    r = np.array([1])
    x, y, z = polar_to_cartesian(r, theta, phi)
    # Here we check if r is conserved!
    new_r = np.sqrt(x * x + y * y + z * z)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1, projection="3d")
    ax.scatter(x, y, z)
    plt.savefig("test_polar_to_cartesian.png")
    # Fix: close the figure so repeated runs don't accumulate open
    # matplotlib figures (memory growth / "too many figures" warning).
    plt.close(fig)
    # The third positional argument of np.isclose is rtol.
    assert np.isclose(r, new_r, 1e-9).all()
| [
"[email protected]"
] | |
c594f78b733e395a7d369bd262c7cea585fbe150 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03705/s081733428.py | 98e064ca6f0ef5b9a31b74e618de42bb0a8fbdba | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | N, A, B = map(int,input().split())
# Smallest achievable total: N-1 copies of A plus one B; largest: N-1
# copies of B plus one A.  NOTE(review): formulas taken from the original
# submission -- confirm against the problem statement.
# Fix: use names that don't shadow the built-ins `min` and `max`.
lowest = A * (N - 1) + B
highest = B * (N - 1) + A
span = (highest - lowest) + 1
if span < 0:
    print("0")
else:
    print(span)
print((max - min) + 1) | [
"[email protected]"
] | |
7c0e2ffee816a35c2d057419a714c80b1ebecc75 | f06cf7cf7fe12a50f45edbd56b20acfc28606a44 | /Linked List/intersection_point_in_list.py | 83f9cde211bce58dc409bc7e8d8650e0d63b775f | [] | no_license | prashant97sikarwar/data-structure-and-algorithms | 53e016e50cf2ba26d2504f5a5f1ba39ca35f13f4 | 3ebe367c96a7bd82a427fc2beb7c8edd37247de7 | refs/heads/master | 2023-01-18T16:15:44.935431 | 2020-11-22T14:59:54 | 2020-11-22T14:59:54 | 257,667,068 | 0 | 0 | null | 2020-10-02T09:59:27 | 2020-04-21T17:32:43 | Python | UTF-8 | Python | false | false | 456 | py | class node:
def __init__(self,val):
self.next = None
self.data = val
def intersetPoint(head_a,head_b):
curr = head_a
purr = head_b
while curr is not None:
curr.data = curr.data - 1001
curr = curr.next
ans = -1
while purr is not None:
if purr.data >= 0:
purr = purr.next
else:
purr.data = purr.data + 1001
ans = purr
break
return ans | [
"[email protected]"
] | |
a4d69b5d3d6747f313209a1534539033e0d26edf | 268568ff2d483f39de78a5b29d941ce499cace33 | /spyder/plugins/editor/__init__.py | 837acce64e0e7f40a42373fee3222e97d7416db5 | [
"MIT"
] | permissive | MarkMoretto/spyder-master | 61e7f8007144562978da9c6adecaa3022758c56f | 5f8c64edc0bbd203a97607950b53a9fcec9d2f0b | refs/heads/master | 2023-01-10T16:34:37.825886 | 2020-08-07T19:07:56 | 2020-08-07T19:07:56 | 285,901,914 | 2 | 1 | MIT | 2022-12-20T13:46:41 | 2020-08-07T19:03:37 | Python | UTF-8 | Python | false | false | 394 | py | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2009- Spyder Project Contributors
#
# Distributed under the terms of the MIT License
# (see spyder/__init__.py for details)
# -----------------------------------------------------------------------------
"""
spyder.plugins.editor
=====================
Editor Plugin.
"""
| [
"[email protected]"
] | |
7b74ecf9cdd1b909e24e5e576f8de6229d90b73b | 74cab51fa52be54dfd720ba130f0c0435d99c8cd | /xray/scratch/00-VICTRE_pipeline/01-pipeline_codes/x-ray_runs/run_xray_jobs.py | 9d528cc33e7ea4057dd677b530e765a726b82271 | [] | no_license | shenghh2015/fda_breast_phantom | a57f82fc280783b1c3ca48308565beeff87e6530 | dfb9d8613ed5cf0e5812a674027a5d200ffc91bd | refs/heads/main | 2023-06-04T10:51:09.736674 | 2021-06-17T19:21:27 | 2021-06-17T19:21:27 | 377,733,048 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 643 | py | import os
import argparse
#os.chdir('/scratch/00-VICTRE_pipeline/00-base_input_files/x-ray_runs/')
parser = argparse.ArgumentParser()
parser.add_argument("--num_jobs",type=int)
parser.add_argument("--gpu_num",type=int)
args = parser.parse_args()
path = os.getcwd()
print('The current path:{}'.format(path))
os.chdir('/scratch/00-VICTRE_pipeline/01-pipeline_codes/x-ray_runs')
cmd = 'python2 run_xray_alone.py {}'.format(args.gpu_num)
#cmd = 'python2 /scratch/00-VICTRE_pipeline/01-pipeline_codes/x-ray_runs/run_xray_SP.py {}'.format(args.gpu_num)
#nb_iter = 10000
nb_iter = args.num_jobs
for i in range(nb_iter):
os.system(cmd)
| [
"[email protected]"
] | |
57e3696503c3443d6e57294c7da7aefd27b261f9 | 244ecfc2017a48c70b74556be8c188e7a4815848 | /res/scripts/client/gui/shared/utils/requesters/requestscontroller.py | be2c8caecc9dbab9b1fc757b83af66b425b517d0 | [] | no_license | webiumsk/WOT-0.9.12 | c1e1259411ba1e6c7b02cd6408b731419d3174e5 | 5be5fd9186f335e7bae88c9761c378ff5fbf5351 | refs/heads/master | 2021-01-10T01:38:36.523788 | 2015-11-18T11:33:37 | 2015-11-18T11:33:37 | 46,414,438 | 1 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 5,567 | py | # 2015.11.18 11:57:02 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/gui/shared/utils/requesters/RequestsController.py
from functools import partial
import BigWorld
from debug_utils import LOG_ERROR, LOG_DEBUG
from shared_utils import safeCancelCallback
from gui.shared.rq_cooldown import RequestCooldownManager, REQUEST_SCOPE
class _NoCooldownsManager(RequestCooldownManager):
def __init__(self):
super(_NoCooldownsManager, self).__init__(REQUEST_SCOPE.GLOBAL)
def lookupName(self, rqTypeID):
return str(rqTypeID)
def getDefaultCoolDown(self):
return 0.0
class RequestsController(object):
def __init__(self, requester, cooldowns = _NoCooldownsManager()):
self._requester = requester
self._cooldowns = cooldowns
self._waiters = {}
self._rqQueue = []
self._rqCallbackID = None
self._rqCtx = None
self._rqHandler = None
return
def fini(self):
self.stopProcessing()
if self._requester:
self._requester.fini()
self._requester = None
return
def stopProcessing(self):
self._rqQueue = []
self._clearWaiters()
self._clearDelayedRequest()
if self._requester is not None:
self._requester.stopProcessing()
return
def request(self, ctx, callback = lambda *args: None, allowDelay = None):
LOG_DEBUG('Send server request', self.__class__.__name__, ctx, callback, allowDelay)
if allowDelay is None:
allowDelay = bool(self._cooldowns._commonCooldown)
requestType = ctx.getRequestType()
handler = self._getHandlerByRequestType(requestType)
if handler:
cooldown = ctx.getCooldown()
def _doRequest():
self._clearDelayedRequest()
cb = partial(self._callbackWrapper, requestType, callback, cooldown)
if handler(ctx, callback=cb):
self._waiters[requestType] = BigWorld.callback(self._getRequestTimeOut(), partial(self._onTimeout, cb, requestType, ctx))
self._cooldowns.process(requestType, cooldown)
if not allowDelay:
if self._cooldowns.validate(requestType, cooldown):
self._doRequestError(ctx, 'cooldown', callback)
else:
_doRequest()
else:
self._rqQueue.append((requestType, ctx, _doRequest))
self._doNextRequest()
else:
self._doRequestError(ctx, 'handler not found', callback)
return
def isInCooldown(self, requestTypeID):
return self._cooldowns.isInProcess(requestTypeID)
def getCooldownTime(self, requestTypeID):
return self._cooldowns.getTime(requestTypeID)
def isProcessing(self, requestTypeID):
return requestTypeID in self._waiters
def hasHandler(self, requestTypeID):
return self._getHandlerByRequestType(requestTypeID) is not None
def _doNextRequest(self, adjustCooldown = None):
if len(self._rqQueue) and self._rqCallbackID is None:
requestType, ctx, request = self._rqQueue.pop(0)
cooldownLeft = self._cooldowns.getTime(requestType)
if cooldownLeft:
self._loadDelayedRequest(cooldownLeft, ctx, request)
else:
request()
elif adjustCooldown and self._rqCallbackID is not None:
self._loadDelayedRequest(adjustCooldown, self._rqCtx, self._rqHandler)
return
def _getHandlerByRequestType(self, requestTypeID):
raise NotImplementedError
def _getRequestTimeOut(self):
return 30.0
def _callbackWrapper(self, requestType, callback, cooldown, *args):
callbackID = self._waiters.pop(requestType, None)
if callbackID is not None:
safeCancelCallback(callbackID)
self._cooldowns.adjust(requestType, cooldown)
if callback:
callback(*args)
self._doNextRequest(adjustCooldown=cooldown)
return
def _clearWaiters(self):
if self._waiters is not None:
while len(self._waiters):
_, callbackID = self._waiters.popitem()
safeCancelCallback(callbackID)
return
def _onTimeout(self, cb, requestType, ctx):
LOG_ERROR('Request timed out', self, requestType, ctx)
self._doRequestError(ctx, 'time out', cb)
def _doRequestError(self, ctx, msg, callback = None):
if self._requester:
self._requester._stopProcessing(ctx, msg, callback)
LOG_ERROR(msg, ctx)
return False
def _loadDelayedRequest(self, seconds, ctx, request):
self._clearDelayedRequest()
self._rqCtx = ctx
self._rqHandler = request
self._rqCtx.startProcessing()
self._rqCallbackID = BigWorld.callback(seconds, request)
def _clearDelayedRequest(self):
if self._rqCallbackID is not None:
safeCancelCallback(self._rqCallbackID)
self._rqCallbackID = None
if self._rqCtx is not None:
self._rqCtx.stopProcessing()
self._rqCtx = None
if self._rqHandler is not None:
self._rqHandler = None
return
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\gui\shared\utils\requesters\requestscontroller.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2015.11.18 11:57:03 Střední Evropa (běžný čas)
| [
"[email protected]"
] | |
95ce44a1ea35c2f7d70624237f2a1f58ee321bed | 05d84ca54c3866a546dc9eb03bf904552845b114 | /main/migrations/0005_auto_20181110_0140.py | 30b6a19f4a470da99c33289b5c2a8bf8c9fb0dfe | [] | no_license | chriscauley/ur-conf | 43ba6793995b4a17308844c6ccaeb7e92f50eb1e | 17819b750117ce3cb8808d4a6c8702b5234c8dbb | refs/heads/master | 2022-12-13T08:38:05.026612 | 2020-05-11T13:10:53 | 2020-05-11T13:10:53 | 155,024,014 | 0 | 0 | null | 2022-12-09T20:29:49 | 2018-10-28T01:13:40 | JavaScript | UTF-8 | Python | false | false | 368 | py | # Generated by Django 2.1.2 on 2018-11-10 01:40
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0004_auto_20181109_1624'),
]
operations = [
migrations.RenameField(
model_name='achievement',
old_name='description',
new_name='text',
),
]
| [
"[email protected]"
] | |
cf90adffa041ea62709451a2f6cf519949fce841 | b677894966f2ae2d0585a31f163a362e41a3eae0 | /ns3/ns-3.26/src/olsr/examples/wscript | 35a0d90e4cb07c3aaf83654d02db5ca1a0ebcde7 | [
"LicenseRef-scancode-free-unknown",
"GPL-2.0-only",
"Apache-2.0"
] | permissive | cyliustack/clusim | 667a9eef2e1ea8dad1511fd405f3191d150a04a8 | cbedcf671ba19fded26e4776c0e068f81f068dfd | refs/heads/master | 2022-10-06T20:14:43.052930 | 2022-10-01T19:42:19 | 2022-10-01T19:42:19 | 99,692,344 | 7 | 3 | Apache-2.0 | 2018-07-04T10:09:24 | 2017-08-08T12:51:33 | Python | UTF-8 | Python | false | false | 466 | ## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
def build(bld):
obj = bld.create_ns3_program('simple-point-to-point-olsr',
['point-to-point', 'internet', 'olsr', 'applications', 'wifi'])
obj.source = 'simple-point-to-point-olsr.cc'
obj = bld.create_ns3_program('olsr-hna',
['core', 'mobility', 'wifi', 'csma', 'olsr'])
obj.source = 'olsr-hna.cc'
| [
"[email protected]"
] | ||
8a90f1562daaf6f29ced5e9afb0f9c2d898e3578 | 7a5a3eb831825fb0c0e80957278d95332e5f2258 | /core/RIG/Control/mirrorCtlShp/mirrorCtlShp.py | cf45e216ce822ca1791c86f01caf11de51c3ea90 | [] | no_license | lefan2016/MPToolkit | e76cee2be10558dd97f228789d0e05bca4deebfc | 5b3d1cf7a83b9eeda57919722e353ada7a4f7f6b | refs/heads/master | 2020-04-23T11:05:02.816596 | 2015-12-17T06:26:32 | 2015-12-17T07:08:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,722 | py | #========================================
# author: changlong.zang
# mail: [email protected]
# date: Tue, 05 May 2015 11:44:00
#========================================
import os.path, re, pymel.core
import maya.cmds as mc
from mpUtils import scriptTool, uiTool, mayaTool
#--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
windowClass, baseClass = uiTool.loadUi(os.path.join(scriptTool.getScriptPath(), 'mirrorCtlShp.ui'))
class MirrorControlShp(windowClass, baseClass):
def __init__(self, parent=uiTool.getMayaWindow()):
if uiTool.windowExists('mirrorControlShapeUI'):return
super(MirrorControlShp, self).__init__(parent)
self.setupUi(self)
self.show()
def on_btn_mirror_clicked(self, click=None):
if click == None:return
controlType = str(self.fld_controlType.text())
flipAxis = 'X'
if self.rdn_filpX.isChecked():
flipAxis = 'X'
elif self.rdn_filpY.isChecked():
flipAxis = 'Y'
else:
flipAxis = 'Z'
if self.rdn_lefttoright.isChecked():
mirrorControlShape(controlType, 'L', 'R', flipAxis)
else:
mirrorControlShape(controlType, 'R', 'L', flipAxis)
@mayaTool.undo_decorator
def mirrorControlShape(typ, source, targent, flipAxis):
if len(typ) == 0:return
if source not in 'LR':return
if source == targent:return
#- get source side controls
all_controls = ' '.join(mc.listRelatives(mc.ls(type='nurbsCurve'), p=True, path=True))
matched_controls = re.findall('\S*%s_\w+_%s_\d+'%(source, typ), all_controls)
for ctl in matched_controls:
#- get targent control
targentControl = re.sub('%s_'%source, '%s_'%targent, ctl)
if not mc.objExists(targentControl):continue
#- duplicate shape
tempx = mc.duplicate(ctl, po=True)
mc.parent(mc.listRelatives(ctl, s=True, path=True), tempx, s=True, add=True)
#- make Temp
Temp = pymel.core.PyNode(mc.duplicate(tempx, rc=True)[0])
for a in 'trs':
for b in 'xyz':
attr = a + b
mc.setAttr('%s.%s'%(Temp, attr), l=False, k=True, cb=False)
#- close max min value controler
mc.transformLimits(Temp.name(), etx=(0, 0),ety=(0, 0),etz=(0, 0),erx=(0, 0),ery=(0, 0),erz=(0, 0))
mc.parent(Temp.name(), w=True)
#- filp
grp = mc.createNode('transform')
sourcePosi = mc.xform(ctl, q=True, ws=True, rp=True)
targenPosi = mc.xform(targentControl, q=True, ws=True, rp=True)
midPoint = [(sourcePosi[0] + targenPosi[0]) / 2,
(sourcePosi[0] + targenPosi[0]) / 2,
(sourcePosi[0] + targenPosi[0]) / 2]
mc.move(midPoint[0], midPoint[1], midPoint[2], grp, a=True)
mc.parent(Temp.name(), grp)
mc.setAttr('%s.s%s'%(grp, flipAxis.lower()), -1)
#- freeze transformations
mc.parent(Temp.name(), targentControl)
mc.makeIdentity(Temp.name(), apply=True, t=True, r=True, s=True)
#- get original shapes
originalShapes = mc.listRelatives(targentControl, s=True, path=True, type='nurbsCurve')
#- parent new shapes
shapes = mc.listRelatives(Temp.name(), s=True, path=True, type='nurbsCurve')
for shp in shapes:
mc.setAttr('%s.ovc'%shp, mc.getAttr('%s.ovc'%originalShapes[0]))
mc.delete(originalShapes)
mc.parent(shapes, targentControl, s=True, r=True)
for shp in shapes:
mc.rename(shp, '%sShape'%targentControl)
#- delete temp
mc.delete(tempx, Temp.name(), grp)
| [
"[email protected]"
] | |
7e785828ecc952b6eabfc9890f7df3d4a90a054d | b71e91d4eb55b6826dbe378180aa7b2b8a717bdf | /Capitulo10/exercicio109_v2.py | b5df9d68ab21d3a0935e8bfaac4dc18ddb8237c3 | [] | no_license | gustavopierre/think_python | 49a9ceb50f760b41f6fbac54a07f6b394aa8d637 | a3ad6e660db4e6ce2aa105f5084e585f95936867 | refs/heads/main | 2023-03-24T23:48:29.415573 | 2021-03-15T22:15:30 | 2021-03-15T22:15:30 | 348,137,048 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 125 | py | t = []
fin = open('./Capitulo9/words.txt')
for line in fin:
word = line.strip()
t = t + [word]
fin.close()
print(t)
| [
"[email protected]"
] | |
35a1716ba124832d9905c592d9e96d33a27971ee | eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7 | /google/cloud/dialogflow/v2/dialogflow-v2-py/google/cloud/dialogflow_v2/services/intents/pagers.py | f5030b36777fe129d348bab6f6e1d7c6da9472c0 | [
"Apache-2.0"
] | permissive | Tryweirder/googleapis-gen | 2e5daf46574c3af3d448f1177eaebe809100c346 | 45d8e9377379f9d1d4e166e80415a8c1737f284d | refs/heads/master | 2023-04-05T06:30:04.726589 | 2021-04-13T23:35:20 | 2021-04-13T23:35:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,626 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional
from google.cloud.dialogflow_v2.types import intent
class ListIntentsPager:
"""A pager for iterating through ``list_intents`` requests.
This class thinly wraps an initial
:class:`google.cloud.dialogflow_v2.types.ListIntentsResponse` object, and
provides an ``__iter__`` method to iterate through its
``intents`` field.
If there are more pages, the ``__iter__`` method will make additional
``ListIntents`` requests and continue to iterate
through the ``intents`` field on the
corresponding responses.
All the usual :class:`google.cloud.dialogflow_v2.types.ListIntentsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., intent.ListIntentsResponse],
request: intent.ListIntentsRequest,
response: intent.ListIntentsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.dialogflow_v2.types.ListIntentsRequest):
The initial request object.
response (google.cloud.dialogflow_v2.types.ListIntentsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = intent.ListIntentsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(self) -> Iterable[intent.ListIntentsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[intent.Intent]:
for page in self.pages:
yield from page.intents
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
class ListIntentsAsyncPager:
"""A pager for iterating through ``list_intents`` requests.
This class thinly wraps an initial
:class:`google.cloud.dialogflow_v2.types.ListIntentsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``intents`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListIntents`` requests and continue to iterate
through the ``intents`` field on the
corresponding responses.
All the usual :class:`google.cloud.dialogflow_v2.types.ListIntentsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(self,
method: Callable[..., Awaitable[intent.ListIntentsResponse]],
request: intent.ListIntentsRequest,
response: intent.ListIntentsResponse,
*,
metadata: Sequence[Tuple[str, str]] = ()):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
request (google.cloud.dialogflow_v2.types.ListIntentsRequest):
The initial request object.
response (google.cloud.dialogflow_v2.types.ListIntentsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
self._method = method
self._request = intent.ListIntentsRequest(request)
self._response = response
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
async def pages(self) -> AsyncIterable[intent.ListIntentsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
self._response = await self._method(self._request, metadata=self._metadata)
yield self._response
def __aiter__(self) -> AsyncIterable[intent.Intent]:
async def async_generator():
async for page in self.pages:
for response in page.intents:
yield response
return async_generator()
def __repr__(self) -> str:
return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
6d4f64ec2045276b56dba14ee6b4ad2b1dff62c4 | dab891f601d33dbab869672eb035f6bb1c79c8d8 | /dirs/crons.py | 683aabcdf865b6fbc0b22a2872b37ecf2551f851 | [] | no_license | codeforcauseorg-archive/Py-Boot-2021 | c8835ed4af2b2cdd1f80d5dff9a12676ef613ea6 | 6734e7f9b0a8e3c1eb46de11d095fae1c4c49ec7 | refs/heads/main | 2023-07-17T23:14:47.204035 | 2021-08-24T06:26:02 | 2021-08-24T06:26:02 | 392,699,382 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | from crontab import CronTab
cron = CronTab(user='ganga')
job = cron.new(command='python file-dispenser.py')
job.minute.every(1)
cron.write()
| [
"[email protected]"
] | |
9ad6be4c78e74806f22c40d8736604e560f03069 | 59f635ca237f829c9f420ea7e97e150845e7c131 | /src/fuzzyLogicRule/variable/variable.py | 36d970ec8f610c82ef214c5a451c40efefa22bff | [] | no_license | Lukeeeeee/FuzzyInferenceWithDRL | 38256d4d5f01b552779765a5016ef444977114a4 | 5dbe4ded544d779a81d72917f2cef03c67b0c4d6 | refs/heads/master | 2021-03-24T02:48:17.175094 | 2017-09-12T01:19:12 | 2017-09-12T01:19:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 737 | py |
class Variable(object):
def __init__(self, name, mf, range):
self.name = name
self.mf = mf
self.linguistic_label = []
for mf_i in mf:
self.linguistic_label.append(mf_i.name)
self.upper_range = range[1]
self.lower_range = range[0]
self._value = {}
self._degree = {}
for linguistic_i in self.linguistic_label:
self.degree[linguistic_i] = 0.0
@property
def value(self):
return self._value
@value.setter
def value(self, new_val):
self._value = new_val
@property
def degree(self):
return self._degree
@degree.setter
def degree(self, new_degree):
self._degree = new_degree
| [
"[email protected]"
] | |
c7c889636b85cb12e2266052560382d35d7e43d5 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/6/odu.py | ffb5ddfec684b20676e433f2e985814acd853b04 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'oDU':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
eb15b4125424c6865b42ca535067bf7e590e2d1e | c697122555fa82d993f37c89f17ba98dd5fe502f | /src/pyams_table/tests/__init__.py | 7432d6657437b20da080ab490faf7c03d172abd7 | [
"ZPL-2.1"
] | permissive | Py-AMS/pyams-table | d9cdc9019a9aebc3e685eef243214f0ebc65301c | 5b94a4cb2fa8874372f5ea40a202940881ba86b9 | refs/heads/master | 2021-07-11T11:31:36.772976 | 2021-03-09T12:49:10 | 2021-03-09T12:49:10 | 234,533,290 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 800 | py | #
# Copyright (c) 2015-2019 Thierry Florac <tflorac AT ulthar.net>
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
"""
Generic test cases for pyams_table doctests
"""
__docformat__ = 'restructuredtext'
import os
import sys
def get_package_dir(value):
"""Get package directory"""
package_dir = os.path.split(value)[0]
if package_dir not in sys.path:
sys.path.append(package_dir)
return package_dir
| [
"[email protected]"
] | |
73a8edbc10cf9a95bdf7fe0998a45479bc51ee6a | 2e9e994e17456ed06970dccb55c18dc0cad34756 | /atcoder/abc/075/D/D.py | bb36bb97e08ed60220e3bd3641780bbfbf444e97 | [] | no_license | ksomemo/Competitive-programming | a74e86b5e790c6e68e9642ea9e5332440cb264fc | 2a12f7de520d9010aea1cd9d61b56df4a3555435 | refs/heads/master | 2020-12-02T06:46:13.666936 | 2019-05-27T04:08:01 | 2019-05-27T04:08:01 | 96,894,691 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,883 | py | def main():
N, K = map(int, input().split())
x, y = zip(*(
map(int, input().split())
for _ in range(N)
))
f(N, K, x, y)
def f(N, K, x, y):
"""
制約
* 2≦K≦N≦50
* −10^9≦xi,yi≦10^9 (1≦i≦N)
* xi≠xj (1≦i<j≦N)
* yi≠yj (1≦i<j≦N)
* 入力値はすべて整数である。(21:50 追記)
長方形の内部にN点のうち
K個以上の点を含みつつ、
それぞれの辺がX軸かY軸に平行な長方形を考えます。
→最小の面積
制約より、
xyの範囲を全探索はTLE
点は重ならないかつ、50以下
→点の座標から求めれば間に合いそう
例題1より、
全ての点を含む場合、
abs(x_max-x_min) * abs(y_max-y_min)
=> 追記:
これを分かったのに図示しなかったため、
点が角ではなく辺上にのることに気づかず…
点同士の組合せになってしまった(座標同士の組合せと気づけたはず)
例題2より
1つの場合1、ただし1*1の長方形より最大4点含む
→この例の場合、4点から2点以上で面積1
(x_2nd-x_min)*(y_max-y_min)
→x_maxを除いた点が含まれる
→50*50では済まない
y
↑
|*...
|.*..
|...*
|*...
------→x
(0,0)
(0,4)
(1,3)
(3,1)
各点を結んだときの面積の個数:O(N*N)
面積求めたあとの点が含まれる個数の計算量: O(N)
=> O(N^3) => 50^3 => 1.25 * 10^5 => OK
点を必ず含む場所からの面積
=> そこからでなくてよい, むしろ広くなる
=>
"""
ans = abs(max(x) - min(x)) * abs(max(y) - min(y))
if K == N:
print(ans)
return
def r(x, y, x_start=0, y_start=0):
for xi in range(x_start, N):
for yi in range(y_start, N):
yield x[xi], y[yi], xi, yi
# for x1, y1 in zip(x, y):
# for x2, y2 in zip(x, y):
# for x1, y1 in r(x, y):
# for x2, y2 in r(x, y):
sorted_x = sorted(x)
sorted_y = sorted(y)
for min_x, min_y, xi, yi in r(sorted_x, sorted_y):
for max_x, max_y, _, _ in r(sorted_x, sorted_y, x_start=xi+1, y_start=yi+1):
# s = abs(x1-x2) * abs(y1-y2)
# if s == 0:
# continue
# min_x = min(x1, x2)
# max_x = max(x1, x2)
# min_y = min(y1, y2)
# max_y = max(y1, y2)
contains_count = 0
for x3, y3 in zip(x, y):
if min_x <= x3 <= max_x and min_y <= y3 <= max_y:
contains_count += 1
if contains_count >= K:
s = (max_x - min_x) * (max_y - min_y)
ans = min(ans, s)
print(ans)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
5145be87d31768c82f2e05727c0de10bf98fbc32 | 1a4bc1a11fdb3f714f22f5e0e826b47aa0569de2 | /projects/project03/tests/q3_1_9.py | 7ba9201ae87f09d05ffc4fe63d8fde616069e11c | [] | no_license | taylorgibson/ma4110-fa21 | 201af7a044fd7d99140c68c48817306c18479610 | a306e1b6e7516def7de968781f6c8c21deebeaf5 | refs/heads/main | 2023-09-05T21:31:44.259079 | 2021-11-18T17:42:15 | 2021-11-18T17:42:15 | 395,439,687 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 649 | py | test = { 'name': 'q3_1_9',
'points': None,
'suites': [ { 'cases': [ { 'code': ">>> genre_and_distances.take(np.arange(7)).group('Genre').index_by('Genre')[my_assigned_genre][0].item('count') >= 4\nTrue",
'hidden': False,
'locked': False},
{'code': ">>> my_assigned_genre_was_correct == (my_assigned_genre == 'thriller')\nTrue", 'hidden': False, 'locked': False}],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'}]}
| [
"[email protected]"
] | |
29c53c9dd989b92da644998540979d855bafd45c | e262e64415335060868e9f7f73ab8701e3be2f7b | /.history/Test002/数据类型_20201205183838.py | 68110a4f1f601b98f39c8628e07060a8e42e6692 | [] | no_license | Allison001/developer_test | 6e211f1e2bd4287ee26fd2b33baf1c6a8d80fc63 | b8e04b4b248b0c10a35e93128a5323165990052c | refs/heads/master | 2023-06-18T08:46:40.202383 | 2021-07-23T03:31:54 | 2021-07-23T03:31:54 | 322,807,303 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,818 | py | # fruits = ['orange', 'apple', 'pear', 'banana', 'kiwi', 'apple', 'banana']
# print(fruits.count("apple"))
# a = fruits.index("banana",4)
# print(a)
# fruits.reverse()
# print(fruits)
# fruits.append("daka")
# print(fruits)
# print(fruits.sort)
# a = fruits.pop(0)
# print(a)
# print(fruits)
# number = [1,2,45,3,7,24,3]
# print(number.sort(reverse=True))
# from collections import deque
# queue = deque(["Eric", "John", "Michael"])
# queue.append("Terry")
# queue.append("Graham")
# a= queue.popleft()
# print(a)
# b = queue.popleft()
# print(b)
# print(queue)
# number = [1,2,3,4]
# number.append(5)
# number.append(6)
# print(number)
# number.pop()
# number.pop()
# print(number)
# lista = []
# for i in range(1,10):
# lista.append(i**2)
# print(lista)
# number = list(map(lambda x: x**2, range(1,10)))
# print(number)
# number = [i**2 for i in range(1,10)]
# print(number)
# number1= [(x,y) for x in [1,2,3] for y in [3,1,4] if x != y]
# print(number1)
# lis2 = []
# for x in [1,2,3]:
# for y in [3,1,4]:
# if x != y:
# lis2.append(x,y)
# print(number1)
# ver = [1,2,3]
# lista = [i**2 for i in ver]
# print(lista)
# ver1 = [-1,-2,3,4,-5]
# list2 = [i**2 for i in ver1 if i>0]
# print(list2)
# list3 = [abs(i) for i in ver1]
# print(list3)
# freshfruit = [' banana', ' loganberry ', 'passion fruit ']
# ab = [i.strip() for i in freshfruit]
# print(ab)
# list4 =[(x,x**2) for x in range(10)]
# print(list4)
# ver =[[1,2,3],[4,5,6],[7,8,9]]
# list5 = [y for i in ver for y in i]
# print(list5)
# from math import pi
# pia = 1.1323123
# for i in range(6):
# print(round(pia,i))
# list6 = [round(pia,i) for i in range(6)]
# print(list6)
#交换行和列
row_col = [
[1,4,7],
[2,5,8],
[3,6,9]
]
list9 = []
for i in row_col:
| [
"[email protected]"
] | |
e5ed60b0ee074f5aef6c1a6a87a551154b7fa049 | 3bda43b935e8009e1c57afbb3538a6fc595f6a6c | /pattern9.py | 5ca6f2d5b70097f25abdccdf6cdfa5c71bb47488 | [] | no_license | Somi-Singh/pattern_loop | 51639ee4c01c9cc1aa50e6aa1a7471dea977e499 | aa5881f0098cfcdc54428237159ba0d33832cc33 | refs/heads/main | 2023-06-06T04:28:36.608971 | 2021-07-01T10:55:40 | 2021-07-01T10:55:40 | 382,000,674 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 105 | py | i=1
while i<=4:
j=4
while j>=i:
j-=1
print(chr(1+64),end="")
print()
i+=1 | [
"[email protected]"
] | |
fb9cbb94cc255f67675688194b2756a15c9c4dca | 9e20dd7cd0105122f0e164aa0966fc37691d5042 | /card.py | be56121910a6aa12020005a8ca6c92c4d01710da | [] | no_license | RocketMirror/AtCoder_Practice | c650fd3ea1322ecda535e40f6ab63251085cecbe | 32e4c346b65d670ba6642f815b1a04e8b7c5f241 | refs/heads/master | 2023-07-09T14:25:41.052647 | 2021-08-08T17:40:33 | 2021-08-08T17:40:33 | 394,016,650 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 374 | py | n = int (input())
dic = {}
for _ in range (n):
s = str (input())
if s in dic.keys():
dic[s] += 1
else:
dic[s] = 1
m = int (input())
for _ in range (m):
t = str (input())
if t in dic.keys():
dic[t] -= 1
else:
dic[t] = -1
dic = sorted (dic.values(), reverse= True)
if dic[0] < 0:
print (0)
else:
print (dic[0])
| [
"[email protected]"
] | |
ca6d6152b3ba3adb92fb1a3b4b6d72c5875271d5 | ec5e4dacb30800828ae4d68f9d87db523293ab65 | /fb_post/views/create_comment/tests/test_case_01.py | d3a4ca4366ac755f7266092eef1c459667e60275 | [] | no_license | raviteja1766/fb_post_learning | 54022066ba727220433cb72c43458f9cb6164b24 | 889718fc8f138888aea1b66455fa68c000a02091 | refs/heads/master | 2022-11-08T17:39:40.275922 | 2020-06-25T11:19:28 | 2020-06-25T11:19:28 | 274,897,124 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 952 | py | """
# TODO: Update test case description
"""
from django_swagger_utils.utils.test import CustomAPITestCase
from . import APP_NAME, OPERATION_NAME, REQUEST_METHOD, URL_SUFFIX
REQUEST_BODY = """
{
"content": "string"
}
"""
TEST_CASE = {
"request": {
"path_params": {"post_id": "1234"},
"query_params": {},
"header_params": {},
"securities": {"oauth": {"tokenUrl": "http://auth.ibtspl.com/oauth2/", "flow": "password", "scopes": ["superuser"], "type": "oauth2"}},
"body": REQUEST_BODY,
},
}
class TestCase01CreateCommentAPITestCase(CustomAPITestCase):
app_name = APP_NAME
operation_name = OPERATION_NAME
request_method = REQUEST_METHOD
url_suffix = URL_SUFFIX
test_case_dict = TEST_CASE
def test_case(self):
self.default_test_case() # Returns response object.
# Which can be used for further response object checks.
# Add database state checks here. | [
"[email protected]"
] | |
8d2be2f7615f7caa3925f9200b966752a6b53993 | f878260bbca242a15cd3b069ed40f5c0704a771b | /datacat/db/__init__.py | 7eb57874459ae91af67887207204bc5d2d170520 | [] | no_license | rshk-archive/datacat-poc-140825 | 2485c561746ea70852fdbfd9b46eee97edc6cda7 | 626ad8b36655c69e93093be1d74cfb503ec9ba19 | refs/heads/master | 2020-06-01T04:16:32.752067 | 2014-09-26T14:02:03 | 2014-09-26T14:02:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,146 | py | from collections import MutableMapping
import json
import functools
from flask import g
import psycopg2
import psycopg2.extras
from werkzeug.local import LocalProxy
def connect(database, user=None, password=None, host='localhost', port=5432):
conn = psycopg2.connect(database=database, user=user, password=password,
host=host, port=port)
conn.cursor_factory = psycopg2.extras.DictCursor
conn.autocommit = False
return conn
def create_tables(conn):
"""
Create database schema for a given connection.
"""
# We need to be in autocommit mode (i.e. out of transactions)
# in order to create tables / do administrative stuff..
if not conn.autocommit:
raise ValueError("Was expecting a connection with autocommit on")
# ------------------------------------------------------------
# See this: http://stackoverflow.com/questions/18404055
# for creating indices on JSON field items.
#
# We will need to allow defining such indices in the configuration
# but maybe a plugin should be used to handle that..
# ------------------------------------------------------------
with conn.cursor() as cur:
cur.execute("""
CREATE TABLE info (
key CHARACTER VARYING (256) PRIMARY KEY,
value TEXT);
CREATE TABLE dataset (
id SERIAL PRIMARY KEY,
configuration JSON,
ctime TIMESTAMP WITHOUT TIME ZONE,
mtime TIMESTAMP WITHOUT TIME ZONE);
CREATE TABLE resource (
id SERIAL PRIMARY KEY,
metadata JSON,
auto_metadata JSON,
mimetype CHARACTER VARYING (128),
data_oid INTEGER,
ctime TIMESTAMP WITHOUT TIME ZONE,
mtime TIMESTAMP WITHOUT TIME ZONE,
hash VARCHAR(128));
""")
def drop_tables(conn):
if not conn.autocommit:
raise ValueError("Was expecting a connection with autocommit on")
with conn.cursor() as cur:
cur.execute("""
DROP TABLE info;
DROP TABLE dataset;
DROP TABLE resource;
""")
def _cached(key_name):
def decorator(func):
@functools.wraps(func)
def wrapped():
if not hasattr(g, key_name):
setattr(g, key_name, func())
return getattr(g, key_name)
return wrapped
return decorator
# def get_db():
# from flask import current_app
# if not hasattr(g, 'database'):
# g.database = connect(**current_app.config['DATABASE'])
# g.database.autocommit = False
# return g.database
# def get_admin_db():
# from flask import current_app
# if not hasattr(g, 'admin_database'):
# g.admin_database = connect(**current_app.config['DATABASE'])
# g.admin_database.autocommit = True
# return g.admin_database
@_cached('_database')
def get_db():
from flask import current_app
c = connect(**current_app.config['DATABASE'])
c.autocommit = False
return c
@_cached('_admin_database')
def get_admin_db():
from flask import current_app
c = connect(**current_app.config['DATABASE'])
c.autocommit = True
return c
class DbInfoDict(MutableMapping):
def __init__(self, db):
self._db = db
def __getitem__(self, key):
with self._db.cursor() as cur:
cur.execute("""
SELECT * FROM info WHERE "key" = %s;
""", (key,))
row = cur.fetchone()
if row is None:
raise KeyError(key)
return json.loads(row['value'])
def __setitem__(self, key, value):
# Note that the update would be void if anybody deleted
# the key between the two queries! -- but we can be optimistic
# as key deletes are quite infrequent..
value = json.dumps(value)
try:
with self._db, self._db.cursor() as cur:
cur.execute("""
INSERT INTO info (key, value) VALUES (%s, %s)
""", (key, value))
except psycopg2.IntegrityError:
with self._db, self._db.cursor() as cur:
cur.execute("""
UPDATE info SET value=%s WHERE key=%s
""", (value, key))
def __delitem__(self, key):
with self._db, self._db.cursor() as cur:
cur.execute("""
DELETE FROM info WHERE key=%s
""", (key,))
def __iter__(self):
with self._db.cursor() as cur:
cur.execute("SELECT key FROM info;")
for row in cur:
yield row['key']
def iteritems(self):
with self._db.cursor() as cur:
cur.execute("SELECT key, value FROM info;")
for row in cur:
yield row['key'], json.loads(row['value'])
def __len__(self):
with self._db.cursor() as cur:
cur.execute("SELECT count(*) AS count FROM info;")
row = cur.fetchone()
return row['count']
db = LocalProxy(get_db)
admin_db = LocalProxy(get_admin_db)
db_info = LocalProxy(lambda: DbInfoDict(get_db()))
| [
"[email protected]"
] | |
13d04fcb0c47f0b5eaa3044125faa9f70fe6f6cf | f441d86d1de8e1d75057f5c8c92ae012c2e35b92 | /GEOS_Util/coupled_diagnostics/verification/levitus/__init__.py | 23c96548ce4b269997fde00dd498a1783bf51537 | [] | no_license | ddlddl58/GMAO_Shared | 95f992e12b926cf9ec98163d6c62bac78e754efa | e16ddde5c8fab83429d312f5cff43643d9f84c94 | refs/heads/master | 2021-05-20T20:46:26.035810 | 2020-04-01T20:32:10 | 2020-04-01T20:32:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,574 | py | import os
import scipy as sp
from my_lib import dset, grid
import datetime
from dateutil import rrule
__all__=['ctl']
class Ctl(dset.NCDset):
def __init__(self):
name='Levitus'
oceanval=os.environ.get('OCEANVAL',
'/discover/nobackup/projects/gmao/oceanval/verification')
flist=[oceanval+'/levitus/levitus_grd.nc']
t=rrule.rrule(rrule.MONTHLY,dtstart=datetime.date(0001,1,1),count=12)
time=sp.array(t[:],dtype='|O')
# super(Ctl,self).__init__(flist,levname='depth',\
# time=time,name=name,undef=10e11)
super(Ctl,self).__init__(flist,\
time=time,name=name,undef=10e11)
# Something wrong with levels in a datafile
lev=sp.array((0, 10, 20, 30, 50, 75, 100, 125, 150, 200, 250, 300, 400, 500, 600,\
700, 800, 900, 1000, 1100, 1200, 1300, 1400, 1500, 1750, 2000, 2500,\
3000, 3500, 4000, 4500, 5000, 5500))
lon=self.grid['lon']; lat=self.grid['lat']
self.grid=grid.Grid(lon=lon,lat=lat,lev=lev)
def fromfile(self,varname,iind=slice(None),jind=slice(None),kind=slice(None),\
tind=slice(None), dtype=sp.float32):
var=super(Ctl,self).fromfile(varname,iind=iind,jind=jind,kind=kind,\
tind=tind,maskandscale=False,dtype=dtype)
# Applly land mask
var.data=sp.ma.masked_invalid(var.data)
return var
ctl=Ctl()
| [
"[email protected]"
] | |
a135db2c9afff8bc59d6785b2b646725fb751016 | d1376938086c65e8b7d5d0d9618278fd8dfb1cfd | /Python/Scripts/j1.py | 4611b83b23d87d43cd126568360077f5a97b5f03 | [] | no_license | DataEdgeSystems/Project | a6d235b1c0800a9ffb19e6163e69f07918ebce29 | 09ec31b385c7be62275079a7fb1826377d861365 | refs/heads/master | 2022-11-20T18:39:22.850454 | 2020-07-17T06:46:16 | 2020-07-17T06:46:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | py | import turtle
turtle.write("welcome")
turtle.showturtle()
turtle.forward(300)
turtle.down()
turtle.right(90)
turtle.showturtle()
turtle.color("green")
turtle.circle(50)
turtle.home
turtle.goto(100,100)
turtle.showturtle()
| [
"[email protected]"
] | |
e845609c5e19bc9814ba6a8e189484b2eb4d5df5 | 0a07d0f59cd7eb8ce817c74cc114d177f56306e3 | /06_nested_loops/exercise/01_number_pyramid.py | 3a08d4af43ad1af6f05e9b5e8569beb0b3d261cf | [] | no_license | M0673N/Programming-Basics-with-Python | 10c777cec5ed4fcbf9f18dc7c81daa3c7bd406ad | cd23423d49f7fb0423d9f87c5b23dce3275bac21 | refs/heads/main | 2023-05-27T16:13:11.329033 | 2021-06-06T19:00:24 | 2021-06-06T19:00:24 | 360,933,804 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 237 | py | n = int(input())
number = 1
for row in range(1, n + 1):
for col in range(1, row + 1):
if number == n + 1:
break
print(number, end=" ")
number += 1
if number == n + 1:
break
print()
| [
"[email protected]"
] | |
e04a8b7c58fa1ab8782199327a6a27fa183ded49 | 7832e7dc8f1583471af9c08806ce7f1117cd228a | /aliyun-python-sdk-rds/aliyunsdkrds/request/v20140815/StartArchiveSQLLogRequest.py | dea0e8656104d0cc61b24edf93aa28bcc7e5a2f3 | [
"Apache-2.0"
] | permissive | dianplus/aliyun-openapi-python-sdk | d6494850ddf0e66aaf04607322f353df32959725 | 6edf1ed02994245dae1d1b89edc6cce7caa51622 | refs/heads/master | 2023-04-08T11:35:36.216404 | 2017-11-02T12:01:15 | 2017-11-02T12:01:15 | 109,257,597 | 0 | 0 | NOASSERTION | 2023-03-23T17:59:30 | 2017-11-02T11:44:27 | Python | UTF-8 | Python | false | false | 2,742 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class StartArchiveSQLLogRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Rds', '2014-08-15', 'StartArchiveSQLLog','rds')
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_Database(self):
return self.get_query_params().get('Database')
def set_Database(self,Database):
self.add_query_param('Database',Database)
def get_ResourceOwnerAccount(self):
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
def get_OwnerAccount(self):
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self,OwnerAccount):
self.add_query_param('OwnerAccount',OwnerAccount)
def get_EndTime(self):
return self.get_query_params().get('EndTime')
def set_EndTime(self,EndTime):
self.add_query_param('EndTime',EndTime)
def get_DBInstanceId(self):
return self.get_query_params().get('DBInstanceId')
def set_DBInstanceId(self,DBInstanceId):
self.add_query_param('DBInstanceId',DBInstanceId)
def get_StartTime(self):
return self.get_query_params().get('StartTime')
def set_StartTime(self,StartTime):
self.add_query_param('StartTime',StartTime)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_User(self):
return self.get_query_params().get('User')
def set_User(self,User):
self.add_query_param('User',User)
def get_QueryKeywords(self):
return self.get_query_params().get('QueryKeywords')
def set_QueryKeywords(self,QueryKeywords):
self.add_query_param('QueryKeywords',QueryKeywords) | [
"[email protected]"
] | |
3f54e042b195335242ba0e1963abcc48a78e159c | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2179/60671/241345.py | f325de92ff5e7cd5ef53723502109c4b2092086d | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 44 | py | print(1)
print(1)
print(2)
print(2)
print(2) | [
"[email protected]"
] | |
47caea1c49197c9d2faacd6924bd6eee72b8dc49 | 021e550da175c7fe4d7b108553f9d71bb0be1044 | /stubs/Rhino/Rhino-stubs/ApplicationSettings/__init__.pyi | 41b08231861a084621b0b6a9dede1d8d5f50c75b | [
"MIT"
] | permissive | rlldx/pythonstubs | b1d00438ae44618bbce30e61a0072316d6a284e8 | 6338085cae231f3f76ba272e72e670be7bc6e8a2 | refs/heads/master | 2020-05-02T16:54:25.088269 | 2019-03-10T06:05:33 | 2019-03-10T06:05:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 44,839 | pyi | from typing import Tuple, Set, Iterable, List
class PaintColor:
NormalStart = 0
NormalEnd = 1
NormalBorder = 2
HotStart = 3
HotEnd = 4
HotBorder = 5
PressedStart = 6
PressedEnd = 7
PressedBorder = 8
TextEnabled = 9
TextDisabled = 10
MouseOverControlStart = 11
MouseOverControlEnd = 12
MouseOverControlBorder = 13
ActiveCaption = 14
InactiveCaption = 15
PanelBackground = 16
ActiveViewportTitle = 17
InactiveViewportTitle = 18
class WidgetColor:
UAxisColor = 0
VAxisColor = 1
WAxisColor = 2
class AppearanceSettingsState:
@property
def DefaultFontFaceName (self) -> str: ...
@DefaultFontFaceName.setter
def DefaultFontFaceName (self, value : str) -> None: ...
@property
def DefaultLayerColor (self) -> Color: ...
@DefaultLayerColor.setter
def DefaultLayerColor (self, value : Color) -> None: ...
@property
def SelectedObjectColor (self) -> Color: ...
@SelectedObjectColor.setter
def SelectedObjectColor (self, value : Color) -> None: ...
@property
def LockedObjectColor (self) -> Color: ...
@LockedObjectColor.setter
def LockedObjectColor (self, value : Color) -> None: ...
@property
def WorldCoordIconXAxisColor (self) -> Color: ...
@WorldCoordIconXAxisColor.setter
def WorldCoordIconXAxisColor (self, value : Color) -> None: ...
@property
def WorldCoordIconYAxisColor (self) -> Color: ...
@WorldCoordIconYAxisColor.setter
def WorldCoordIconYAxisColor (self, value : Color) -> None: ...
@property
def WorldCoordIconZAxisColor (self) -> Color: ...
@WorldCoordIconZAxisColor.setter
def WorldCoordIconZAxisColor (self, value : Color) -> None: ...
@property
def TrackingColor (self) -> Color: ...
@TrackingColor.setter
def TrackingColor (self, value : Color) -> None: ...
@property
def FeedbackColor (self) -> Color: ...
@FeedbackColor.setter
def FeedbackColor (self, value : Color) -> None: ...
@property
def DefaultObjectColor (self) -> Color: ...
@DefaultObjectColor.setter
def DefaultObjectColor (self, value : Color) -> None: ...
@property
def ViewportBackgroundColor (self) -> Color: ...
@ViewportBackgroundColor.setter
def ViewportBackgroundColor (self, value : Color) -> None: ...
@property
def FrameBackgroundColor (self) -> Color: ...
@FrameBackgroundColor.setter
def FrameBackgroundColor (self, value : Color) -> None: ...
@property
def CommandPromptTextColor (self) -> Color: ...
@CommandPromptTextColor.setter
def CommandPromptTextColor (self, value : Color) -> None: ...
@property
def CommandPromptHypertextColor (self) -> Color: ...
@CommandPromptHypertextColor.setter
def CommandPromptHypertextColor (self, value : Color) -> None: ...
@property
def CommandPromptBackgroundColor (self) -> Color: ...
@CommandPromptBackgroundColor.setter
def CommandPromptBackgroundColor (self, value : Color) -> None: ...
@property
def CrosshairColor (self) -> Color: ...
@CrosshairColor.setter
def CrosshairColor (self, value : Color) -> None: ...
@property
def PageviewPaperColor (self) -> Color: ...
@PageviewPaperColor.setter
def PageviewPaperColor (self, value : Color) -> None: ...
@property
def CurrentLayerBackgroundColor (self) -> Color: ...
@CurrentLayerBackgroundColor.setter
def CurrentLayerBackgroundColor (self, value : Color) -> None: ...
@property
def EchoPromptsToHistoryWindow (self) -> bool: ...
@EchoPromptsToHistoryWindow.setter
def EchoPromptsToHistoryWindow (self, value : bool) -> None: ...
@property
def EchoCommandsToHistoryWindow (self) -> bool: ...
@EchoCommandsToHistoryWindow.setter
def EchoCommandsToHistoryWindow (self, value : bool) -> None: ...
@property
def ShowFullPathInTitleBar (self) -> bool: ...
@ShowFullPathInTitleBar.setter
def ShowFullPathInTitleBar (self, value : bool) -> None: ...
@property
def ShowCrosshairs (self) -> bool: ...
@ShowCrosshairs.setter
def ShowCrosshairs (self, value : bool) -> None: ...
@property
def GridThinLineColor (self) -> Color: ...
@GridThinLineColor.setter
def GridThinLineColor (self, value : Color) -> None: ...
@property
def GridThickLineColor (self) -> Color: ...
@GridThickLineColor.setter
def GridThickLineColor (self, value : Color) -> None: ...
@property
def GridXAxisLineColor (self) -> Color: ...
@GridXAxisLineColor.setter
def GridXAxisLineColor (self, value : Color) -> None: ...
@property
def GridYAxisLineColor (self) -> Color: ...
@GridYAxisLineColor.setter
def GridYAxisLineColor (self, value : Color) -> None: ...
@property
def GridZAxisLineColor (self) -> Color: ...
@GridZAxisLineColor.setter
def GridZAxisLineColor (self, value : Color) -> None: ...
class CommandPromptPosition:
Top = 0
Bottom = 1
Floating = 2
Hidden = 3
class AppearanceSettings:
def GetDefaultState () -> AppearanceSettingsState: ...
def GetCurrentState () -> AppearanceSettingsState: ...
def RestoreDefaults () -> None: ...
def UpdateFromState (state : AppearanceSettingsState) -> None: ...
@property
def DefaultFontFaceName () -> str: ...
def GetPaintColor (whichColor : PaintColor) -> Color: ...
def SetPaintColor (whichColor : PaintColor, c : Color) -> None: ...
def SetPaintColor (whichColor : PaintColor, c : Color, forceUiUpdate : bool) -> None: ...
@property
def UsePaintColors () -> bool: ...
def GetWidgetColor (whichColor : WidgetColor) -> Color: ...
def SetWidgetColor (whichColor : WidgetColor, c : Color) -> None: ...
def SetWidgetColor (whichColor : WidgetColor, c : Color, forceUiUpdate : bool) -> None: ...
@property
def DefaultLayerColor () -> Color: ...
@DefaultLayerColor.setter
def DefaultLayerColor (value : Color) -> None: ...
@property
def SelectedObjectColor () -> Color: ...
@SelectedObjectColor.setter
def SelectedObjectColor (value : Color) -> None: ...
@property
def LockedObjectColor () -> Color: ...
@LockedObjectColor.setter
def LockedObjectColor (value : Color) -> None: ...
@property
def WorldCoordIconXAxisColor () -> Color: ...
@WorldCoordIconXAxisColor.setter
def WorldCoordIconXAxisColor (value : Color) -> None: ...
@property
def WorldCoordIconYAxisColor () -> Color: ...
@WorldCoordIconYAxisColor.setter
def WorldCoordIconYAxisColor (value : Color) -> None: ...
@property
def WorldCoordIconZAxisColor () -> Color: ...
@WorldCoordIconZAxisColor.setter
def WorldCoordIconZAxisColor (value : Color) -> None: ...
@property
def TrackingColor () -> Color: ...
@TrackingColor.setter
def TrackingColor (value : Color) -> None: ...
@property
def FeedbackColor () -> Color: ...
@FeedbackColor.setter
def FeedbackColor (value : Color) -> None: ...
@property
def DefaultObjectColor () -> Color: ...
@DefaultObjectColor.setter
def DefaultObjectColor (value : Color) -> None: ...
@property
def ViewportBackgroundColor () -> Color: ...
@ViewportBackgroundColor.setter
def ViewportBackgroundColor (value : Color) -> None: ...
@property
def FrameBackgroundColor () -> Color: ...
@FrameBackgroundColor.setter
def FrameBackgroundColor (value : Color) -> None: ...
@property
def CommandPromptTextColor () -> Color: ...
@CommandPromptTextColor.setter
def CommandPromptTextColor (value : Color) -> None: ...
@property
def CommandPromptHypertextColor () -> Color: ...
@CommandPromptHypertextColor.setter
def CommandPromptHypertextColor (value : Color) -> None: ...
@property
def CommandPromptBackgroundColor () -> Color: ...
@CommandPromptBackgroundColor.setter
def CommandPromptBackgroundColor (value : Color) -> None: ...
@property
def CrosshairColor () -> Color: ...
@CrosshairColor.setter
def CrosshairColor (value : Color) -> None: ...
@property
def PageviewPaperColor () -> Color: ...
@PageviewPaperColor.setter
def PageviewPaperColor (value : Color) -> None: ...
@property
def CurrentLayerBackgroundColor () -> Color: ...
@CurrentLayerBackgroundColor.setter
def CurrentLayerBackgroundColor (value : Color) -> None: ...
@property
def EditCandidateColor () -> Color: ...
@EditCandidateColor.setter
def EditCandidateColor (value : Color) -> None: ...
@property
def GridThinLineColor () -> Color: ...
@GridThinLineColor.setter
def GridThinLineColor (value : Color) -> None: ...
@property
def GridThickLineColor () -> Color: ...
@GridThickLineColor.setter
def GridThickLineColor (value : Color) -> None: ...
@property
def GridXAxisLineColor () -> Color: ...
@GridXAxisLineColor.setter
def GridXAxisLineColor (value : Color) -> None: ...
@property
def GridYAxisLineColor () -> Color: ...
@GridYAxisLineColor.setter
def GridYAxisLineColor (value : Color) -> None: ...
@property
def GridZAxisLineColor () -> Color: ...
@GridZAxisLineColor.setter
def GridZAxisLineColor (value : Color) -> None: ...
@property
def CommandPromptPosition () -> CommandPromptPosition: ...
@CommandPromptPosition.setter
def CommandPromptPosition (value : CommandPromptPosition) -> None: ...
@property
def EchoPromptsToHistoryWindow () -> bool: ...
@EchoPromptsToHistoryWindow.setter
def EchoPromptsToHistoryWindow (value : bool) -> None: ...
@property
def EchoCommandsToHistoryWindow () -> bool: ...
@EchoCommandsToHistoryWindow.setter
def EchoCommandsToHistoryWindow (value : bool) -> None: ...
@property
def ShowFullPathInTitleBar () -> bool: ...
@ShowFullPathInTitleBar.setter
def ShowFullPathInTitleBar (value : bool) -> None: ...
@property
def ShowCrosshairs () -> bool: ...
@ShowCrosshairs.setter
def ShowCrosshairs (value : bool) -> None: ...
@property
def ShowSideBar () -> bool: ...
@ShowSideBar.setter
def ShowSideBar (value : bool) -> None: ...
@property
def MenuVisible () -> bool: ...
@MenuVisible.setter
def MenuVisible (value : bool) -> None: ...
@property
def LanguageIdentifier () -> int: ...
@LanguageIdentifier.setter
def LanguageIdentifier (value : int) -> None: ...
@property
def PreviousLanguageIdentifier () -> int: ...
@PreviousLanguageIdentifier.setter
def PreviousLanguageIdentifier (value : int) -> None: ...
def InitialMainWindowPosition () -> Tuple[bool, Rectangle]: ...
class CommandAliasList:
@property
def Count () -> int: ...
def GetNames () -> Set(str): ...
def Clear () -> None: ...
def GetMacro (alias : str) -> str: ...
def SetMacro (alias : str, macro : str) -> bool: ...
def Add (alias : str, macro : str) -> bool: ...
def Delete (alias : str) -> bool: ...
def IsAlias (alias : str) -> bool: ...
def ToDictionary () -> Dictionary: ...
def IsDefault () -> bool: ...
def GetDefaults () -> Dictionary: ...
class EdgeAnalysisSettingsState:
@property
def ShowEdgeColor (self) -> Color: ...
@ShowEdgeColor.setter
def ShowEdgeColor (self, value : Color) -> None: ...
@property
def ShowEdges (self) -> int: ...
@ShowEdges.setter
def ShowEdges (self, value : int) -> None: ...
class EdgeAnalysisSettings:
def GetDefaultState () -> EdgeAnalysisSettingsState: ...
def GetCurrentState () -> EdgeAnalysisSettingsState: ...
def RestoreDefaults () -> None: ...
def UpdateFromState (state : EdgeAnalysisSettingsState) -> None: ...
@property
def ShowEdgeColor () -> Color: ...
@ShowEdgeColor.setter
def ShowEdgeColor (value : Color) -> None: ...
@property
def ShowEdges () -> int: ...
@ShowEdges.setter
def ShowEdges (value : int) -> None: ...
class FileSettingsState:
@property
def AutoSaveInterval (self) -> TimeSpan: ...
@AutoSaveInterval.setter
def AutoSaveInterval (self, value : TimeSpan) -> None: ...
@property
def AutoSaveEnabled (self) -> bool: ...
@AutoSaveEnabled.setter
def AutoSaveEnabled (self, value : bool) -> None: ...
@property
def AutoSaveMeshes (self) -> bool: ...
@AutoSaveMeshes.setter
def AutoSaveMeshes (self, value : bool) -> None: ...
@property
def SaveViewChanges (self) -> bool: ...
@SaveViewChanges.setter
def SaveViewChanges (self, value : bool) -> None: ...
@property
def FileLockingEnabled (self) -> bool: ...
@FileLockingEnabled.setter
def FileLockingEnabled (self, value : bool) -> None: ...
@property
def FileLockingOpenWarning (self) -> bool: ...
@FileLockingOpenWarning.setter
def FileLockingOpenWarning (self, value : bool) -> None: ...
@property
def ClipboardCopyToPreviousRhinoVersion (self) -> bool: ...
@ClipboardCopyToPreviousRhinoVersion.setter
def ClipboardCopyToPreviousRhinoVersion (self, value : bool) -> None: ...
@property
def ClipboardOnExit (self) -> ClipboardState: ...
@ClipboardOnExit.setter
def ClipboardOnExit (self, value : ClipboardState) -> None: ...
@property
def CreateBackupFiles (self) -> bool: ...
@CreateBackupFiles.setter
def CreateBackupFiles (self, value : bool) -> None: ...
class FileSettings:
def GetDefaultState () -> FileSettingsState: ...
def GetCurrentState () -> FileSettingsState: ...
def GetDataFolder (currentUser : bool) -> str: ...
def RecentlyOpenedFiles () -> Set(str): ...
def AddSearchPath (folder : str, index : int) -> int: ...
def DeleteSearchPath (folder : str) -> bool: ...
def FindFile (fileName : str) -> str: ...
@property
def SearchPathCount () -> int: ...
def GetSearchPaths () -> Set(str): ...
@property
def WorkingFolder () -> str: ...
@WorkingFolder.setter
def WorkingFolder (value : str) -> None: ...
@property
def TemplateFolder () -> str: ...
@TemplateFolder.setter
def TemplateFolder (value : str) -> None: ...
@property
def TemplateFile () -> str: ...
@TemplateFile.setter
def TemplateFile (value : str) -> None: ...
@property
def AutoSaveFile () -> str: ...
@AutoSaveFile.setter
def AutoSaveFile (value : str) -> None: ...
@property
def AutoSaveInterval () -> TimeSpan: ...
@AutoSaveInterval.setter
def AutoSaveInterval (value : TimeSpan) -> None: ...
@property
def AutoSaveEnabled () -> bool: ...
@AutoSaveEnabled.setter
def AutoSaveEnabled (value : bool) -> None: ...
@property
def AutoSaveMeshes () -> bool: ...
@AutoSaveMeshes.setter
def AutoSaveMeshes (value : bool) -> None: ...
def AutoSaveBeforeCommands () -> Set(str): ...
def SetAutoSaveBeforeCommands (commands : Set(str)) -> None: ...
@property
def SaveViewChanges () -> bool: ...
@SaveViewChanges.setter
def SaveViewChanges (value : bool) -> None: ...
@property
def FileLockingEnabled () -> bool: ...
@FileLockingEnabled.setter
def FileLockingEnabled (value : bool) -> None: ...
@property
def FileLockingOpenWarning () -> bool: ...
@FileLockingOpenWarning.setter
def FileLockingOpenWarning (value : bool) -> None: ...
@property
def CreateBackupFiles () -> bool: ...
@CreateBackupFiles.setter
def CreateBackupFiles (value : bool) -> None: ...
@property
def ClipboardCopyToPreviousRhinoVersion () -> bool: ...
@ClipboardCopyToPreviousRhinoVersion.setter
def ClipboardCopyToPreviousRhinoVersion (value : bool) -> None: ...
@property
def ClipboardOnExit () -> ClipboardState: ...
@ClipboardOnExit.setter
def ClipboardOnExit (value : ClipboardState) -> None: ...
@property
def ExecutableFolder () -> str: ...
@property
def InstallFolder () -> DirectoryInfo: ...
@property
def HelpFilePath () -> str: ...
@property
def LocalProfileDataFolder () -> str: ...
@property
def DefaultRuiFile () -> str: ...
class NeverRepeatList:
@property
def UseNeverRepeatList () -> bool: ...
def SetList (commandNames : Set(str)) -> int: ...
def CommandNames () -> Set(str): ...
class MouseSelectMode:
Crossing = 0
Window = 1
Combo = 2
class MiddleMouseMode:
PopupMenu = 0
PopupToolbar = 1
RunMacro = 2
class GeneralSettingsState:
@property
def MouseSelectMode (self) -> MouseSelectMode: ...
@MouseSelectMode.setter
def MouseSelectMode (self, value : MouseSelectMode) -> None: ...
@property
def MaximumPopupMenuLines (self) -> int: ...
@MaximumPopupMenuLines.setter
def MaximumPopupMenuLines (self, value : int) -> None: ...
@property
def MinimumUndoSteps (self) -> int: ...
@MinimumUndoSteps.setter
def MinimumUndoSteps (self, value : int) -> None: ...
@property
def MaximumUndoMemoryMb (self) -> int: ...
@MaximumUndoMemoryMb.setter
def MaximumUndoMemoryMb (self, value : int) -> None: ...
@property
def NewObjectIsoparmCount (self) -> int: ...
@NewObjectIsoparmCount.setter
def NewObjectIsoparmCount (self, value : int) -> None: ...
@property
def MiddleMouseMode (self) -> MiddleMouseMode: ...
@MiddleMouseMode.setter
def MiddleMouseMode (self, value : MiddleMouseMode) -> None: ...
@property
def MiddleMousePopupToolbar (self) -> str: ...
@MiddleMousePopupToolbar.setter
def MiddleMousePopupToolbar (self, value : str) -> None: ...
@property
def MiddleMouseMacro (self) -> str: ...
@MiddleMouseMacro.setter
def MiddleMouseMacro (self, value : str) -> None: ...
@property
def EnableContextMenu (self) -> bool: ...
@EnableContextMenu.setter
def EnableContextMenu (self, value : bool) -> None: ...
@property
def ContextMenuDelay (self) -> TimeSpan: ...
@ContextMenuDelay.setter
def ContextMenuDelay (self, value : TimeSpan) -> None: ...
@property
def AutoUpdateCommandHelp (self) -> bool: ...
@AutoUpdateCommandHelp.setter
def AutoUpdateCommandHelp (self, value : bool) -> None: ...
class GeneralSettings:
def GetDefaultState () -> GeneralSettingsState: ...
def GetCurrentState () -> GeneralSettingsState: ...
@property
def UseExtrusions () -> bool: ...
@property
def MouseSelectMode () -> MouseSelectMode: ...
@MouseSelectMode.setter
def MouseSelectMode (value : MouseSelectMode) -> None: ...
@property
def MaximumPopupMenuLines () -> int: ...
@MaximumPopupMenuLines.setter
def MaximumPopupMenuLines (value : int) -> None: ...
@property
def MinimumUndoSteps () -> int: ...
@MinimumUndoSteps.setter
def MinimumUndoSteps (value : int) -> None: ...
@property
def MaximumUndoMemoryMb () -> int: ...
@MaximumUndoMemoryMb.setter
def MaximumUndoMemoryMb (value : int) -> None: ...
@property
def NewObjectIsoparmCount () -> int: ...
@NewObjectIsoparmCount.setter
def NewObjectIsoparmCount (value : int) -> None: ...
@property
def MiddleMouseMode () -> MiddleMouseMode: ...
@MiddleMouseMode.setter
def MiddleMouseMode (value : MiddleMouseMode) -> None: ...
@property
def MiddleMousePopupToolbar () -> str: ...
@MiddleMousePopupToolbar.setter
def MiddleMousePopupToolbar (value : str) -> None: ...
@property
def MiddleMouseMacro () -> str: ...
@MiddleMouseMacro.setter
def MiddleMouseMacro (value : str) -> None: ...
@property
def EnableContextMenu () -> bool: ...
@EnableContextMenu.setter
def EnableContextMenu (value : bool) -> None: ...
@property
def ContextMenuDelay () -> TimeSpan: ...
@ContextMenuDelay.setter
def ContextMenuDelay (value : TimeSpan) -> None: ...
@property
def AutoUpdateCommandHelp () -> bool: ...
@AutoUpdateCommandHelp.setter
def AutoUpdateCommandHelp (value : bool) -> None: ...
class ClipboardState:
KeepData = 0
DeleteData = 1
PromptWhenBig = 2
class CursorMode:
None = 0
BlackOnWhite = 1
WhiteOnBlack = 2
class OsnapModes:
None = 0
Near = 2
Focus = 8
Center = 32
Vertex = 64
Knot = 128
Quadrant = 512
Midpoint = 2048
Intersection = 8192
End = 131072
Perpendicular = 524288
Tangent = 2097152
Point = 134217728
class PointDisplayMode:
WorldPoint = 0
CplanePoint = 1
class ModelAidSettingsState:
@property
def GridSnap (self) -> bool: ...
@GridSnap.setter
def GridSnap (self, value : bool) -> None: ...
@property
def Ortho (self) -> bool: ...
@Ortho.setter
def Ortho (self, value : bool) -> None: ...
@property
def Planar (self) -> bool: ...
@Planar.setter
def Planar (self, value : bool) -> None: ...
@property
def ProjectSnapToCPlane (self) -> bool: ...
@ProjectSnapToCPlane.setter
def ProjectSnapToCPlane (self, value : bool) -> None: ...
@property
def UseHorizontalDialog (self) -> bool: ...
@UseHorizontalDialog.setter
def UseHorizontalDialog (self, value : bool) -> None: ...
@property
def ExtendTrimLines (self) -> bool: ...
@ExtendTrimLines.setter
def ExtendTrimLines (self, value : bool) -> None: ...
@property
def ExtendToApparentIntersection (self) -> bool: ...
@ExtendToApparentIntersection.setter
def ExtendToApparentIntersection (self, value : bool) -> None: ...
@property
def AltPlusArrow (self) -> bool: ...
@AltPlusArrow.setter
def AltPlusArrow (self, value : bool) -> None: ...
@property
def DisplayControlPolygon (self) -> bool: ...
@DisplayControlPolygon.setter
def DisplayControlPolygon (self, value : bool) -> None: ...
@property
def HighlightControlPolygon (self) -> bool: ...
@HighlightControlPolygon.setter
def HighlightControlPolygon (self, value : bool) -> None: ...
@property
def Osnap (self) -> bool: ...
@Osnap.setter
def Osnap (self, value : bool) -> None: ...
@property
def SnapToLocked (self) -> bool: ...
@SnapToLocked.setter
def SnapToLocked (self, value : bool) -> None: ...
@property
def UniversalConstructionPlaneMode (self) -> bool: ...
@UniversalConstructionPlaneMode.setter
def UniversalConstructionPlaneMode (self, value : bool) -> None: ...
@property
def OrthoAngle (self) -> float: ...
@OrthoAngle.setter
def OrthoAngle (self, value : float) -> None: ...
@property
def NudgeKeyStep (self) -> float: ...
@NudgeKeyStep.setter
def NudgeKeyStep (self, value : float) -> None: ...
@property
def CtrlNudgeKeyStep (self) -> float: ...
@CtrlNudgeKeyStep.setter
def CtrlNudgeKeyStep (self, value : float) -> None: ...
@property
def ShiftNudgeKeyStep (self) -> float: ...
@ShiftNudgeKeyStep.setter
def ShiftNudgeKeyStep (self, value : float) -> None: ...
@property
def OsnapPickboxRadius (self) -> int: ...
@OsnapPickboxRadius.setter
def OsnapPickboxRadius (self, value : int) -> None: ...
@property
def NudgeMode (self) -> int: ...
@NudgeMode.setter
def NudgeMode (self, value : int) -> None: ...
@property
def ControlPolygonDisplayDensity (self) -> int: ...
@ControlPolygonDisplayDensity.setter
def ControlPolygonDisplayDensity (self, value : int) -> None: ...
@property
def OsnapCursorMode (self) -> CursorMode: ...
@OsnapCursorMode.setter
def OsnapCursorMode (self, value : CursorMode) -> None: ...
@property
def OsnapModes (self) -> OsnapModes: ...
@OsnapModes.setter
def OsnapModes (self, value : OsnapModes) -> None: ...
@property
def MousePickboxRadius (self) -> int: ...
@MousePickboxRadius.setter
def MousePickboxRadius (self, value : int) -> None: ...
@property
def PointDisplay (self) -> PointDisplayMode: ...
@PointDisplay.setter
def PointDisplay (self, value : PointDisplayMode) -> None: ...
class ModelAidSettings:
    """Application-wide modeling-aid settings (stub declarations only).

    NOTE(review): members are declared without ``self``, apparently
    mirroring static host-API properties -- confirm against the host API.
    Bodies are intentionally empty (``...``).
    """
    def GetCurrentState () -> ModelAidSettingsState: ...
    def GetDefaultState () -> ModelAidSettingsState: ...
    def UpdateFromState (state : ModelAidSettingsState) -> None: ...
    @property
    def GridSnap () -> bool: ...
    @GridSnap.setter
    def GridSnap (value : bool) -> None: ...
    @property
    def Ortho () -> bool: ...
    @Ortho.setter
    def Ortho (value : bool) -> None: ...
    @property
    def Planar () -> bool: ...
    @Planar.setter
    def Planar (value : bool) -> None: ...
    @property
    def ProjectSnapToCPlane () -> bool: ...
    @ProjectSnapToCPlane.setter
    def ProjectSnapToCPlane (value : bool) -> None: ...
    @property
    def UseHorizontalDialog () -> bool: ...
    @UseHorizontalDialog.setter
    def UseHorizontalDialog (value : bool) -> None: ...
    @property
    def ExtendTrimLines () -> bool: ...
    @ExtendTrimLines.setter
    def ExtendTrimLines (value : bool) -> None: ...
    @property
    def ExtendToApparentIntersection () -> bool: ...
    @ExtendToApparentIntersection.setter
    def ExtendToApparentIntersection (value : bool) -> None: ...
    @property
    def AltPlusArrow () -> bool: ...
    @AltPlusArrow.setter
    def AltPlusArrow (value : bool) -> None: ...
    @property
    def DisplayControlPolygon () -> bool: ...
    @DisplayControlPolygon.setter
    def DisplayControlPolygon (value : bool) -> None: ...
    @property
    def HighlightControlPolygon () -> bool: ...
    @HighlightControlPolygon.setter
    def HighlightControlPolygon (value : bool) -> None: ...
    @property
    def Osnap () -> bool: ...
    @Osnap.setter
    def Osnap (value : bool) -> None: ...
    @property
    def SnapToLocked () -> bool: ...
    @SnapToLocked.setter
    def SnapToLocked (value : bool) -> None: ...
    @property
    def UniversalConstructionPlaneMode () -> bool: ...
    @UniversalConstructionPlaneMode.setter
    def UniversalConstructionPlaneMode (value : bool) -> None: ...
    @property
    def OrthoAngle () -> float: ...
    @OrthoAngle.setter
    def OrthoAngle (value : float) -> None: ...
    @property
    def NudgeKeyStep () -> float: ...
    @NudgeKeyStep.setter
    def NudgeKeyStep (value : float) -> None: ...
    @property
    def CtrlNudgeKeyStep () -> float: ...
    @CtrlNudgeKeyStep.setter
    def CtrlNudgeKeyStep (value : float) -> None: ...
    @property
    def ShiftNudgeKeyStep () -> float: ...
    @ShiftNudgeKeyStep.setter
    def ShiftNudgeKeyStep (value : float) -> None: ...
    @property
    def OsnapPickboxRadius () -> int: ...
    @OsnapPickboxRadius.setter
    def OsnapPickboxRadius (value : int) -> None: ...
    @property
    def NudgeMode () -> int: ...
    @NudgeMode.setter
    def NudgeMode (value : int) -> None: ...
    @property
    def ControlPolygonDisplayDensity () -> int: ...
    @ControlPolygonDisplayDensity.setter
    def ControlPolygonDisplayDensity (value : int) -> None: ...
    @property
    def OsnapCursorMode () -> CursorMode: ...
    @OsnapCursorMode.setter
    def OsnapCursorMode (value : CursorMode) -> None: ...
    @property
    def OsnapModes () -> OsnapModes: ...
    @OsnapModes.setter
    def OsnapModes (value : OsnapModes) -> None: ...
    @property
    def MousePickboxRadius () -> int: ...
    @MousePickboxRadius.setter
    def MousePickboxRadius (value : int) -> None: ...
    @property
    def PointDisplay () -> PointDisplayMode: ...
    @PointDisplay.setter
    def PointDisplay (value : PointDisplayMode) -> None: ...
    @property
    def AutoGumballEnabled () -> bool: ...
    @AutoGumballEnabled.setter
    def AutoGumballEnabled (value : bool) -> None: ...
    @property
    def SnappyGumballEnabled () -> bool: ...
    @SnappyGumballEnabled.setter
    def SnappyGumballEnabled (value : bool) -> None: ...
class ViewSettingsState:
    """Snapshot of viewport navigation settings (instance properties)."""
    @property
    def PanScreenFraction (self) -> float: ...
    @PanScreenFraction.setter
    def PanScreenFraction (self, value : float) -> None: ...
    @property
    def PanReverseKeyboardAction (self) -> bool: ...
    @PanReverseKeyboardAction.setter
    def PanReverseKeyboardAction (self, value : bool) -> None: ...
    @property
    def AlwaysPanParallelViews (self) -> bool: ...
    @AlwaysPanParallelViews.setter
    def AlwaysPanParallelViews (self, value : bool) -> None: ...
    @property
    def ZoomScale (self) -> float: ...
    @ZoomScale.setter
    def ZoomScale (self, value : float) -> None: ...
    @property
    def ZoomExtentsParallelViewBorder (self) -> float: ...
    @ZoomExtentsParallelViewBorder.setter
    def ZoomExtentsParallelViewBorder (self, value : float) -> None: ...
    @property
    def ZoomExtentsPerspectiveViewBorder (self) -> float: ...
    @ZoomExtentsPerspectiveViewBorder.setter
    def ZoomExtentsPerspectiveViewBorder (self, value : float) -> None: ...
    @property
    def RotateCircleIncrement (self) -> int: ...
    @RotateCircleIncrement.setter
    def RotateCircleIncrement (self, value : int) -> None: ...
    @property
    def RotateReverseKeyboard (self) -> bool: ...
    @RotateReverseKeyboard.setter
    def RotateReverseKeyboard (self, value : bool) -> None: ...
    @property
    def RotateToView (self) -> bool: ...
    @RotateToView.setter
    def RotateToView (self, value : bool) -> None: ...
    @property
    def DefinedViewSetCPlane (self) -> bool: ...
    @DefinedViewSetCPlane.setter
    def DefinedViewSetCPlane (self, value : bool) -> None: ...
    @property
    def DefinedViewSetProjection (self) -> bool: ...
    @DefinedViewSetProjection.setter
    def DefinedViewSetProjection (self, value : bool) -> None: ...
    @property
    def SingleClickMaximize (self) -> bool: ...
    @SingleClickMaximize.setter
    def SingleClickMaximize (self, value : bool) -> None: ...
    @property
    def LinkedViewports (self) -> bool: ...
    @LinkedViewports.setter
    def LinkedViewports (self, value : bool) -> None: ...
class ViewSettings:
    """View navigation settings (stub; members declared without ``self``)."""
    def GetDefaultState () -> ViewSettingsState: ...
    def GetCurrentState () -> ViewSettingsState: ...
    def RestoreDefaults () -> None: ...
    def UpdateFromState (state : ViewSettingsState) -> None: ...
    @property
    def PanScreenFraction () -> float: ...
    @PanScreenFraction.setter
    def PanScreenFraction (value : float) -> None: ...
    @property
    def PanReverseKeyboardAction () -> bool: ...
    @PanReverseKeyboardAction.setter
    def PanReverseKeyboardAction (value : bool) -> None: ...
    @property
    def AlwaysPanParallelViews () -> bool: ...
    @AlwaysPanParallelViews.setter
    def AlwaysPanParallelViews (value : bool) -> None: ...
    @property
    def ZoomScale () -> float: ...
    @ZoomScale.setter
    def ZoomScale (value : float) -> None: ...
    @property
    def ZoomExtentsParallelViewBorder () -> float: ...
    @ZoomExtentsParallelViewBorder.setter
    def ZoomExtentsParallelViewBorder (value : float) -> None: ...
    @property
    def ZoomExtentsPerspectiveViewBorder () -> float: ...
    @ZoomExtentsPerspectiveViewBorder.setter
    def ZoomExtentsPerspectiveViewBorder (value : float) -> None: ...
    @property
    def RotateCircleIncrement () -> int: ...
    @RotateCircleIncrement.setter
    def RotateCircleIncrement (value : int) -> None: ...
    @property
    def RotateReverseKeyboard () -> bool: ...
    @RotateReverseKeyboard.setter
    def RotateReverseKeyboard (value : bool) -> None: ...
    @property
    def RotateToView () -> bool: ...
    @RotateToView.setter
    def RotateToView (value : bool) -> None: ...
    @property
    def DefinedViewSetCPlane () -> bool: ...
    @DefinedViewSetCPlane.setter
    def DefinedViewSetCPlane (value : bool) -> None: ...
    @property
    def DefinedViewSetProjection () -> bool: ...
    @DefinedViewSetProjection.setter
    def DefinedViewSetProjection (value : bool) -> None: ...
    @property
    def SingleClickMaximize () -> bool: ...
    @SingleClickMaximize.setter
    def SingleClickMaximize (value : bool) -> None: ...
    @property
    def LinkedViewports () -> bool: ...
    @LinkedViewports.setter
    def LinkedViewports (value : bool) -> None: ...
class OpenGLSettingsState:
    """Snapshot of OpenGL display settings (instance properties)."""
    @property
    def AntialiasLevel (self) -> AntialiasLevel: ...
    @AntialiasLevel.setter
    def AntialiasLevel (self, value : AntialiasLevel) -> None: ...
class OpenGLSettings:
    """OpenGL display settings (stub)."""
    def __init__(self): ...
    def GetDefaultState () -> OpenGLSettingsState: ...
    def GetCurrentState () -> OpenGLSettingsState: ...
    def RestoreDefaults () -> None: ...
    def UpdateFromState (state : OpenGLSettingsState) -> None: ...
    @property
    def AntialiasLevel () -> AntialiasLevel: ...
    @AntialiasLevel.setter
    def AntialiasLevel (value : AntialiasLevel) -> None: ...
class ShortcutKey:
    """Enumeration of keyboard shortcut slots (plain integer constants)."""
    F1 = 0
    F2 = 1
    F3 = 2
    F4 = 3
    F5 = 4
    F6 = 5
    F7 = 6
    F8 = 7
    F9 = 8
    F10 = 9
    F11 = 10
    F12 = 11
    CtrlF1 = 12
    CtrlF2 = 13
    CtrlF3 = 14
    CtrlF4 = 15
    CtrlF5 = 16
    CtrlF6 = 17
    CtrlF7 = 18
    CtrlF8 = 19
    CtrlF9 = 20
    CtrlF10 = 21
    CtrlF11 = 22
    CtrlF12 = 23
    ShiftCtrlF1 = 24
    ShiftCtrlF2 = 25
    ShiftCtrlF3 = 26
    ShiftCtrlF4 = 27
    ShiftCtrlF5 = 28
    ShiftCtrlF6 = 29
    ShiftCtrlF7 = 30
    ShiftCtrlF8 = 31
    ShiftCtrlF9 = 32
    ShiftCtrlF10 = 33
    ShiftCtrlF11 = 34
    ShiftCtrlF12 = 35
    AltCtrlF1 = 36
    AltCtrlF2 = 37
    AltCtrlF3 = 38
    AltCtrlF4 = 39
    AltCtrlF5 = 40
    AltCtrlF6 = 41
    AltCtrlF7 = 42
    AltCtrlF8 = 43
    AltCtrlF9 = 44
    AltCtrlF10 = 45
    AltCtrlF11 = 46
    AltCtrlF12 = 47
    CtrlA = 48
    CtrlB = 49
    CtrlC = 50
    CtrlD = 51
    CtrlE = 52
    CtrlF = 53
    CtrlG = 54
    CtrlH = 55
    CtrlI = 56
    CtrlJ = 57
    CtrlK = 58
    CtrlL = 59
    CtrlM = 60
    CtrlN = 61
    CtrlO = 62
    CtrlP = 63
    CtrlQ = 64
    CtrlR = 65
    CtrlS = 66
    CtrlT = 67
    CtrlU = 68
    CtrlV = 69
    CtrlW = 70
    CtrlX = 71
    CtrlY = 72
    CtrlZ = 73
    ShiftCtrlA = 74
    ShiftCtrlB = 75
    ShiftCtrlC = 76
    ShiftCtrlD = 77
    ShiftCtrlE = 78
    ShiftCtrlF = 79
    ShiftCtrlG = 80
    ShiftCtrlH = 81
    ShiftCtrlI = 82
    ShiftCtrlJ = 83
    ShiftCtrlK = 84
    ShiftCtrlL = 85
    ShiftCtrlM = 86
    ShiftCtrlN = 87
    ShiftCtrlO = 88
    ShiftCtrlP = 89
    ShiftCtrlQ = 90
    ShiftCtrlR = 91
    ShiftCtrlS = 92
    ShiftCtrlT = 93
    ShiftCtrlU = 94
    ShiftCtrlV = 95
    ShiftCtrlW = 96
    ShiftCtrlX = 97
    ShiftCtrlY = 98
    ShiftCtrlZ = 99
    AltCtrlA = 100
    AltCtrlB = 101
    AltCtrlC = 102
    AltCtrlD = 103
    AltCtrlE = 104
    AltCtrlF = 105
    AltCtrlG = 106
    AltCtrlH = 107
    AltCtrlI = 108
    AltCtrlJ = 109
    AltCtrlK = 110
    AltCtrlL = 111
    AltCtrlM = 112
    AltCtrlN = 113
    AltCtrlO = 114
    AltCtrlP = 115
    AltCtrlQ = 116
    AltCtrlR = 117
    AltCtrlS = 118
    AltCtrlT = 119
    AltCtrlU = 120
    AltCtrlV = 121
    AltCtrlW = 122
    AltCtrlX = 123
    AltCtrlY = 124
    AltCtrlZ = 125
    Ctrl0 = 126
    Ctrl1 = 127
    Ctrl2 = 128
    Ctrl3 = 129
    Ctrl4 = 130
    Ctrl5 = 131
    Ctrl6 = 132
    Ctrl7 = 133
    Ctrl8 = 134
    Ctrl9 = 135
    ShiftCtrl0 = 136
    ShiftCtrl1 = 137
    ShiftCtrl2 = 138
    ShiftCtrl3 = 139
    ShiftCtrl4 = 140
    ShiftCtrl5 = 141
    ShiftCtrl6 = 142
    ShiftCtrl7 = 143
    ShiftCtrl8 = 144
    ShiftCtrl9 = 145
    AltCtrl0 = 146
    AltCtrl1 = 147
    AltCtrl2 = 148
    AltCtrl3 = 149
    AltCtrl4 = 150
    AltCtrl5 = 151
    AltCtrl6 = 152
    AltCtrl7 = 153
    AltCtrl8 = 154
    AltCtrl9 = 155
    Home = 156
    End = 157
    CtrlHome = 158
    CtrlEnd = 159
    ShiftHome = 160
    ShiftEnd = 161
    ShiftCtrlHome = 162
    ShiftCtrlEnd = 163
    AltCtrlHome = 164
    AltCtrlEnd = 165
    PageUp = 166
    PageDown = 167
    ShiftPageUp = 168
    ShiftPageDown = 169
    CtrlPageUp = 170
    CtrlPageDown = 171
    ShiftCtrlPageUp = 172
    ShiftCtrlPageDown = 173
    AltCtrlPageUp = 174
    AltCtrlPageDown = 175
class ShortcutKeySettings:
    """Maps a ShortcutKey slot to a command macro string (stub)."""
    def GetMacro (key : ShortcutKey) -> str: ...
    def SetMacro (key : ShortcutKey, macro : str) -> None: ...
class SmartTrackSettingsState:
    """Snapshot of SmartTrack settings (instance properties; stub bodies).

    Fix: ``MaxSmartPoints`` was the only member of this *State* class
    declared without ``self`` (unlike every sibling property here, and
    unlike its own getter/setter pair's siblings); both accessors now
    take ``self`` for consistency.
    """
    @property
    def UseSmartTrack (self) -> bool: ...
    @UseSmartTrack.setter
    def UseSmartTrack (self, value : bool) -> None: ...
    @property
    def UseDottedLines (self) -> bool: ...
    @UseDottedLines.setter
    def UseDottedLines (self, value : bool) -> None: ...
    @property
    def SmartOrtho (self) -> bool: ...
    @SmartOrtho.setter
    def SmartOrtho (self, value : bool) -> None: ...
    @property
    def SmartTangents (self) -> bool: ...
    @SmartTangents.setter
    def SmartTangents (self, value : bool) -> None: ...
    @property
    def ActivationDelayMilliseconds (self) -> int: ...
    @ActivationDelayMilliseconds.setter
    def ActivationDelayMilliseconds (self, value : int) -> None: ...
    @property
    def MaxSmartPoints (self) -> int: ...
    @MaxSmartPoints.setter
    def MaxSmartPoints (self, value : int) -> None: ...
    @property
    def LineColor (self) -> Color: ...
    @LineColor.setter
    def LineColor (self, value : Color) -> None: ...
    @property
    def TanPerpLineColor (self) -> Color: ...
    @TanPerpLineColor.setter
    def TanPerpLineColor (self, value : Color) -> None: ...
    @property
    def PointColor (self) -> Color: ...
    @PointColor.setter
    def PointColor (self, value : Color) -> None: ...
    @property
    def ActivePointColor (self) -> Color: ...
    @ActivePointColor.setter
    def ActivePointColor (self, value : Color) -> None: ...
class SmartTrackSettings:
    """Application-wide SmartTrack settings (stub; members without ``self``)."""
    def GetCurrentState () -> SmartTrackSettingsState: ...
    def GetDefaultState () -> SmartTrackSettingsState: ...
    def UpdateFromState (state : SmartTrackSettingsState) -> None: ...
    @property
    def UseSmartTrack () -> bool: ...
    @UseSmartTrack.setter
    def UseSmartTrack (value : bool) -> None: ...
    @property
    def UseDottedLines () -> bool: ...
    @UseDottedLines.setter
    def UseDottedLines (value : bool) -> None: ...
    @property
    def SmartOrtho () -> bool: ...
    @SmartOrtho.setter
    def SmartOrtho (value : bool) -> None: ...
    @property
    def SmartTangents () -> bool: ...
    @SmartTangents.setter
    def SmartTangents (value : bool) -> None: ...
    @property
    def ActivationDelayMilliseconds () -> int: ...
    @ActivationDelayMilliseconds.setter
    def ActivationDelayMilliseconds (value : int) -> None: ...
    @property
    def MaxSmartPoints () -> int: ...
    @MaxSmartPoints.setter
    def MaxSmartPoints (value : int) -> None: ...
    @property
    def LineColor () -> Color: ...
    @LineColor.setter
    def LineColor (value : Color) -> None: ...
    @property
    def TanPerpLineColor () -> Color: ...
    @TanPerpLineColor.setter
    def TanPerpLineColor (value : Color) -> None: ...
    @property
    def PointColor () -> Color: ...
    @PointColor.setter
    def PointColor (value : Color) -> None: ...
    @property
    def ActivePointColor () -> Color: ...
    @ActivePointColor.setter
    def ActivePointColor (value : Color) -> None: ...
class CursorTooltipSettingsState:
    """Snapshot of cursor-tooltip settings (instance properties)."""
    def __init__(self): ...
    @property
    def TooltipsEnabled (self) -> bool: ...
    @TooltipsEnabled.setter
    def TooltipsEnabled (self, value : bool) -> None: ...
    @property
    def Offset (self) -> Point: ...
    @Offset.setter
    def Offset (self, value : Point) -> None: ...
    @property
    def BackgroundColor (self) -> Color: ...
    @BackgroundColor.setter
    def BackgroundColor (self, value : Color) -> None: ...
    @property
    def TextColor (self) -> Color: ...
    @TextColor.setter
    def TextColor (self, value : Color) -> None: ...
    @property
    def OsnapPane (self) -> bool: ...
    @OsnapPane.setter
    def OsnapPane (self, value : bool) -> None: ...
    @property
    def DistancePane (self) -> bool: ...
    @DistancePane.setter
    def DistancePane (self, value : bool) -> None: ...
    @property
    def PointPane (self) -> bool: ...
    @PointPane.setter
    def PointPane (self, value : bool) -> None: ...
    @property
    def RelativePointPane (self) -> bool: ...
    @RelativePointPane.setter
    def RelativePointPane (self, value : bool) -> None: ...
    @property
    def CommandPromptPane (self) -> bool: ...
    @CommandPromptPane.setter
    def CommandPromptPane (self, value : bool) -> None: ...
    @property
    def AutoSuppress (self) -> bool: ...
    @AutoSuppress.setter
    def AutoSuppress (self, value : bool) -> None: ...
class CursorTooltipSettings:
    """Application-wide cursor-tooltip settings (stub; no ``self``)."""
    def GetCurrentState () -> CursorTooltipSettingsState: ...
    def GetDefaultState () -> CursorTooltipSettingsState: ...
    @property
    def TooltipsEnabled () -> bool: ...
    @TooltipsEnabled.setter
    def TooltipsEnabled (value : bool) -> None: ...
    @property
    def Offset () -> Point: ...
    @Offset.setter
    def Offset (value : Point) -> None: ...
    @property
    def BackgroundColor () -> Color: ...
    @BackgroundColor.setter
    def BackgroundColor (value : Color) -> None: ...
    @property
    def TextColor () -> Color: ...
    @TextColor.setter
    def TextColor (value : Color) -> None: ...
    @property
    def OsnapPane () -> bool: ...
    @OsnapPane.setter
    def OsnapPane (value : bool) -> None: ...
    @property
    def DistancePane () -> bool: ...
    @DistancePane.setter
    def DistancePane (value : bool) -> None: ...
    @property
    def PointPane () -> bool: ...
    @PointPane.setter
    def PointPane (value : bool) -> None: ...
    @property
    def RelativePointPane () -> bool: ...
    @RelativePointPane.setter
    def RelativePointPane (value : bool) -> None: ...
    @property
    def CommandPromptPane () -> bool: ...
    @CommandPromptPane.setter
    def CommandPromptPane (value : bool) -> None: ...
    @property
    def AutoSuppress () -> bool: ...
    @AutoSuppress.setter
    def AutoSuppress (value : bool) -> None: ...
class CurvatureAnalysisSettingsState:
    """Snapshot of curvature-analysis display ranges and style."""
    @property
    def GaussRange (self) -> Interval: ...
    @GaussRange.setter
    def GaussRange (self, value : Interval) -> None: ...
    @property
    def MeanRange (self) -> Interval: ...
    @MeanRange.setter
    def MeanRange (self, value : Interval) -> None: ...
    @property
    def MinRadiusRange (self) -> Interval: ...
    @MinRadiusRange.setter
    def MinRadiusRange (self, value : Interval) -> None: ...
    @property
    def MaxRadiusRange (self) -> Interval: ...
    @MaxRadiusRange.setter
    def MaxRadiusRange (self, value : Interval) -> None: ...
    @property
    def Style (self) -> CurvatureStyle: ...
    @Style.setter
    def Style (self, value : CurvatureStyle) -> None: ...
class CurvatureAnalysisSettings:
    """Application-wide curvature-analysis settings (stub; no ``self``)."""
    def GetDefaultState () -> CurvatureAnalysisSettingsState: ...
    def GetCurrentState () -> CurvatureAnalysisSettingsState: ...
    def RestoreDefaults () -> None: ...
    def UpdateFromState (state : CurvatureAnalysisSettingsState) -> None: ...
    @property
    def GaussRange () -> Interval: ...
    @GaussRange.setter
    def GaussRange (value : Interval) -> None: ...
    @property
    def MeanRange () -> Interval: ...
    @MeanRange.setter
    def MeanRange (value : Interval) -> None: ...
    @property
    def MinRadiusRange () -> Interval: ...
    @MinRadiusRange.setter
    def MinRadiusRange (value : Interval) -> None: ...
    @property
    def MaxRadiusRange () -> Interval: ...
    @MaxRadiusRange.setter
    def MaxRadiusRange (value : Interval) -> None: ...
    @property
    def Style () -> CurvatureStyle: ...
    @Style.setter
    def Style (value : CurvatureStyle) -> None: ...
    # NOTE(review): declared at the same level as the other no-``self``
    # members -- presumably a static-style helper of this class; confirm.
    def CalculateCurvatureAutoRange (meshes : Iterable[Mesh], settings : CurvatureAnalysisSettingsState) -> Tuple[bool, CurvatureAnalysisSettingsState]: ...
class HistorySettings:
    """Construction-history recording settings (stub; no ``self``)."""
    @property
    def RecordingEnabled () -> bool: ...
    @RecordingEnabled.setter
    def RecordingEnabled (value : bool) -> None: ...
    @property
    def UpdateEnabled () -> bool: ...
    @UpdateEnabled.setter
    def UpdateEnabled (value : bool) -> None: ...
    @property
    def ObjectLockingEnabled () -> bool: ...
    @ObjectLockingEnabled.setter
    def ObjectLockingEnabled (value : bool) -> None: ...
    @property
    def BrokenRecordWarningEnabled () -> bool: ...
    @BrokenRecordWarningEnabled.setter
    def BrokenRecordWarningEnabled (value : bool) -> None: ...
class LicenseNode:
    """License node type constants."""
    Standalone = 0
    Network = 1
    NetworkCheckedOut = 2
class Installation:
    """Installation/license kind constants."""
    Undefined = 0
    Commercial = 1
    Educational = 2
    EducationalLab = 3
    NotForResale = 4
    NotForResaleLab = 5
    Beta = 6
    BetaLab = 7
    Evaluation = 8
    Corporate = 9
    EvaluationTimed = 10
class CurvatureStyle:
    """Curvature display style constants."""
    Gaussian = 0
    Mean = 1
    MinRadius = 2
    MaxRadius = 3
| [
"[email protected]"
] | |
a05e4fa1f1c5fa3696fae51ce274f1ad101d3175 | 15131f068ff3f457c4f405310cd375274d913c63 | /backend/long_limit_29117/settings.py | 75a0a84a46ff0f64d98eeb0b6ddb2709b73bb5a8 | [] | no_license | crowdbotics-apps/long-limit-29117 | 749cf2c7a7d2e2fcdb9464eb327a4f6799e0f18c | de1f20fe4298f9833f4c3c10bedd689d0b078024 | refs/heads/master | 2023-06-22T16:29:54.695478 | 2021-07-24T17:16:00 | 2021-07-24T17:16:00 | 389,157,594 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,927 | py | """
Django settings for long_limit_29117 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
from modules.manifest import get_modules
# All runtime configuration is read from environment variables via
# django-environ; required variables (e.g. SECRET_KEY) raise if missing.
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites'
]
LOCAL_APPS = [
    'home',
    'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
    'rest_framework',
    'rest_framework.authtoken',
    'rest_auth',
    'rest_auth.registration',
    'bootstrap4',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'allauth.socialaccount.providers.google',
    'django_extensions',
    'drf_yasg',
    'storages',
]
# Extra apps contributed by generated modules (see modules.manifest).
MODULES_APPS = get_modules()
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS + MODULES_APPS
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'long_limit_29117.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'web_build')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'long_limit_29117.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
# Default to SQLite; overridden below when DATABASE_URL is provided.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
if env.str("DATABASE_URL", default=None):
    DATABASES = {
        'default': env.db()
    }
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
    'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static'), os.path.join(BASE_DIR, 'web_build/static')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
    # Replace password reset serializer to fix 500 error
    "PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
    # Use custom serializer that has no username and matches web signup
    "REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
# S3 media storage is enabled only when all four credentials are present.
USE_S3 = (
    AWS_ACCESS_KEY_ID and
    AWS_SECRET_ACCESS_KEY and
    AWS_STORAGE_BUCKET_NAME and
    AWS_STORAGE_REGION
)
if USE_S3:
    AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
    AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
    AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
    AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
    AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
    DEFAULT_FILE_STORAGE = env.str(
        "DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
    )
MEDIA_URL = '/mediafiles/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles')
# Swagger settings for api docs
SWAGGER_SETTINGS = {
    "DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
    # output email to console instead of sending
    if not DEBUG:
        logging.warning("You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails.")
    EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"[email protected]"
] | |
b0489bd24caeaaf0ce9996ca35046cfccf718050 | ac8b725681e25177c5de3daf58afe00135241d0f | /leetcode/0136_single_number.py | ce6423738f0aa2d3b5644e971dda3130faedfdde | [
"MIT"
] | permissive | jacquerie/leetcode | 7af100ea1d7292c8c3da34210cf04d891be5561b | 0cb213b9c7bcb6efa11210e9ebc291befb560bb9 | refs/heads/master | 2022-05-19T22:19:46.284065 | 2022-03-27T02:41:58 | 2022-03-27T02:41:58 | 129,323,741 | 3 | 0 | MIT | 2021-01-04T01:41:50 | 2018-04-12T23:51:56 | Python | UTF-8 | Python | false | false | 330 | py | # -*- coding: utf-8 -*-
class Solution:
    def singleNumber(self, nums):
        """Return the element of *nums* that appears exactly once.

        Every other element appears exactly twice, so XOR-folding the
        whole list cancels the pairs and leaves the lone value.
        """
        acc = 0
        for value in nums:
            acc ^= value
        return acc
if __name__ == "__main__":
    # Smoke tests run only when the module is executed directly.
    solution = Solution()
    assert 1 == solution.singleNumber([2, 2, 1])
    assert 4 == solution.singleNumber([4, 1, 2, 1, 2])
| [
"[email protected]"
] | |
322be270a39cddcf47218ccadcbd9963f2929efc | 6c512b7d2ae4b1ad713a57f74a4816e1291ba7a1 | /python_3/experiments/expr_generate_random_email_addr.py | ffdb0114212f3743389a9430fcc54266bf9dc738 | [
"MIT"
] | permissive | duttashi/applied-machine-learning | 451389e8f27931f32132a148e93effa7c6352536 | ff3267b97d9dd7122400754798e06fb493daa40a | refs/heads/master | 2021-12-17T19:12:39.531717 | 2021-12-04T09:36:46 | 2021-12-04T09:36:46 | 169,368,684 | 0 | 2 | MIT | 2021-12-04T09:36:47 | 2019-02-06T07:19:08 | R | UTF-8 | Python | false | false | 1,002 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Feb 21 09:02:51 2021
Objective: To generate a random list of email addresses and write to file
Reference: https://codereview.stackexchange.com/questions/58269/generating-random-email-addresses
@author: Ashish
"""
import random, string
# Candidate mail domains for the generated addresses.
domains = ["hotmail.com", "gmail.com", "aol.com", "mail.com", "mail.kz", "yahoo.com"]
# Alphabet for the username part: the first 12 lowercase ASCII letters ('a'-'l').
letters = string.ascii_lowercase[:12]
def get_random_domain(domains):
    """Pick one domain uniformly at random from *domains*."""
    index = random.randrange(len(domains))
    return domains[index]
def get_random_name(letters, length):
    """Build a *length*-character name by sampling *letters* with replacement."""
    picked = [random.choice(letters) for _ in range(length)]
    return "".join(picked)
def generate_random_emails(nb, length):
    """Return *nb* random addresses, each with a *length*-char username.

    Uses the module-level ``letters`` alphabet and ``domains`` pool.
    """
    emails = []
    for _ in range(nb):
        user = get_random_name(letters, length)
        emails.append(user + "@" + get_random_domain(domains))
    return emails
def main():
    """Print a batch of random email addresses."""
    # 100 addresses; 7 refers to the number of chars in the username part.
    print(generate_random_emails(100, 7))
if __name__ == "__main__":
    # Entry point when run as a script.
    main()
| [
"[email protected]"
] | |
1f4a96aadea9ab8facaad238c1994ab40c709058 | f771e83756436594a145bd7b80e5e5d8bca53268 | /djangocms_baseplugins/spacer/migrations/0001_initial.py | 966d6afda4ff196593e37d0468afda831e2ebd4b | [
"MIT"
] | permissive | bnzk/djangocms-baseplugins | b76ed75460fbeacb62366935824d2bcfac52b25e | 98e390482aa4facc35efe2412ff1603d85e2c8ba | refs/heads/develop | 2023-06-17T23:55:41.574828 | 2023-06-09T09:22:01 | 2023-06-09T09:22:01 | 68,296,521 | 2 | 0 | MIT | 2023-04-17T09:18:11 | 2016-09-15T13:32:05 | Python | UTF-8 | Python | false | false | 2,206 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-09 11:32
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('cms', '0016_auto_20160608_1535'),
]
operations = [
migrations.CreateModel(
name='Spacer',
fields=[
('cmsplugin_ptr', models.OneToOneField(auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True, primary_key=True,
related_name='spacer_spacer',
serialize=False, to='cms.CMSPlugin')),
('title',
models.CharField(blank=True, default='', max_length=256, verbose_name='Title')),
('published', models.BooleanField(default=True, verbose_name='Published?')),
('published_from_date', models.DateTimeField(blank=True, default=None, null=True,
verbose_name='Published from')),
('published_until_date', models.DateTimeField(blank=True, default=None, null=True,
verbose_name='Published until')),
('in_menu', models.BooleanField(default=False, verbose_name='In Menu?')),
('layout',
models.CharField(blank=True, default='', max_length=64, verbose_name='Layout')),
('background', models.CharField(blank=True, default='', max_length=64,
verbose_name='Background')),
('color',
models.CharField(blank=True, default='', max_length=64, verbose_name='Color')),
('anchor', models.SlugField(blank=True, default='', verbose_name='Anchor')),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
]
| [
"[email protected]"
] | |
9e54269c162580b6585706bcfc401a469f121965 | 807305b8aefbd7aac4f44c67deed06c059ca02d9 | /tests/databases/value/test_database.py | 3f26650ec4063233ba8ff239621aca52b79828e1 | [
"MIT"
] | permissive | supramolecular-toolkit/stk | c40103b4820c67d110cbddc7be30d9b58d85f7af | 46f70cd000890ca7c2312cc0fdbab306565f1400 | refs/heads/master | 2022-11-27T18:22:25.187588 | 2022-11-16T13:23:11 | 2022-11-16T13:23:11 | 129,884,045 | 22 | 5 | MIT | 2019-08-19T18:16:41 | 2018-04-17T09:58:28 | Python | UTF-8 | Python | false | false | 862 | py | def test_database(case_data):
"""
Test a database.
Parameters
----------
case_data : :class:`.CaseData`
A test case. Holds the database to test and the value to put
into it.
Returns
-------
None : :class:`NoneType`
"""
_test_database(
database=case_data.database,
molecule=case_data.molecule,
value=case_data.value,
)
def _test_database(database, molecule, value):
"""
Test a database.
Parameters
----------
database : class:`.ValueDatabase`
The database to test.
molecule : :class:`.Molecule`
The molecule to test.
value : :class:`object`
The value to put into the database.
Returns
-------
None : :class:`NoneType`
"""
database.put(molecule, value)
assert database.get(molecule) == value
| [
"[email protected]"
] | |
f223475d9486a080b7b1aca585647310abe8c018 | 2e6248663931cac90404e7ed63cb905ff1854b90 | /sycomore/rf_spoiling.py | eb11c03f22a8b95749edcaed5c904f5849b3e1c5 | [] | no_license | ruojianhua1/sycomore-web | 30b2a69843667b07a55af620ba78864bc227fd4d | a3fb38d7548939c3fa33ab838aa658cc0b89a9a9 | refs/heads/master | 2023-03-22T03:59:07.543015 | 2020-01-22T09:10:06 | 2020-01-22T09:10:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 844 | py | import numpy
import sycomore
from sycomore.units import *
def rf_spoiling(
    model, flip_angle, TE, TR, slice_thickness, phase_step, repetitions):
    """Simulate an RF-spoiled gradient-echo sequence and return the complex
    echo of every repetition as a numpy array.

    NOTE(review): assumes ``model`` is a sycomore simulation model and that
    TE, TR, slice_thickness and phase_step carry sycomore units -- confirm
    against the caller.
    """
    t_readout = TR-TE
    # Readout gradient chosen so that gamma * G * thickness * (TR-TE) spans
    # one full cycle (2*pi rad) across the slice.
    G_readout = (2*numpy.pi*rad / (sycomore.gamma*slice_thickness))/(TR-TE)
    echoes = numpy.zeros(repetitions, dtype=complex)
    for r in range(0, repetitions):
        # Quadratic RF phase cycling: phase_r = phase_step * r*(r+1)/2.
        phase = (phase_step * 1/2*(r+1)*r)
        model.apply_pulse(flip_angle, phase)
        model.apply_time_interval(TE)
        # Demodulate the echo by the pulse phase (receiver phase tracking).
        echoes[r] = model.echo*numpy.exp(-1j*phase.convert_to(rad))
        model.apply_time_interval(t_readout, G_readout)
    return echoes
def compute_ideal_spoiling(species, flip_angle, TR):
    """Return the steady-state signal of an ideally spoiled sequence
    (Ernst formula) for the given species, flip angle and repetition time."""
    angle = flip_angle.convert_to(rad)
    relaxation = numpy.exp(-TR/species.T1)
    numerator = numpy.sin(angle) * (1 - relaxation)
    denominator = 1 - numpy.cos(angle) * relaxation
    return float(numerator / denominator)
return float(signal)
| [
"[email protected]"
] | |
163f42dea8e8032a2c607490214273e7151c07fe | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/notepad2/testcase/firstcases/testcase3_010.py | 5e1e4a976a0825757fc871a9d6bde4ddddf53119 | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,308 | py | #coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
# Appium desired capabilities: run the Notepad app on an Android 4.4
# emulator with JaCoCo coverage instrumentation enabled.
desired_caps = {
    'platformName' : 'Android',
    'deviceName' : 'Android Emulator',
    'platformVersion' : '4.4',
    'appPackage' : 'com.farmerbb.notepad',
    'appActivity' : 'com.farmerbb.notepad.activity.MainActivity',
    'resetKeyboard' : True,
    # Instrumentation runner used to collect coverage data.
    'androidCoverage' : 'com.farmerbb.notepad/com.farmerbb.notepad.JacocoInstrumentation',
    'noReset' : True
}
def command(cmd, timeout=5):
    """Run *cmd* in a shell, wait *timeout* seconds, then terminate it.

    Args:
        cmd (str): shell command line to execute.
            NOTE(review): shell=True means *cmd* is interpreted by the
            shell, so it must come from a trusted source.
        timeout (int|float): seconds to sleep before terminating the child.
    Returns:
        None
    """
    p = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
    time.sleep(timeout)
    p.terminate()
    # Bug fix: reap the child.  Without wait() a terminated/finished
    # process lingers as a zombie until the interpreter exits.
    p.wait()
    return
def getElememt(driver, str) :
    """Find an element by UiAutomator selector, retrying up to 5 times.

    NOTE(review): the parameter named ``str`` shadows the builtin and the
    function name misspells "Element"; both are kept for call-site
    compatibility.
    """
    for i in range(0, 5, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str)
        except NoSuchElementException:
            time.sleep(1)  # give the UI a second to settle, then retry
        else:
            return element
    # Last resort: tap near the screen corner (dismiss popups / regain
    # focus) and try once more, letting the exception propagate this time.
    os.popen("adb shell input tap 50 50")
    element = driver.find_element_by_android_uiautomator(str)
    return element
def getElememtBack(driver, str1, str2) :
    """Find an element by selector ``str1`` (2 attempts), falling back to
    selector ``str2`` (5 attempts), then to a screen tap plus a final try.
    """
    # Primary selector: short retry loop.
    for i in range(0, 2, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str1)
        except NoSuchElementException:
            time.sleep(1)
        else:
            return element
    # Fallback selector: longer retry loop.
    for i in range(0, 5, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str2)
        except NoSuchElementException:
            time.sleep(1)
        else:
            return element
    # Last resort: tap near the screen corner and try the fallback once
    # more, letting any exception propagate.
    os.popen("adb shell input tap 50 50")
    element = driver.find_element_by_android_uiautomator(str2)
    return element
def swipe(driver, startxper, startyper, endxper, endyper):
    """Swipe between two points given as fractions of the screen size,
    retrying once after a one-second pause if the driver raises."""
    dims = driver.get_window_size()
    x1 = int(dims["width"] * startxper)
    y1 = int(dims["height"] * startyper)
    x2 = int(dims["width"] * endxper)
    y2 = int(dims["height"] * endyper)
    try:
        driver.swipe(start_x=x1, start_y=y1, end_x=x2, end_y=y2,
                     duration=2000)
    except WebDriverException:
        # One retry after letting the UI settle.
        time.sleep(1)
        driver.swipe(start_x=x1, start_y=y1, end_x=x2, end_y=y2,
                     duration=2000)
    return
# testcase010
try :
starttime = time.time()
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
element = getElememtBack(driver, "new UiSelector().text(\"Close\")", "new UiSelector().className(\"android.widget.Button\")")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().className(\"android.widget.ImageView\").description(\"More options\")")
TouchAction(driver).long_press(element).release().perform()
element = getElememt(driver, "new UiSelector().className(\"android.widget.ImageView\").description(\"More options\")")
TouchAction(driver).long_press(element).release().perform()
driver.press_keycode(4)
except Exception, e:
print 'FAIL'
print 'str(e):\t\t', str(e)
print 'repr(e):\t', repr(e)
print traceback.format_exc()
else:
print 'OK'
finally:
cpackage = driver.current_package
endtime = time.time()
print 'consumed time:', str(endtime - starttime), 's'
command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"3_010\"")
jacocotime = time.time()
print 'jacoco time:', str(jacocotime - endtime), 's'
driver.quit()
if (cpackage != 'com.farmerbb.notepad'):
cpackage = "adb shell am force-stop " + cpackage
os.popen(cpackage)
| [
"[email protected]"
] | |
51b36e9e923120f85678fa88d62f02b2e0c61d2c | 1e11d6f9245c55e21edfb24f4340d52e3f7f327f | /dillo/migrations/0059_image_entity.py | 15048319182249db28d4ac935bc231ce24a67f27 | [] | no_license | armadillica/dillo | 996e8462f4f76349ecc49ecb08cdd6c8c66e072b | 960aed85f8438109bed9883321891305e1db8b10 | refs/heads/main | 2023-08-04T06:45:34.570071 | 2023-06-04T00:07:57 | 2023-06-04T00:07:57 | 30,461,275 | 79 | 18 | null | 2023-08-02T00:22:40 | 2015-02-07T16:17:43 | Python | UTF-8 | Python | false | false | 508 | py | # Generated by Django 2.2.14 on 2021-04-11 20:57
import dillo.models.mixins
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional preview image field to ``dillo.Post``."""
    dependencies = [
        ('dillo', '0058_rename_icon_to_image'),
    ]
    operations = [
        migrations.AddField(
            model_name='post',
            name='image',
            # Optional (blank=True); files land under a hashed upload path.
            field=models.ImageField(blank=True, help_text='A preview image for the entity', upload_to=dillo.models.mixins.get_upload_to_hashed_path),
        ),
    ]
| [
"[email protected]"
] | |
139dfac249b6f7b80b7a95457f89dea57fcbe4b2 | ffb05b145989e01da075e2a607fb291955251f46 | /pypers/oxford/metatracer.py | 253fb7ce096b902041cfe72b29f2096d1d49f442 | [] | no_license | micheles/papers | a5e7f2fa0cf305cd3f8face7c7ecc0db70ce7cc7 | be9070f8b7e8192b84a102444b1238266bdc55a0 | refs/heads/master | 2023-06-07T16:46:46.306040 | 2018-07-14T04:17:51 | 2018-07-14T04:17:51 | 32,264,461 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 629 | py | # metatracer.py
import inspect
from decorators import decorator
@decorator
def traced(meth, *args, **kw):
    """Decorator: log ``module.Class.method`` before delegating the call.

    NOTE(review): relies on ``meth.__cls__`` being set by MetaTracer below,
    and on the project-local ``decorator`` helper; Python 2 print syntax.
    """
    cls = meth.__cls__
    # Functions created with exec/eval may have no __module__; fall back
    # to the module of the defining class.
    modname = meth.__module__ or cls.__module__
    print "calling %s.%s.%s" % (modname, cls.__name__, meth.__name__)
    return meth(*args, **kw)
class MetaTracer(type):
    """Metaclass that wraps every function defined in its classes with the
    ``traced`` decorator, so each method call is logged."""
    def __init__(cls, name, bases, dic):
        super(MetaTracer, cls).__init__(name, bases, dic)
        # dict.items() instead of the Python-2-only .iteritems(): it
        # behaves identically here and keeps the metaclass working on
        # both Python 2 and Python 3.
        for k, v in dic.items():
            if inspect.isfunction(v):
                v.__cls__ = cls  # so traced() knows in which class v was defined
                setattr(cls, k, traced(v))
| [
"[email protected]"
] | |
5d3eb74722808ffd0192f32a87c42dc32849375a | 409829dfa1c9758ac67190fe76fea3746106bbad | /setup.py | 3b7e08ef66822de90ff5fd52631aa58f7305cdb6 | [
"MIT"
] | permissive | eagleflo/python-sc2 | 916d1df613190dbc5da0883d2c3c6275350c8f88 | 1bd24e0b7d3200df7fb7ef02256753c45fea0b32 | refs/heads/master | 2020-04-01T05:16:42.683300 | 2018-10-13T10:33:31 | 2018-10-13T10:33:31 | 152,896,763 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,356 | py | from setuptools import setup, find_packages
from pipenv.project import Project
from pipenv.utils import convert_deps_to_pip
# Read dependencies straight from the Pipfile so setup.py and Pipenv stay
# in sync.  NOTE(review): Project/convert_deps_to_pip are pipenv internals
# that were removed in later pipenv releases -- confirm the pinned version.
pfile = Project(chdir=False).parsed_pipfile
requirements = convert_deps_to_pip(pfile['packages'], r=False)
# NOTE(review): test_requirements is computed but never passed to setup().
test_requirements = convert_deps_to_pip(pfile['dev-packages'], r=False)
setup(
    name = "sc2",
    packages = find_packages(),
    version = "0.10.1",
    description = "A StarCraft II API Client for Python 3",
    license="MIT",
    author = "Hannes Karppila",
    author_email = "[email protected]",
    url = "https://github.com/Dentosal/python-sc2",
    keywords = ["StarCraft", "StarCraft 2", "StarCraft II", "AI", "Bot"],
    setup_requires=["pipenv"],
    install_requires=requirements,
    classifiers = [
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Intended Audience :: Education",
        "Intended Audience :: Science/Research",
        "Topic :: Games/Entertainment",
        "Topic :: Games/Entertainment :: Real Time Strategy",
        "Topic :: Scientific/Engineering",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
    ]
)
| [
"[email protected]"
] | |
6c8c160c9634381508ee010a7ceeae9177ffd6ec | 8e24e8bba2dd476f9fe612226d24891ef81429b7 | /geeksforgeeks/python/python_all/112_5.py | dc004ce6416dbe458336c86465138549e9215466 | [] | no_license | qmnguyenw/python_py4e | fb56c6dc91c49149031a11ca52c9037dc80d5dcf | 84f37412bd43a3b357a17df9ff8811eba16bba6e | refs/heads/master | 2023-06-01T07:58:13.996965 | 2021-06-15T08:39:26 | 2021-06-15T08:39:26 | 349,059,725 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,837 | py | Python – Filter unequal elements of two lists corresponding same index
Sometimes, while working with Python data, we can have a problem in which we
require to extract the values across multiple lists which are unequal and have
similar index. This kind of problem can come in many domains. Let’s discuss
certain ways in which this problem can be solved.
**Method #1 : Using loop + zip()**
The combination of above functions can be used to solve this problem. In this,
we extract combine the index elements using zip and then extract and check for
dissimilarity using conditional statement in loop.
__
__
__
__
__
__
__
# Python3 code to demonstrate working of
# Unequal Equi-index elements
# using loop + zip()
# initialize lists
test_list1 = ["a", "b", "c", "d"]
test_list2 = ["g", "b", "s", "d"]
# printing original lists
print("The original list 1 : " + str(test_list1))
print("The original list 2 : " + str(test_list2))
# Unequal Equi-index elements
# using loop + zip()
res = []
for i, j in zip(test_list1, test_list2):
if i != j:
res.append(i)
# printing result
print("Unequal index elements in lists : " + str(res))
---
__
__
**Output :**
The original list 1 : ['a', 'b', 'c', 'd']
The original list 2 : ['g', 'b', 's', 'd']
Unequal index elements in lists : ['a', 'c']
**Method #2 : Using zip() + list comprehension**
Combination of these functionalities can also be used to solve this problem.
In this, we use similar method as above, just a shorthand logic compressed
using list comprehension.
__
__
__
__
__
__
__
# Python3 code to demonstrate working of
# Unequal Equi-index elements
# using list comprehension + zip()
# initialize lists
test_list1 = ["a", "b", "c", "d"]
test_list2 = ["g", "b", "s", "d"]
# printing original lists
print("The original list 1 : " + str(test_list1))
print("The original list 2 : " + str(test_list2))
# Unequal Equi-index elements
# using list comprehension + zip()
res = [i for i, j in zip(test_list1, test_list2) if i !=
j]
# printing result
print("Unequal index elements in lists : " + str(res))
---
__
__
**Output :**
The original list 1 : ['a', 'b', 'c', 'd']
The original list 2 : ['g', 'b', 's', 'd']
Unequal index elements in lists : ['a', 'c']
Attention geek! Strengthen your foundations with the **Python Programming
Foundation** Course and learn the basics.
To begin with, your interview preparations Enhance your Data Structures
concepts with the **Python DS** Course.
My Personal Notes _arrow_drop_up_
Save
| [
"[email protected]"
] | |
0b5f1d72d0ce03d8535066049aea2e6e91ad8bbf | 965e163df916b01d647953f2b1431d265683f6ca | /test/test_helpers.py | e839094edc9afd8bc16d27b5c41e69a3377b702d | [
"MIT"
] | permissive | expressvpn/expressvpn_leak_testing | 6505c39228d396caff0c2df3777009c6fbdf3127 | 9e4cee899ac04f7820ac351fa55efdc0c01370ba | refs/heads/master | 2023-08-18T06:33:33.931040 | 2021-10-11T03:02:50 | 2021-10-11T03:02:50 | 112,572,905 | 244 | 48 | MIT | 2021-01-19T16:02:18 | 2017-11-30T06:18:40 | Python | UTF-8 | Python | false | false | 1,912 | py | import sys
import unittest
import mock
from parameterized import parameterized
from xv_leak_tools.helpers import current_os
from xv_leak_tools.helpers import merge_two_dicts
from xv_leak_tools.helpers import other_oses
class TestOSHelpers(unittest.TestCase):
    """Unit tests for current_os()/other_oses(), driven by patching
    sys.platform to each known platform string."""
    def test_current_os(self):
        # sys.platform was 'linux2' on old kernels/Pythons; both map to
        # the canonical 'linux'.
        for plat in ['linux', 'linux2']:
            with mock.patch.object(sys, 'platform', plat):
                self.assertEqual(current_os(), 'linux')
        with mock.patch.object(sys, 'platform', 'darwin'):
            self.assertEqual(current_os(), 'macos')
        for plat in ['win32', 'cygwin']:
            with mock.patch.object(sys, 'platform', plat):
                self.assertEqual(current_os(), 'windows')
        # Unrecognized platforms must raise rather than guess.
        with mock.patch.object(sys, 'platform', 'unknown'):
            with self.assertRaises(Exception):
                current_os()
    def test_other_oses(self):
        # (platform string, expected set of the two *other* OS names).
        plat_and_others = [
            ('linux', ['windows', 'macos']),
            ('darwin', ['windows', 'linux']),
            ('win32', ['linux', 'macos']),
        ]
        for plat, others in plat_and_others:
            with mock.patch.object(sys, 'platform', plat):
                # Compare as sets: the order of other_oses() is not part
                # of the contract.
                self.assertEqual(set(others), set(other_oses()))
class TestMergeTwoDicts(unittest.TestCase):
    """Table-driven tests for merge_two_dicts(); on key clashes the second
    dict's values are expected to win (see the DICT1+DICT4 case)."""
    DICT1 = {'a': 1, 'b': 2, 'c': 3}
    DICT2 = {'d': 4, 'e': 5, 'f': 6}
    DICT3 = {'a': 4, 'b': 5, 'c': 6}
    DICT4 = {'c': 4, 'd': 5, 'e': 6}
    MERGED1_2 = {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6}
    MERGED1_4 = {'a': 1, 'b': 2, 'c': 4, 'd': 5, 'e': 6}
    # parameterized.expand generates one test method per (d1, d2, merged)
    # tuple below.
    @parameterized.expand([
        (DICT1, DICT2, MERGED1_2),
        (DICT1, DICT3, DICT3),
        ({}, DICT1, DICT1),
        (DICT1, {}, DICT1),
        (DICT1, DICT4, MERGED1_4),
    ])
    def test_merge_two_dicts(self, dict1, dict2, merged):
        self.assertEqual(merge_two_dicts(dict1, dict2), merged)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
38e53983d55b9890e6298bca5e315690dd08829a | 9e3f1fc1935ac1dcdce7c8d39776de14ec49b5e6 | /aptechapp/apps.py | bc8776a83e61e36a73718deed4ee603c13447ab8 | [] | no_license | irchriscott/Aptech-Connect-Web | f03072bcc790ef1d72b6e609fa99656f0e45fcba | 7ebe1919ffacd9e826e2d86c778f4546072738e9 | refs/heads/master | 2020-05-02T07:19:05.867858 | 2019-03-29T12:46:22 | 2019-03-29T12:46:22 | 177,814,557 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class AptechappConfig(AppConfig):
    """Django application configuration for the ``aptechapp`` app."""
    name = 'aptechapp'
| [
"[email protected]"
] | |
a31878733c748d9fc436e92e0c85e1f95a4a753b | 3cef51c71fdb19d326bea2a76bce03b6e04a69cb | /2016/22 Grid Computing/solutionb.py | 476a47fa34b3476665f34fb9ac2b6ae703eda0a0 | [] | no_license | janezd/advent-of-code | fe2819c667a2f9309fe3c50e5c234a98855f26b8 | 700b09894eb6b8de4324304e99be17ca664a7c5b | refs/heads/master | 2021-01-10T08:53:31.721953 | 2017-12-26T10:33:11 | 2017-12-26T10:33:11 | 48,594,325 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,091 | py | import numpy as np
import re
from hashlib import sha1
class Hashable:
    """Wrap a numpy array so it can be used as a set/dict member.

    The hash is the SHA-1 of the array's raw bytes, computed once at
    construction time; equality compares array contents.
    """
    def __init__(self, a):
        self.a = a
        # ascontiguousarray guarantees a C-contiguous buffer so equal
        # arrays always hash to the same digest.
        self._hash = int(sha1(np.ascontiguousarray(self.a)).hexdigest(), 16)
        # NOTE(review): debug trace kept to preserve observable behavior.
        print(self._hash)
    def __hash__(self):
        return self._hash
    def __eq__(self, other):
        # Bug fix: comparing two Hashable wrappers used to pass the
        # wrapper object itself to np.array_equal, which never matched,
        # so duplicate states were never detected in sets.  Unwrap first.
        if isinstance(other, Hashable):
            other = other.a
        return np.array_equal(self.a, other)
# Parse the puzzle input: one sublist per x column, each entry being the
# [size, used, avail] triple of a node.
data = []
x_prev = -1
re_node = re.compile("/dev/grid/node-x(\d+)-y(\d+) +(\d+)T +(\d+)T +(\d+)T")
for line in open("input.txt"):
    mo = re_node.search(line)
    if mo:
        x, y, *c = [int(x) for x in mo.groups()]
        if x != x_prev:
            # A new x value starts a new column sublist.
            data.append([])
            x_prev = x
        data[-1].append(c)
for x in data:
    print([t for t in x][:7])
sizex, sizey = len(data), len(data[0])
# Breadth-first search over grid states; the goal data starts in the
# top-right corner (x = sizex - 1, y = 0) and must reach (0, 0).
check_next = [(np.array(data, dtype=np.uint16), len(data) - 1, 0)]
steps = 0
seen = set()
while check_next:
    steps += 1
    print(steps, len(check_next))
    to_check = check_next
    check_next = []
    while to_check:
        # NOTE(review): `data` is rebound here, shadowing the parsed
        # input list above.
        data, xpos, ypos = to_check.pop()
        for x in range(sizex):
            for y in range(sizey):
                for nx, ny in (x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1):
                    # A move is legal if the source's used data fits in
                    # the neighbour's available space.
                    if 0 <= nx < sizex and 0 <= ny < sizey and \
                        data[x, y, 1] <= data[nx, ny, 2]:
                        # NOTE(review): this print/exit() pair looks like
                        # leftover debugging -- it fires on the first
                        # legal move, making everything below unreachable.
                        print("{}, {} -> {}, {}".format(x, y, nx, ny))
                        exit()
                        new = data.copy()
                        new[nx, ny, 2] -= new[x, y, 1]
                        new[nx, ny, 1] += new[x, y, 1]
                        new[x, y, 1] = 0
                        new[x, y, 2] = new[x, y, 0]
                        # Track the goal data if this move carried it.
                        nposx = nx if x == xpos else xpos
                        nposy = ny if y == ypos else ypos
                        if nposx == nposy == 0:
                            print(steps)
                            exit()
                        mat = (Hashable(new), nposx, nposy)
                        if mat not in seen:
                            check_next.append((new, nposx, nposy))
                            seen.add(mat)
| [
"[email protected]"
] | |
1434edbf203110a40d993508e20577056c7ba9dc | f124e3a874bf9bfe8ce2d694d276238c1a1150fc | /src/lib/pedal/sandbox/result.py | 6f2f9b61ed343b8ae41d3c6743ef572fea823ec4 | [
"MIT",
"Python-2.0"
] | permissive | Skydler/skulpt | b37a737b4f82c788e2f94b3a406fc3a313a75274 | 6eeabde2c5eb80c4a13f0958b75a69d99cd31b8a | refs/heads/master | 2020-08-17T10:48:29.244685 | 2019-10-23T12:16:36 | 2019-10-23T12:16:36 | 215,650,824 | 0 | 0 | NOASSERTION | 2019-10-16T21:50:53 | 2019-10-16T21:50:53 | null | UTF-8 | Python | false | false | 12,543 | py | class SandboxResult:
"""
Proxy class for wrapping results from executing student code. Attempts
to perfectly emulate the underlying data value, so that users will never
realize they have a proxy. The advantage is that special information is
available in the corresponding Sandbox about this result that can give
more context.
Attributes:
value (any): The actual data stored in this class that we are proxying.
If the underlying proxy object has a field called `value`, then
you can use either `_actual_value` to access the proxied object.
_actual_call_id (int): The call that was used to generate this result.
_actual_sandbox (Sandbox): The sandbox that was used to generate this
result. If None, then the sandbox was lost.
"""
ASSIGNABLE_ATTRS = ['value', '_actual_call_id', '_actual_sandbox',
'_clone_this_result']
    def __init__(self, value, call_id=None, sandbox=None):
        """
        Args:
            value (any): Literally any type of data.
            call_id (int): The unique call ID that generated this result. If
                None, then the SandboxResult was generated by manipulating an earlier
                result.
                TODO: We could actually remember the operations applied to this
                instance and use them to reconstruct the transformations...
            sandbox (Sandbox): The sandbox that was used to generate this
                result. If None, then the sandbox was lost.
        """
        # These three names are in ASSIGNABLE_ATTRS, so __setattr__ stores
        # them on the proxy itself rather than on the proxied value.
        self.value = value
        self._actual_call_id = call_id
        self._actual_sandbox = sandbox
    def __getattribute__(self, name):
        """
        Get the attribute with the given `name`. This allows us to pass
        most attributes along to the underlying `value`, while still
        maintaining access to the proxy's attributes.

        The proxy masquerades as the proxied object: `__class__` reports
        the value's class, and any unknown attribute is looked up on the
        value and wrapped in a new SandboxResult.
        """
        v = object.__getattribute__(self, "value")
        if name == "__class__":
            return v.__class__
        elif name == "__actual_class__":
            # Escape hatch: the proxy's real class.
            return object.__getattribute__(self, "__class__")
        elif name == "_actual_value":
            # Escape hatch: the unwrapped value.
            return v
        elif name in SandboxResult.ASSIGNABLE_ATTRS:
            return object.__getattribute__(self, name)
        # NOTE(review): "value" is itself in ASSIGNABLE_ATTRS, so this
        # branch appears unreachable -- confirm intent.
        elif name == "value" and not hasattr(v, "value"):
            return v
        else:
            # Delegate to the proxied value, wrapping the result so the
            # call_id/sandbox context is preserved.
            return SandboxResult(object.__getattribute__(v, name),
                                 object.__getattribute__(self, "_actual_call_id"),
                                 object.__getattribute__(self, "_actual_sandbox"))
    def __setattr__(self, name, value):
        # Bookkeeping attributes live on the proxy; everything else is
        # set on the proxied value.
        if name in SandboxResult.ASSIGNABLE_ATTRS:
            object.__setattr__(self, name, value)
        else:
            setattr(self.value, name, value)
def __delattr__(self, name):
if name in SandboxResult.ASSIGNABLE_ATTRS:
object.__delattr__(self, name, value)
else:
delattr(self.value, name, value)
def _clone_this_result(self, new_value):
"""
Create a new SandboxResult based on this current one. Copies over the
`call_id` and `sandbox`.
Args:
new_value (any): The new value to be proxying.
Returns:
SandboxResult
"""
return SandboxResult(new_value,
call_id=self._actual_call_id,
sandbox=self._actual_sandbox)
def __repr__(self):
"""
Returns the representation of the proxied object.
Returns:
str: The `repr` of the proxied object.
"""
return repr(self.value)
def __str__(self):
"""
Returns the string representation of the proxied object.
Returns:
str: The `str` of the proxied object.
"""
return str(self.value)
def __bytes__(self):
return bytes(self.value)
def __format__(self, format_spec):
return format(self.value, format_spec)
def __call__(self, *args):
"""
Returns the result of calling the proxied object with the args.
Returns:
SandboxResult: A proxy of the Sandbox object.
"""
return self._clone_this_result(self.value(*args))
def __hash__(self):
return hash(self.value)
def __bool__(self):
return bool(self.value)
def __dir__(self):
return dir(self.value)
def __instancecheck__(self, instance):
return isinstance(self.value, instance)
def __subclasscheck__(self, subclass):
return issubclass(self.value, subclass)
def __len__(self):
return self._clone_this_result(len(self.value))
def __getitem__(self, key):
return self._clone_this_result(self.value[key])
def __setitem__(self, key, value):
self.value[key] = value
def __delitem__(self, key):
del self.value[key]
def __missing__(self, key):
return self.value.__missing__(key)
def __iter__(self):
return iter(self.value)
def __reversed__(self):
return reversed(self.value)
def __contains__(self, item):
return self.value.__contains__(item)
def __eq__(self, other):
"""
Test if the proxied object is equal to the given `other`.
Args:
other (any): The other object.
Returns:
bool or any: Returns whatever the proxy object's __eq__ returns.
"""
if isinstance(other, SandboxResult):
return self.value == other.value
return self.value == other
def __lt__(self, other):
if isinstance(other, SandboxResult):
return self.value < other.value
return self.value < other
def __le__(self, other):
if isinstance(other, SandboxResult):
return self.value <= other.value
return self.value <= other
def __gt__(self, other):
if isinstance(other, SandboxResult):
return self.value > other.value
return self.value > other
def __ge__(self, other):
if isinstance(other, SandboxResult):
return self.value >= other.value
return self.value >= other
def __ne__(self, other):
if isinstance(other, SandboxResult):
return self.value != other.value
return self.value != other
## Numeric Operations
def __add__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value + other.value)
return self._clone_this_result(self.value + other)
def __sub__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value - other.value)
return self._clone_this_result(self.value - other)
def __mul__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value * other.value)
return self._clone_this_result(self.value * other)
def __matmul__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value.__matmul__(other.value))
return self._clone_this_result(self.value.__matmul__(other))
def __truediv__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value.__truediv__(other.value))
return self._clone_this_result(self.value.__truediv__(other))
def __floordiv__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value.__floordiv__(other.value))
return self._clone_this_result(self.value.__floordiv__(other))
def __mod__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value.__mod__(other.value))
return self._clone_this_result(self.value.__mod__(other))
def __divmod__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value.__divmod__(other.value))
return self._clone_this_result(self.value.__divmod__(other))
def __pow__(self, other, *modulo):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value.__pow__(other.value, *modulo))
return self._clone_this_result(self.value.__pow__(other, *modulo))
def __lshift__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value.__lshift__(other.value))
return self._clone_this_result(self.value.__lshift__(other))
def __rshift__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value.__rshift__(other.value))
return self._clone_this_result(self.value.__rshift__(other))
def __and__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value.__and__(other.value))
return self._clone_this_result(self.value.__and__(other))
def __xor__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value.__xor__(other.value))
return self._clone_this_result(self.value.__xor__(other))
def __or__(self, other):
if isinstance(other, SandboxResult):
return self._clone_this_result(self.value.__or__(other.value))
return self._clone_this_result(self.value.__or__(other))
def __radd__(self, other):
return self._clone_this_result(self.value.__radd__(other))
def __rsub__(self, other):
return self._clone_this_result(self.value.__rsub__(other))
def __rmul__(self, other):
return self._clone_this_result(self.value.__rmul__(other))
def __rmatmul__(self, other):
return self._clone_this_result(self.value.__rmatmul__(other))
def __rtruediv__(self, other):
return self._clone_this_result(self.value.__rtruediv__(other))
def __rfloordiv__(self, other):
return self._clone_this_result(self.value.__rfloordiv__(other))
def __rmod__(self, other):
return self._clone_this_result(self.value.__rmod__(other))
def __rdivmod__(self, other):
return self._clone_this_result(self.value.__rdivmod__(other))
def __rpow__(self, other):
return self._clone_this_result(self.value.__rpow__(other))
def __rlshift__(self, other):
return self._clone_this_result(self.value.__rlshift__(other))
def __rand__(self, other):
return self._clone_this_result(self.value.__rand__(other))
def __rxor__(self, other):
return self._clone_this_result(self.value.__rxor__(other))
def __ror__(self, other):
return self._clone_this_result(self.value.__ror__(other))
## TODO: __iadd__ and other in-place assignment operators?
def __neg__(self):
return self._clone_this_result(self.value.__neg__())
def __pos__(self):
return self._clone_this_result(self.value.__pos__())
def __abs__(self):
return self._clone_this_result(self.value.__abs__())
def __invert__(self):
return self._clone_this_result(self.value.__invert__())
def __complex__(self):
return self._clone_this_result(self.value.__complex__())
def __int__(self):
return self._clone_this_result(self.value.__int__())
def __float__(self):
return self._clone_this_result(self.value.__float__())
def __round__(self, *ndigits):
return self._clone_this_result(self.value.__round__(*ndigits))
def __trunc__(self):
return self._clone_this_result(self.value.__trunc__())
def __floor__(self):
return self._clone_this_result(self.value.__floor__())
def __ceil__(self):
return self._clone_this_result(self.value.__ceil__())
def __enter__(self):
return self.value.__enter__()
def __exit__(self, exc_type, exc_value, traceback):
return self.value.__exit__(exc_type, exc_value, traceback)
def __await__(self):
return self.value.__await__()
def __aiter__(self):
return self.value.__aiter__()
def __anext__(self):
return self.value.__anext__()
def __aenter__(self):
return self.value.__aenter__()
def __aexit__(self, exc_type, exc_value, traceback):
return self.value.__aexit__(exc_type, exc_value, traceback)
| [
"[email protected]"
] | |
8a745f4689ea9fda6a7da7c444c8f3eef0616059 | 52f4426d2776871cc7f119de258249f674064f78 | /misc/algorithm/shortest_path/floyd_warshall.py | 69f03a396a9041e4d03aae9d449722638becbffb | [] | no_license | namhyun-gu/algorithm | 8ad98d336366351e715465643dcdd9f04eeb0ad2 | d99c44f9825576c16aaca731888e0c32f2ae6e96 | refs/heads/master | 2023-06-06T02:28:16.514422 | 2021-07-02T10:34:03 | 2021-07-02T10:34:03 | 288,646,740 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 954 | py | from math import inf
from typing import List
# Symmetric adjacency matrix of a 7-vertex weighted graph; math.inf marks
# absent edges and the diagonal is 0 (distance to self).
graph: List[List[float]] = [
    [0, 7, inf, inf, 3, 10, inf],
    [7, 0, 4, 10, 2, 6, inf],
    [inf, 4, 0, 2, inf, inf, inf],
    [inf, 10, 2, 0, 11, 9, 4],
    [3, 2, inf, 11, 0, inf, 5],
    [10, 6, inf, 9, inf, 0, inf],
    [inf, inf, inf, 4, 5, inf, 0],
]
def floyd_warshall(adj: Optional[List[List[float]]] = None) -> List[List[float]]:
    """All-pairs shortest path distances via Floyd-Warshall, O(V^3).

    Args:
        adj: square adjacency matrix where adj[i][j] is the weight of the
            edge from i to j (math.inf for no edge, 0 on the diagonal).
            Defaults to the module-level ``graph`` for backward
            compatibility with the original zero-argument call.

    Returns:
        A new matrix of shortest-path distances; the input is not modified.
    """
    if adj is None:
        adj = graph
    vertex_size = len(adj)
    # Start from a copy of the adjacency matrix so the input stays intact.
    w = [row[:] for row in adj]
    # Let each vertex k in turn act as an allowed intermediate hop.
    for k in range(vertex_size):
        for i in range(vertex_size):
            for j in range(vertex_size):
                w[i][j] = min(w[i][j], w[i][k] + w[k][j])
    return w
if __name__ == "__main__":
    # Demo: print the full distance matrix, tab-separated, one row per line.
    print("--- floyd_warshall ---")
    dists = floyd_warshall()
    for i in range(len(dists)):
        for j in range(len(dists)):
            print(dists[i][j], end="\t")
        print()
| [
"[email protected]"
] | |
d174d21032804d359d6c11940c1d34010ee9f1d4 | 1915774790a77a630c00e70738ac41a315f5a2cb | /doorscalc/migrations/0038_auto_20190828_0818.py | 700027d8f8f6d981755119764f7adca3d7b95bad | [] | no_license | coconutcake/hajduktools | 842948646d2e8d3368b4d420d73bba981d649d43 | 6f9e678a1168195d77d1163bc9145205d03bb141 | refs/heads/master | 2020-07-02T20:02:19.914649 | 2019-09-13T17:44:05 | 2019-09-13T17:44:05 | 201,648,138 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 573 | py | # Generated by Django 2.1.11 on 2019-08-28 06:18
from django.db import migrations, models
class Migration(migrations.Migration):
    """Restrict ``Order.status`` to the Pending/Accepted/Producing choices,
    defaulting to Pending."""
    dependencies = [
        ('doorscalc', '0037_auto_20190822_1052'),
    ]
    operations = [
        migrations.AlterField(
            model_name='order',
            name='status',
            # help_text is intentionally Polish ("order status") -- it is
            # user-facing text, not a code comment.
            field=models.CharField(blank=True, choices=[('Pending', 'Pending'), ('Accepted', 'Accepted'), ('Producing', 'Producing')], default='Pending', help_text='Status zamówienia', max_length=50, null=True, verbose_name='Status'),
        ),
    ]
| [
"[email protected]"
] | |
adfd67c20156cb8b8ad74e8c8a720e9e2e48f5eb | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /QiPr3M5tsqfsbYcCQ_6.py | 971a4a4b867272607ab5fce9aa8d69b8c201cd28 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | py |
def square_digits(n):
    """Square every decimal digit of *n* and concatenate the results.

    Example: 9119 -> 811181 (9*9=81, 1*1=1, 1*1=1, 9*9=81).

    Args:
        n (int): a non-negative integer.
    Returns:
        int: the integer formed by concatenating each squared digit.
    Raises:
        ValueError: if n is negative.
    """
    if n < 0:
        raise ValueError("square_digits() requires a non-negative integer")
    # Bug fix: the original digit-peeling loop never ran for n == 0 and
    # then crashed on int("").  Walking str(n) handles every digit,
    # including the single zero digit of 0.
    final = int("".join(str(int(digit) ** 2) for digit in str(n)))
    print(final)
    return final
| [
"[email protected]"
] | |
e30f9dfb96f688439caf3db1cd6882c6de1688f0 | 81c6278a9d50e04794eb4b0fe35ec0d595ca60d1 | /cerulean/test/test_torque_scheduler.py | cee58c7bb4fb372013b4807428628b71862ece59 | [
"Apache-2.0"
] | permissive | romulogoncalves/cerulean | c5c028328873d49b326d5bba1520c6de41b23f16 | c61b4848a46f93e47653a474771ee6c443727c79 | refs/heads/master | 2020-08-02T09:31:15.982745 | 2019-01-21T09:26:38 | 2019-01-21T09:26:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,124 | py | from cerulean import JobDescription
from cerulean.torque_scheduler import (_get_field_from_qstat_xml,
_job_desc_to_job_script,
_seconds_to_time)
# Each test builds a JobDescription with one field set and checks that the
# generated Torque job script contains the matching #PBS directive / text.
def test_job_script_name() -> None:
    job_desc = JobDescription()
    job_desc.name = 'test_name'
    script = _job_desc_to_job_script(job_desc)
    assert '#PBS -N test_name' in script
def test_job_script_working_directory() -> None:
    # Note: doesn't test that it works, that's what test_scheduler is for
    job_desc = JobDescription()
    job_desc.working_directory = '/home/user/workdir'
    script = _job_desc_to_job_script(job_desc)
    assert '/home/user/workdir' in script
def test_job_script_command_args() -> None:
    # Note: doesn't test that it works, that's what test_scheduler is for
    job_desc = JobDescription()
    job_desc.command = 'echo'
    job_desc.arguments = ['-n', 'Hello world', 'testing']
    script = _job_desc_to_job_script(job_desc)
    assert "echo -n Hello world testing" in script
def test_job_script_stdout_file() -> None:
    # Note: doesn't test that it works, that's what test_scheduler is for
    job_desc = JobDescription()
    job_desc.stdout_file = '/home/user/test.out'
    script = _job_desc_to_job_script(job_desc)
    assert '/home/user/test.out' in script
def test_job_script_stderr_file() -> None:
    # Note: doesn't test that it works, that's what test_scheduler is for
    job_desc = JobDescription()
    job_desc.stderr_file = '/home/user/test.err'
    script = _job_desc_to_job_script(job_desc)
    assert '/home/user/test.err' in script
def test_job_script_queue_name() -> None:
    # Note: doesn't test that it works, that's what test_scheduler is for
    job_desc = JobDescription()
    job_desc.queue_name = 'testing_queue'
    script = _job_desc_to_job_script(job_desc)
    assert '#PBS -q testing_queue' in script
def test_job_script_time_reserved() -> None:
    """70 seconds of reserved time is rendered as DD:HH:MM:SS."""
    # Note: doesn't test that it works, that's what test_scheduler is for
    job_desc = JobDescription()
    job_desc.time_reserved = 70
    script = _job_desc_to_job_script(job_desc)
    assert '00:00:01:10' in script
def test_job_script_num_nodes() -> None:
    """The node count is emitted as a 'nodes=' resource request."""
    # Note: doesn't test that it works, that's what test_scheduler is for
    job_desc = JobDescription()
    job_desc.num_nodes = 42
    script = _job_desc_to_job_script(job_desc)
    assert 'nodes=42' in script
def test_job_script_processes_per_node() -> None:
    """MPI processes per node are emitted as a 'ppn=' resource request."""
    job_desc = JobDescription()
    job_desc.mpi_processes_per_node = 4
    script = _job_desc_to_job_script(job_desc)
    assert 'ppn=4' in script
def test_job_script_extra_scheduler_options() -> None:
    """Extra scheduler options are passed through on a '#PBS' line."""
    job_desc = JobDescription()
    job_desc.extra_scheduler_options = '-p 10'
    script = _job_desc_to_job_script(job_desc)
    assert '#PBS -p 10' in script
def test_seconds_to_time() -> None:
    """_seconds_to_time formats seconds as zero-padded DD:HH:MM:SS."""
    time = (2 * 24 * 60 * 60) + (13 * 60 * 60) + (7 * 60) + 48
    time_str = _seconds_to_time(time)
    assert time_str == '02:13:07:48'
    time_str = _seconds_to_time(2)
    assert time_str == '00:00:00:02'
| [
"[email protected]"
] | |
316b6c09614be0148d1c2b5160f106c633db3bdc | 9baa9f1bedf7bc973f26ab37c9b3046824b80ca7 | /venv-bck/lib/python2.7/site-packages/bson/__init__.py | 765e29a98d825e4e4fed5c0c32749a243ca0a4f2 | [] | no_license | shakthydoss/suriyan | 58774fc5de1de0a9f9975c2ee3a98900e0a5dff4 | 8e39eb2e65cc6c6551fc165b422b46d598cc54b8 | refs/heads/master | 2020-04-12T05:36:59.957153 | 2017-01-08T06:12:13 | 2017-01-08T06:12:13 | 59,631,349 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33,803 | py | # Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BSON (Binary JSON) encoding and decoding.
"""
import calendar
import collections
import datetime
import itertools
import struct
import sys
import uuid
import re
from bson.binary import (Binary, OLD_UUID_SUBTYPE,
JAVA_LEGACY, CSHARP_LEGACY,
UUIDLegacy)
from bson.code import Code
from bson.codec_options import (
CodecOptions, DEFAULT_CODEC_OPTIONS, _raw_document_class)
from bson.dbref import DBRef
from bson.errors import (InvalidBSON,
InvalidDocument,
InvalidStringData)
from bson.int64 import Int64
from bson.max_key import MaxKey
from bson.min_key import MinKey
from bson.objectid import ObjectId
from bson.py3compat import (b,
PY3,
iteritems,
text_type,
string_type,
reraise)
from bson.regex import Regex
from bson.son import SON, RE_TYPE
from bson.timestamp import Timestamp
from bson.tz_util import utc
from codecs import (utf_8_decode as _utf_8_decode,
utf_8_encode as _utf_8_encode)
try:
from bson import _cbson
_USE_C = True
except ImportError:
_USE_C = False
EPOCH_AWARE = datetime.datetime.fromtimestamp(0, utc)
EPOCH_NAIVE = datetime.datetime.utcfromtimestamp(0)
BSONNUM = b"\x01" # Floating point
BSONSTR = b"\x02" # UTF-8 string
BSONOBJ = b"\x03" # Embedded document
BSONARR = b"\x04" # Array
BSONBIN = b"\x05" # Binary
BSONUND = b"\x06" # Undefined
BSONOID = b"\x07" # ObjectId
BSONBOO = b"\x08" # Boolean
BSONDAT = b"\x09" # UTC Datetime
BSONNUL = b"\x0A" # Null
BSONRGX = b"\x0B" # Regex
BSONREF = b"\x0C" # DBRef
BSONCOD = b"\x0D" # Javascript code
BSONSYM = b"\x0E" # Symbol
BSONCWS = b"\x0F" # Javascript code with scope
BSONINT = b"\x10" # 32bit int
BSONTIM = b"\x11" # Timestamp
BSONLON = b"\x12" # 64bit int
BSONMIN = b"\xFF" # Min key
BSONMAX = b"\x7F" # Max key
_UNPACK_FLOAT = struct.Struct("<d").unpack
_UNPACK_INT = struct.Struct("<i").unpack
_UNPACK_LENGTH_SUBTYPE = struct.Struct("<iB").unpack
_UNPACK_LONG = struct.Struct("<q").unpack
_UNPACK_TIMESTAMP = struct.Struct("<II").unpack
def _raise_unknown_type(element_type, element_name):
    """Raise InvalidBSON for a BSON type byte we do not recognize."""
    message = ("Detected unknown BSON type %r for fieldname '%s'. Are "
               "you using the latest driver version?"
               % (element_type, element_name))
    raise InvalidBSON(message)
def _get_int(data, position, dummy0, dummy1, dummy2):
    """Decode a little-endian BSON int32 starting at *position*.

    Returns a ``(value, new_position)`` pair.
    """
    stop = position + 4
    return struct.unpack("<i", data[position:stop])[0], stop
def _get_c_string(data, position, opts):
"""Decode a BSON 'C' string to python unicode string."""
end = data.index(b"\x00", position)
return _utf_8_decode(data[position:end],
opts.unicode_decode_error_handler, True)[0], end + 1
def _get_float(data, position, dummy0, dummy1, dummy2):
    """Decode a little-endian BSON double starting at *position*.

    Returns a ``(value, new_position)`` pair.
    """
    stop = position + 8
    return struct.unpack("<d", data[position:stop])[0], stop
def _get_string(data, position, obj_end, opts, dummy):
    """Decode a BSON string to python unicode string.

    Returns a (value, new_position) pair.
    """
    # int32 byte count (which includes the trailing NUL) precedes the payload.
    length = _UNPACK_INT(data[position:position + 4])[0]
    position += 4
    # Length must cover at least the NUL and must fit inside the document.
    if length < 1 or obj_end - position < length:
        raise InvalidBSON("invalid string length")
    end = position + length - 1
    if data[end:end + 1] != b"\x00":
        raise InvalidBSON("invalid end of string")
    # Decode the payload (without the NUL) using the configured error handler.
    return _utf_8_decode(data[position:end],
                         opts.unicode_decode_error_handler, True)[0], end + 1
def _get_object(data, position, obj_end, opts, dummy):
    """Decode a BSON subdocument to opts.document_class or bson.dbref.DBRef.

    Returns a (document, new_position) pair.
    """
    obj_size = _UNPACK_INT(data[position:position + 4])[0]
    end = position + obj_size - 1
    # Every BSON document ends with a NUL byte ("end of object").
    if data[end:position + obj_size] != b"\x00":
        raise InvalidBSON("bad eoo")
    if end >= obj_end:
        raise InvalidBSON("invalid object length")
    if _raw_document_class(opts.document_class):
        # Raw document classes wrap the undecoded bytes directly.
        return (opts.document_class(data[position:end + 1], opts),
                position + obj_size)
    obj = _elements_to_dict(data, position + 4, end, opts)
    position += obj_size
    # A subdocument with a "$ref" key is decoded as a DBRef; the remaining
    # keys are kept as extra DBRef fields.
    if "$ref" in obj:
        return (DBRef(obj.pop("$ref"), obj.pop("$id", None),
                      obj.pop("$db", None), obj), position)
    return obj, position
def _get_array(data, position, obj_end, opts, element_name):
    """Decode a BSON array to python list.

    A BSON array is laid out like a document; the keys are skipped here
    and only the values are collected, in order.
    """
    size = _UNPACK_INT(data[position:position + 4])[0]
    end = position + size - 1
    if data[end:end + 1] != b"\x00":
        raise InvalidBSON("bad eoo")
    position += 4
    end -= 1
    result = []
    # Avoid doing global and attribute lookups in the loop.
    append = result.append
    index = data.index
    getter = _ELEMENT_GETTER
    while position < end:
        element_type = data[position:position + 1]
        # Just skip the keys (scan past the NUL-terminated name).
        position = index(b'\x00', position) + 1
        try:
            value, position = getter[element_type](
                data, position, obj_end, opts, element_name)
        except KeyError:
            _raise_unknown_type(element_type, element_name)
        append(value)
    return result, position + 1
def _get_binary(data, position, dummy0, opts, dummy1):
"""Decode a BSON binary to bson.binary.Binary or python UUID."""
length, subtype = _UNPACK_LENGTH_SUBTYPE(data[position:position + 5])
position += 5
if subtype == 2:
length2 = _UNPACK_INT(data[position:position + 4])[0]
position += 4
if length2 != length - 4:
raise InvalidBSON("invalid binary (st 2) - lengths don't match!")
length = length2
end = position + length
if subtype in (3, 4):
# Java Legacy
uuid_representation = opts.uuid_representation
if uuid_representation == JAVA_LEGACY:
java = data[position:end]
value = uuid.UUID(bytes=java[0:8][::-1] + java[8:16][::-1])
# C# legacy
elif uuid_representation == CSHARP_LEGACY:
value = uuid.UUID(bytes_le=data[position:end])
# Python
else:
value = uuid.UUID(bytes=data[position:end])
return value, end
# Python3 special case. Decode subtype 0 to 'bytes'.
if PY3 and subtype == 0:
value = data[position:end]
else:
value = Binary(data[position:end], subtype)
return value, end
def _get_oid(data, position, dummy0, dummy1, dummy2):
"""Decode a BSON ObjectId to bson.objectid.ObjectId."""
end = position + 12
return ObjectId(data[position:end]), end
def _get_boolean(data, position, dummy0, dummy1, dummy2):
"""Decode a BSON true/false to python True/False."""
end = position + 1
return data[position:end] == b"\x01", end
def _get_date(data, position, dummy0, opts, dummy1):
    """Decode a BSON datetime (int64 milliseconds since epoch) to
    python datetime.datetime."""
    end = position + 8
    millis = _UNPACK_LONG(data[position:end])[0]
    # Split into whole seconds and a non-negative millisecond remainder so
    # that pre-epoch (negative) timestamps are handled correctly.
    diff = ((millis % 1000) + 1000) % 1000
    seconds = (millis - diff) / 1000
    micros = diff * 1000
    if opts.tz_aware:
        dt = EPOCH_AWARE + datetime.timedelta(
            seconds=seconds, microseconds=micros)
        if opts.tzinfo:
            # Shift into the caller-configured timezone.
            dt = dt.astimezone(opts.tzinfo)
    else:
        dt = EPOCH_NAIVE + datetime.timedelta(
            seconds=seconds, microseconds=micros)
    return dt, end
def _get_code(data, position, obj_end, opts, element_name):
"""Decode a BSON code to bson.code.Code."""
code, position = _get_string(data, position, obj_end, opts, element_name)
return Code(code), position
def _get_code_w_scope(data, position, obj_end, opts, element_name):
"""Decode a BSON code_w_scope to bson.code.Code."""
code, position = _get_string(
data, position + 4, obj_end, opts, element_name)
scope, position = _get_object(data, position, obj_end, opts, element_name)
return Code(code, scope), position
def _get_regex(data, position, dummy0, opts, dummy1):
"""Decode a BSON regex to bson.regex.Regex or a python pattern object."""
pattern, position = _get_c_string(data, position, opts)
bson_flags, position = _get_c_string(data, position, opts)
bson_re = Regex(pattern, bson_flags)
return bson_re, position
def _get_ref(data, position, obj_end, opts, element_name):
"""Decode (deprecated) BSON DBPointer to bson.dbref.DBRef."""
collection, position = _get_string(
data, position, obj_end, opts, element_name)
oid, position = _get_oid(data, position, obj_end, opts, element_name)
return DBRef(collection, oid), position
def _get_timestamp(data, position, dummy0, dummy1, dummy2):
"""Decode a BSON timestamp to bson.timestamp.Timestamp."""
end = position + 8
inc, timestamp = _UNPACK_TIMESTAMP(data[position:end])
return Timestamp(timestamp, inc), end
def _get_int64(data, position, dummy0, dummy1, dummy2):
"""Decode a BSON int64 to bson.int64.Int64."""
end = position + 8
return Int64(_UNPACK_LONG(data[position:end])[0]), end
# Each decoder function's signature is:
# - data: bytes
# - position: int, beginning of object in 'data' to decode
# - obj_end: int, end of object to decode in 'data' if variable-length type
# - opts: a CodecOptions
_ELEMENT_GETTER = {
BSONNUM: _get_float,
BSONSTR: _get_string,
BSONOBJ: _get_object,
BSONARR: _get_array,
BSONBIN: _get_binary,
BSONUND: lambda v, w, x, y, z: (None, w), # Deprecated undefined
BSONOID: _get_oid,
BSONBOO: _get_boolean,
BSONDAT: _get_date,
BSONNUL: lambda v, w, x, y, z: (None, w),
BSONRGX: _get_regex,
BSONREF: _get_ref, # Deprecated DBPointer
BSONCOD: _get_code,
BSONSYM: _get_string, # Deprecated symbol
BSONCWS: _get_code_w_scope,
BSONINT: _get_int,
BSONTIM: _get_timestamp,
BSONLON: _get_int64,
BSONMIN: lambda v, w, x, y, z: (MinKey(), w),
BSONMAX: lambda v, w, x, y, z: (MaxKey(), w)}
def _element_to_dict(data, position, obj_end, opts):
    """Decode a single key, value pair.

    Layout is: one type byte, the NUL-terminated element name, then the
    type-specific payload. Returns (name, value, new_position).
    """
    element_type = data[position:position + 1]
    position += 1
    element_name, position = _get_c_string(data, position, opts)
    try:
        # Dispatch on the type byte to the matching decoder.
        value, position = _ELEMENT_GETTER[element_type](data, position,
                                                        obj_end, opts,
                                                        element_name)
    except KeyError:
        # Unrecognized type byte -> InvalidBSON with a helpful message.
        _raise_unknown_type(element_type, element_name)
    return element_name, value, position
if _USE_C:
_element_to_dict = _cbson._element_to_dict
def _iterate_elements(data, position, obj_end, opts):
end = obj_end - 1
while position < end:
(key, value, position) = _element_to_dict(data, position, obj_end, opts)
yield key, value
def _elements_to_dict(data, position, obj_end, opts):
"""Decode a BSON document."""
result = opts.document_class()
for key, value in _iterate_elements(data, position, obj_end, opts):
result[key] = value
return result
def _bson_to_dict(data, opts):
"""Decode a BSON string to document_class."""
try:
obj_size = _UNPACK_INT(data[:4])[0]
except struct.error as exc:
raise InvalidBSON(str(exc))
if obj_size != len(data):
raise InvalidBSON("invalid object size")
if data[obj_size - 1:obj_size] != b"\x00":
raise InvalidBSON("bad eoo")
try:
if _raw_document_class(opts.document_class):
return opts.document_class(data, opts)
return _elements_to_dict(data, 4, obj_size - 1, opts)
except InvalidBSON:
raise
except Exception:
# Change exception type to InvalidBSON but preserve traceback.
_, exc_value, exc_tb = sys.exc_info()
reraise(InvalidBSON, exc_value, exc_tb)
if _USE_C:
_bson_to_dict = _cbson._bson_to_dict
_PACK_FLOAT = struct.Struct("<d").pack
_PACK_INT = struct.Struct("<i").pack
_PACK_LENGTH_SUBTYPE = struct.Struct("<iB").pack
_PACK_LONG = struct.Struct("<q").pack
_PACK_TIMESTAMP = struct.Struct("<II").pack
_LIST_NAMES = tuple(b(str(i)) + b"\x00" for i in range(1000))
def gen_list_name():
"""Generate "keys" for encoded lists in the sequence
b"0\x00", b"1\x00", b"2\x00", ...
The first 1000 keys are returned from a pre-built cache. All
subsequent keys are generated on the fly.
"""
for name in _LIST_NAMES:
yield name
counter = itertools.count(1000)
while True:
yield b(str(next(counter))) + b"\x00"
def _make_c_string_check(string):
    """Make a 'C' string, checking for embedded NUL characters.

    Used for values that must not contain NUL (BSON keys and regex
    patterns). Accepts bytes (validated as UTF-8) or text.
    """
    if isinstance(string, bytes):
        if b"\x00" in string:
            raise InvalidDocument("BSON keys / regex patterns must not "
                                  "contain a NUL character")
        try:
            # Validate that the bytes are well-formed UTF-8 before use.
            _utf_8_decode(string, None, True)
            return string + b"\x00"
        except UnicodeError:
            raise InvalidStringData("strings in documents must be valid "
                                    "UTF-8: %r" % string)
    else:
        if "\x00" in string:
            raise InvalidDocument("BSON keys / regex patterns must not "
                                  "contain a NUL character")
        return _utf_8_encode(string)[0] + b"\x00"
def _make_c_string(string):
    """Make a 'C' string without checking for embedded NUL characters."""
    if isinstance(string, bytes):
        try:
            # Validate that the bytes are well-formed UTF-8 before use.
            _utf_8_decode(string, None, True)
            return string + b"\x00"
        except UnicodeError:
            raise InvalidStringData("strings in documents must be valid "
                                    "UTF-8: %r" % string)
    else:
        return _utf_8_encode(string)[0] + b"\x00"
if PY3:
def _make_name(string):
"""Make a 'C' string suitable for a BSON key."""
# Keys can only be text in python 3.
if "\x00" in string:
raise InvalidDocument("BSON keys / regex patterns must not "
"contain a NUL character")
return _utf_8_encode(string)[0] + b"\x00"
else:
# Keys can be unicode or bytes in python 2.
_make_name = _make_c_string_check
def _encode_float(name, value, dummy0, dummy1):
    """Encode a python float as a BSON double element (type byte 0x01)."""
    payload = struct.pack("<d", value)
    return b"\x01" + name + payload
if PY3:
def _encode_bytes(name, value, dummy0, dummy1):
"""Encode a python bytes."""
# Python3 special case. Store 'bytes' as BSON binary subtype 0.
return b"\x05" + name + _PACK_INT(len(value)) + b"\x00" + value
else:
def _encode_bytes(name, value, dummy0, dummy1):
"""Encode a python str (python 2.x)."""
try:
_utf_8_decode(value, None, True)
except UnicodeError:
raise InvalidStringData("strings in documents must be valid "
"UTF-8: %r" % (value,))
return b"\x02" + name + _PACK_INT(len(value) + 1) + value + b"\x00"
def _encode_mapping(name, value, check_keys, opts):
"""Encode a mapping type."""
if _raw_document_class(value):
return b'\x03' + name + value.raw
data = b"".join([_element_to_bson(key, val, check_keys, opts)
for key, val in iteritems(value)])
return b"\x03" + name + _PACK_INT(len(data) + 5) + data + b"\x00"
def _encode_dbref(name, value, check_keys, opts):
"""Encode bson.dbref.DBRef."""
buf = bytearray(b"\x03" + name + b"\x00\x00\x00\x00")
begin = len(buf) - 4
buf += _name_value_to_bson(b"$ref\x00",
value.collection, check_keys, opts)
buf += _name_value_to_bson(b"$id\x00",
value.id, check_keys, opts)
if value.database is not None:
buf += _name_value_to_bson(
b"$db\x00", value.database, check_keys, opts)
for key, val in iteritems(value._DBRef__kwargs):
buf += _element_to_bson(key, val, check_keys, opts)
buf += b"\x00"
buf[begin:begin + 4] = _PACK_INT(len(buf) - begin)
return bytes(buf)
def _encode_list(name, value, check_keys, opts):
"""Encode a list/tuple."""
lname = gen_list_name()
data = b"".join([_name_value_to_bson(next(lname), item,
check_keys, opts)
for item in value])
return b"\x04" + name + _PACK_INT(len(data) + 5) + data + b"\x00"
def _encode_text(name, value, dummy0, dummy1):
    """Encode a python unicode (python 2.x) / str (python 3.x) as a
    BSON string element (type byte 0x02)."""
    encoded = value.encode("utf-8")
    # Length prefix counts the payload plus the trailing NUL.
    size = struct.pack("<i", len(encoded) + 1)
    return b"\x02" + name + size + encoded + b"\x00"
def _encode_binary(name, value, dummy0, dummy1):
"""Encode bson.binary.Binary."""
subtype = value.subtype
if subtype == 2:
value = _PACK_INT(len(value)) + value
return b"\x05" + name + _PACK_LENGTH_SUBTYPE(len(value), subtype) + value
def _encode_uuid(name, value, dummy, opts):
"""Encode uuid.UUID."""
uuid_representation = opts.uuid_representation
# Python Legacy Common Case
if uuid_representation == OLD_UUID_SUBTYPE:
return b"\x05" + name + b'\x10\x00\x00\x00\x03' + value.bytes
# Java Legacy
elif uuid_representation == JAVA_LEGACY:
from_uuid = value.bytes
data = from_uuid[0:8][::-1] + from_uuid[8:16][::-1]
return b"\x05" + name + b'\x10\x00\x00\x00\x03' + data
# C# legacy
elif uuid_representation == CSHARP_LEGACY:
# Microsoft GUID representation.
return b"\x05" + name + b'\x10\x00\x00\x00\x03' + value.bytes_le
# New
else:
return b"\x05" + name + b'\x10\x00\x00\x00\x04' + value.bytes
def _encode_objectid(name, value, dummy0, dummy1):
"""Encode bson.objectid.ObjectId."""
return b"\x07" + name + value.binary
def _encode_bool(name, value, dummy0, dummy1):
"""Encode a python boolean (True/False)."""
return b"\x08" + name + (value and b"\x01" or b"\x00")
def _encode_datetime(name, value, dummy0, dummy1):
"""Encode datetime.datetime."""
if value.utcoffset() is not None:
value = value - value.utcoffset()
millis = int(calendar.timegm(value.timetuple()) * 1000 +
value.microsecond / 1000)
return b"\x09" + name + _PACK_LONG(millis)
def _encode_none(name, dummy0, dummy1, dummy2):
    """Encode python None as a BSON null element (type byte 0x0A, no payload)."""
    return b"\x0A" + name
def _encode_regex(name, value, dummy0, dummy1):
"""Encode a python regex or bson.regex.Regex."""
flags = value.flags
# Python 2 common case
if flags == 0:
return b"\x0B" + name + _make_c_string_check(value.pattern) + b"\x00"
# Python 3 common case
elif flags == re.UNICODE:
return b"\x0B" + name + _make_c_string_check(value.pattern) + b"u\x00"
else:
sflags = b""
if flags & re.IGNORECASE:
sflags += b"i"
if flags & re.LOCALE:
sflags += b"l"
if flags & re.MULTILINE:
sflags += b"m"
if flags & re.DOTALL:
sflags += b"s"
if flags & re.UNICODE:
sflags += b"u"
if flags & re.VERBOSE:
sflags += b"x"
sflags += b"\x00"
return b"\x0B" + name + _make_c_string_check(value.pattern) + sflags
def _encode_code(name, value, dummy, opts):
"""Encode bson.code.Code."""
cstring = _make_c_string(value)
cstrlen = len(cstring)
if not value.scope:
return b"\x0D" + name + _PACK_INT(cstrlen) + cstring
scope = _dict_to_bson(value.scope, False, opts, False)
full_length = _PACK_INT(8 + cstrlen + len(scope))
return b"\x0F" + name + full_length + _PACK_INT(cstrlen) + cstring + scope
def _encode_int(name, value, dummy0, dummy1):
    """Encode a python int as BSON int32 when it fits, else int64.

    Raises OverflowError for values outside the signed 64-bit range.
    """
    if -2147483648 <= value <= 2147483647:
        return b"\x10" + name + struct.pack("<i", value)
    try:
        return b"\x12" + name + struct.pack("<q", value)
    except struct.error:
        raise OverflowError("BSON can only handle up to 8-byte ints")
def _encode_timestamp(name, value, dummy0, dummy1):
"""Encode bson.timestamp.Timestamp."""
return b"\x11" + name + _PACK_TIMESTAMP(value.inc, value.time)
def _encode_long(name, value, dummy0, dummy1):
"""Encode a python long (python 2.x)"""
try:
return b"\x12" + name + _PACK_LONG(value)
except struct.error:
raise OverflowError("BSON can only handle up to 8-byte ints")
def _encode_minkey(name, dummy0, dummy1, dummy2):
"""Encode bson.min_key.MinKey."""
return b"\xFF" + name
def _encode_maxkey(name, dummy0, dummy1, dummy2):
"""Encode bson.max_key.MaxKey."""
return b"\x7F" + name
# Each encoder function's signature is:
# - name: utf-8 bytes
# - value: a Python data type, e.g. a Python int for _encode_int
# - check_keys: bool, whether to check for invalid names
# - opts: a CodecOptions
_ENCODERS = {
bool: _encode_bool,
bytes: _encode_bytes,
datetime.datetime: _encode_datetime,
dict: _encode_mapping,
float: _encode_float,
int: _encode_int,
list: _encode_list,
# unicode in py2, str in py3
text_type: _encode_text,
tuple: _encode_list,
type(None): _encode_none,
uuid.UUID: _encode_uuid,
Binary: _encode_binary,
Int64: _encode_long,
Code: _encode_code,
DBRef: _encode_dbref,
MaxKey: _encode_maxkey,
MinKey: _encode_minkey,
ObjectId: _encode_objectid,
Regex: _encode_regex,
RE_TYPE: _encode_regex,
SON: _encode_mapping,
Timestamp: _encode_timestamp,
UUIDLegacy: _encode_binary,
# Special case. This will never be looked up directly.
collections.Mapping: _encode_mapping,
}
_MARKERS = {
5: _encode_binary,
7: _encode_objectid,
11: _encode_regex,
13: _encode_code,
17: _encode_timestamp,
18: _encode_long,
100: _encode_dbref,
127: _encode_maxkey,
255: _encode_minkey,
}
if not PY3:
_ENCODERS[long] = _encode_long
def _name_value_to_bson(name, value, check_keys, opts):
"""Encode a single name, value pair."""
# First see if the type is already cached. KeyError will only ever
# happen once per subtype.
try:
return _ENCODERS[type(value)](name, value, check_keys, opts)
except KeyError:
pass
# Second, fall back to trying _type_marker. This has to be done
# before the loop below since users could subclass one of our
# custom types that subclasses a python built-in (e.g. Binary)
marker = getattr(value, "_type_marker", None)
if isinstance(marker, int) and marker in _MARKERS:
func = _MARKERS[marker]
# Cache this type for faster subsequent lookup.
_ENCODERS[type(value)] = func
return func(name, value, check_keys, opts)
# If all else fails test each base type. This will only happen once for
# a subtype of a supported base type.
for base in _ENCODERS:
if isinstance(value, base):
func = _ENCODERS[base]
# Cache this type for faster subsequent lookup.
_ENCODERS[type(value)] = func
return func(name, value, check_keys, opts)
raise InvalidDocument("cannot convert value of type %s to bson" %
type(value))
def _element_to_bson(key, value, check_keys, opts):
    """Encode a single key, value pair.

    :Parameters:
      - `key`: the document key; must be a string type
      - `value`: the value to encode
      - `check_keys`: if True, reject keys that start with '$' or
        contain '.'
      - `opts`: a CodecOptions
    """
    if not isinstance(key, string_type):
        raise InvalidDocument("documents must have only string keys, "
                              "key was %r" % (key,))
    if check_keys:
        if key.startswith("$"):
            raise InvalidDocument("key %r must not start with '$'" % (key,))
        if "." in key:
            raise InvalidDocument("key %r must not contain '.'" % (key,))
    # NUL-terminate (and NUL-check) the key, then dispatch on the value type.
    name = _make_name(key)
    return _name_value_to_bson(name, value, check_keys, opts)
def _dict_to_bson(doc, check_keys, opts, top_level=True):
    """Encode a document to BSON.

    Top-level documents have their "_id" element (if any) encoded first.
    """
    if _raw_document_class(doc):
        # Already-encoded raw documents are passed through untouched.
        return doc.raw
    try:
        elements = []
        if top_level and "_id" in doc:
            elements.append(_name_value_to_bson(b"_id\x00", doc["_id"],
                                                check_keys, opts))
        for (key, value) in iteritems(doc):
            if not top_level or key != "_id":
                elements.append(_element_to_bson(key, value,
                                                 check_keys, opts))
    except AttributeError:
        raise TypeError("encoder expected a mapping type but got: %r" % (doc,))
    encoded = b"".join(elements)
    # Prefix with total length: 4-byte length + payload + trailing NUL.
    return _PACK_INT(len(encoded) + 5) + encoded + b"\x00"
if _USE_C:
_dict_to_bson = _cbson._dict_to_bson
_CODEC_OPTIONS_TYPE_ERROR = TypeError(
"codec_options must be an instance of CodecOptions")
def decode_all(data, codec_options=DEFAULT_CODEC_OPTIONS):
"""Decode BSON data to multiple documents.
`data` must be a string of concatenated, valid, BSON-encoded
documents.
:Parameters:
- `data`: BSON data
- `codec_options` (optional): An instance of
:class:`~bson.codec_options.CodecOptions`.
.. versionchanged:: 3.0
Removed `compile_re` option: PyMongo now always represents BSON regular
expressions as :class:`~bson.regex.Regex` objects. Use
:meth:`~bson.regex.Regex.try_compile` to attempt to convert from a
BSON regular expression to a Python regular expression object.
Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
`codec_options`.
.. versionchanged:: 2.7
Added `compile_re` option. If set to False, PyMongo represented BSON
regular expressions as :class:`~bson.regex.Regex` objects instead of
attempting to compile BSON regular expressions as Python native
regular expressions, thus preventing errors for some incompatible
patterns, see `PYTHON-500`_.
.. _PYTHON-500: https://jira.mongodb.org/browse/PYTHON-500
"""
if not isinstance(codec_options, CodecOptions):
raise _CODEC_OPTIONS_TYPE_ERROR
docs = []
position = 0
end = len(data) - 1
use_raw = _raw_document_class(codec_options.document_class)
try:
while position < end:
obj_size = _UNPACK_INT(data[position:position + 4])[0]
if len(data) - position < obj_size:
raise InvalidBSON("invalid object size")
obj_end = position + obj_size - 1
if data[obj_end:position + obj_size] != b"\x00":
raise InvalidBSON("bad eoo")
if use_raw:
docs.append(
codec_options.document_class(
data[position:obj_end + 1], codec_options))
else:
docs.append(_elements_to_dict(data,
position + 4,
obj_end,
codec_options))
position += obj_size
return docs
except InvalidBSON:
raise
except Exception:
# Change exception type to InvalidBSON but preserve traceback.
_, exc_value, exc_tb = sys.exc_info()
reraise(InvalidBSON, exc_value, exc_tb)
if _USE_C:
decode_all = _cbson.decode_all
def decode_iter(data, codec_options=DEFAULT_CODEC_OPTIONS):
"""Decode BSON data to multiple documents as a generator.
Works similarly to the decode_all function, but yields one document at a
time.
`data` must be a string of concatenated, valid, BSON-encoded
documents.
:Parameters:
- `data`: BSON data
- `codec_options` (optional): An instance of
:class:`~bson.codec_options.CodecOptions`.
.. versionchanged:: 3.0
Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
`codec_options`.
.. versionadded:: 2.8
"""
if not isinstance(codec_options, CodecOptions):
raise _CODEC_OPTIONS_TYPE_ERROR
position = 0
end = len(data) - 1
while position < end:
obj_size = _UNPACK_INT(data[position:position + 4])[0]
elements = data[position:position + obj_size]
position += obj_size
yield _bson_to_dict(elements, codec_options)
def decode_file_iter(file_obj, codec_options=DEFAULT_CODEC_OPTIONS):
    """Decode bson data from a file to multiple documents as a generator.

    Works similarly to the decode_all function, but reads from the file object
    in chunks and parses bson in chunks, yielding one document at a time.

    :Parameters:
      - `file_obj`: A file object containing BSON data.
      - `codec_options` (optional): An instance of
        :class:`~bson.codec_options.CodecOptions`.

    .. versionchanged:: 3.0
       Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
       `codec_options`.

    .. versionadded:: 2.8
    """
    while True:
        # Read size of next object.
        size_data = file_obj.read(4)
        if len(size_data) == 0:
            break # Finished with file normally.
        elif len(size_data) != 4:
            raise InvalidBSON("cut off in middle of objsize")
        # The length prefix includes its own 4 bytes; read the remainder.
        obj_size = _UNPACK_INT(size_data)[0] - 4
        elements = size_data + file_obj.read(obj_size)
        yield _bson_to_dict(elements, codec_options)
def is_valid(bson):
    """Check that the given string represents valid :class:`BSON` data.

    Raises :class:`TypeError` if `bson` is not an instance of
    :class:`str` (:class:`bytes` in python 3). Returns ``True``
    if `bson` is valid :class:`BSON`, ``False`` otherwise.

    :Parameters:
      - `bson`: the data to be validated
    """
    if not isinstance(bson, bytes):
        raise TypeError("BSON data must be an instance of a subclass of bytes")
    try:
        # Any decoding failure (bad length, truncation, bad UTF-8, ...)
        # means the data is not valid BSON.
        _bson_to_dict(bson, DEFAULT_CODEC_OPTIONS)
        return True
    except Exception:
        return False
class BSON(bytes):
    """BSON (Binary JSON) data.
    """
    @classmethod
    def encode(cls, document, check_keys=False,
               codec_options=DEFAULT_CODEC_OPTIONS):
        """Encode a document to a new :class:`BSON` instance.

        A document can be any mapping type (like :class:`dict`).

        Raises :class:`TypeError` if `document` is not a mapping type,
        or contains keys that are not instances of
        :class:`basestring` (:class:`str` in python 3). Raises
        :class:`~bson.errors.InvalidDocument` if `document` cannot be
        converted to :class:`BSON`.

        :Parameters:
          - `document`: mapping type representing a document
          - `check_keys` (optional): check if keys start with '$' or
            contain '.', raising :class:`~bson.errors.InvalidDocument` in
            either case
          - `codec_options` (optional): An instance of
            :class:`~bson.codec_options.CodecOptions`.

        .. versionchanged:: 3.0
           Replaced `uuid_subtype` option with `codec_options`.
        """
        if not isinstance(codec_options, CodecOptions):
            raise _CODEC_OPTIONS_TYPE_ERROR
        return cls(_dict_to_bson(document, check_keys, codec_options))
    def decode(self, codec_options=DEFAULT_CODEC_OPTIONS):
        """Decode this BSON data.

        By default, returns a BSON document represented as a Python
        :class:`dict`. To use a different :class:`MutableMapping` class,
        configure a :class:`~bson.codec_options.CodecOptions`::

            >>> import collections  # From Python standard library.
            >>> import bson
            >>> from bson.codec_options import CodecOptions
            >>> data = bson.BSON.encode({'a': 1})
            >>> decoded_doc = bson.BSON.decode(data)
            >>> type(decoded_doc)
            <type 'dict'>
            >>> options = CodecOptions(document_class=collections.OrderedDict)
            >>> decoded_doc = bson.BSON.decode(data, codec_options=options)
            >>> type(decoded_doc)
            <class 'collections.OrderedDict'>

        :Parameters:
          - `codec_options` (optional): An instance of
            :class:`~bson.codec_options.CodecOptions`.

        .. versionchanged:: 3.0
           Removed `compile_re` option: PyMongo now always represents BSON
           regular expressions as :class:`~bson.regex.Regex` objects. Use
           :meth:`~bson.regex.Regex.try_compile` to attempt to convert from a
           BSON regular expression to a Python regular expression object.
           Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
           `codec_options`.

        .. versionchanged:: 2.7
           Added `compile_re` option. If set to False, PyMongo represented BSON
           regular expressions as :class:`~bson.regex.Regex` objects instead of
           attempting to compile BSON regular expressions as Python native
           regular expressions, thus preventing errors for some incompatible
           patterns, see `PYTHON-500`_.

        .. _PYTHON-500: https://jira.mongodb.org/browse/PYTHON-500
        """
        if not isinstance(codec_options, CodecOptions):
            raise _CODEC_OPTIONS_TYPE_ERROR
        return _bson_to_dict(self, codec_options)
def has_c():
"""Is the C extension installed?
"""
return _USE_C
| [
"[email protected]"
] | |
e9f18e7cf838a0d9ad2b3ff178bfda18e06dd1ec | ceb75e50d77b962edbe866b0640271cdd4721be9 | /hash.py | d986345513156fa9df62d089ac9bfcc250367c63 | [] | no_license | rocket3989/hashCode2020 | 9394dc52e66c085e14fdd6f4152d6605f426b79a | 6fd287381c3b32813eb3f63558d3afd83bd0ebbf | refs/heads/master | 2021-01-08T11:14:36.956131 | 2020-02-27T02:16:50 | 2020-02-27T02:16:50 | 242,014,611 | 3 | 7 | null | null | null | null | UTF-8 | Python | false | false | 2,693 | py | B, L, D = [int(x) for x in input().split()]
score = [int(x) for x in input().split()]
freq = [0 for i in range(B)]
from heapq import heappop, heappush, heapify
import random
calls = 0
class library:
    """One library from the input: signup time, shipping rate and book ids.

    NOTE(review): methods read the module-level globals `D` (days left,
    decremented by the main loop), `score` (per-book scores) and `seen`
    (books already shipped by some library).
    """
    def __init__(self, count, time, rate, books):
        # count: number of books; time: signup days; rate: books shipped/day
        self.count = count
        self.time = time
        self.rate = rate
        self.books = books
        # random.shuffle(self.books)
    def bookOrder(self):
        """Sort this library's books by descending score (in place)."""
        bookList = []
        for book in self.books:
            # Negate the score so an ascending sort yields descending scores.
            bookList.append((-score[book], book))
        bookList.sort()
        for i in range(len(bookList)):
            bookList[i] = bookList[i][1]
        self.books = bookList
    def heuristic(self):
        """Estimate value per signup day: sum of the best still-unseen
        books this library could ship in the remaining days, divided by
        its signup time. Returns 0 when nothing could be shipped."""
        # Maximum number of books shippable after signing up.
        count = (D - self.time) * self.rate
        if count <= 0: return 0
        self.bookOrder()
        sumOf = 0
        for book in self.books:
            if seen[book]: continue
            sumOf += score[book]
            count -= 1
            if count == 0: break
        return (sumOf) / self.time
# Greedy scan state: all libraries, a lazy max-heap of them, and which
# books have already been shipped.
libs = []
h = []
seen = [False for b in range(B)]
# Read each library: header line (N books, T signup days, M books/day),
# then the line of book ids.
for l in range(L):
    N, T, M = [int(x) for x in input().split()] # books, time, books/day
    ids = [int(x) for x in input().split()] # ids of books
    for i in ids:
        freq[i] += 1
    libs.append(library(N, T, M, ids))
# Max-heap via negated heuristic scores.
for l in range(L):
    h.append((-libs[l].heuristic(), l))
heapify(h)
sumOf = 0
output = []
# Lazy greedy: pop the best library; if its cached score is stale (its
# heuristic dropped because books were taken or days passed), push it
# back with the fresh score instead of signing it up.
while len(h):
    val, l = heappop(h)
    val = -val
    newScore = libs[l].heuristic()
    if libs[l].time >= D:
        continue
    if val > newScore:
        heappush(h, (-newScore, l))
        continue
    if D < 0: break
    # Sign up library l: spend its signup time, then ship as many unseen
    # books (already in descending-score order) as the days allow.
    D -= libs[l].time
    findable = D * libs[l].rate
    books = libs[l].books
    out = []
    for book in books:
        if seen[book]: continue
        seen[book] = True
        out.append(book)
        sumOf += score[book]
        findable -= 1
        if findable == 0: break
    if len(out) == 0:
        # Nothing new to ship: refund the signup days and drop the library.
        D += libs[l].time
        continue
    else:
        output.append((l, len(out)))
        output.append(out)
    for book in libs[l].books:
        freq[book] -= 1
# NOTE(review): printing sumOf first is not part of the Hash Code
# submission format; presumably it is stripped or ignored downstream.
print(sumOf)
print(len(output) // 2)
for line in output:
    print(*line)
"""
python3 hash.py < a.txt > ao.txt && python3 hash.py < b.txt > bo.txt && python3 hash.py < c.txt > co.txt && python3 hash.py < d.txt > do.txt && python3 hash.py < e.txt > eo.txt && python3 hash.py < f.txt > fo.txt
"""
| [
"[email protected]"
] | |
f8a0b1b30cd967fa9b3450c9cfb4207194c8ec86 | 8c680fc22c005aadc84deff33850d819e943124a | /open-test-data/rfc4475-sip-torture-test/unksm2.dat.py | a19935571b6edd931465bffe6ed24bb66262a1db | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | bobjects/BobStack | abc824b8513d826cbc725e8fbd3837ae8e03d646 | c177b286075044832f44baf9ace201780c8b4320 | refs/heads/master | 2020-04-06T05:12:35.041273 | 2017-10-08T23:39:35 | 2017-10-08T23:39:35 | 50,403,711 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | messageString = (
# RFC 4475 SIP torture-test message "unksm2": a REGISTER whose To/From/
# Contact URIs carry non-SIP schemes (isbn:, http:, name:). One header
# line per string literal; adjacent-literal concatenation joins them into
# a single CRLF-delimited message ending with the blank line after 'l: 0'.
'REGISTER sip:example.com SIP/2.0\r\n'
'To: isbn:2983792873\r\n'
'From: <http://www.example.com>;tag=3234233\r\n'
'Call-ID: [email protected]\r\n'
'CSeq: 234902 REGISTER\r\n'
'Max-Forwards: 70\r\n'
'Via: SIP/2.0/UDP 192.0.2.21:5060;branch=z9hG4bKkdjuw\r\n'
'Contact: <name:John_Smith>\r\n'
'l: 0\r\n'
'\r\n'
)
| [
"[email protected]"
] | |
4ea0b9b40fea28cf979342e613ffcf0c2010e2d2 | d52ee2f7ec5dcd8825f4e221a7f084d488d35634 | /new_scripts/cluster_rerank/rerank_match_dress.py | 442dbeed13eb4e02caf9a03b28d1e9dadb8028df | [] | no_license | rekriz11/sockeye-recipes | 9dbf96140e4d9d546210dd1c29801132e1b9201c | 644363b92e2f38311cc2b7e926b6558aa41900f3 | refs/heads/master | 2020-03-29T16:52:52.542574 | 2020-03-13T18:18:25 | 2020-03-13T18:18:25 | 150,131,769 | 5 | 3 | null | 2018-09-24T16:15:47 | 2018-09-24T16:15:46 | null | UTF-8 | Python | false | false | 9,656 | py | import kenlm
import sys
import gensim.models as g
from sklearn.metrics.pairwise import cosine_similarity
import numpy as np
from fractions import Fraction
from collections import Counter
## Debugging helper: print every candidate with its score under a title.
def debug_output(sentences, scores, title):
    print(title)
    for i, group in enumerate(sentences):
        for j in range(len(group)):
            # Tab-separated: score, then the detokenized candidate.
            print(str(scores[i][j]) + "\t" + " ".join(group[j]))
    print("\n")
## Flattens a two-dimensional list into a single flat list.
def flatten(listoflists):
    # Avoids the original's shadowing of the builtin name `list`.
    flat = []
    for sublist in listoflists:
        flat.extend(sublist)
    return flat
## Normalizes list of lists so all numbers are between 0 and 1
def normalize(listy):
    """Min-max scale every number in `listy` (a list of lists) to [0, 1].

    The global minimum maps to 0.0 and the global maximum to 1.0, with
    the nested structure preserved. When all values are equal (so the
    span is zero), every entry maps to 0.0 -- the original raised
    ZeroDivisionError in that case.
    """
    flat = flatten(listy)
    maxy = max(flat)
    miny = min(flat)
    span = maxy - miny
    if span == 0:
        # Degenerate input: no spread to normalize over.
        return [[0.0 for l in ls] for ls in listy]
    return [[(l - miny) / span for l in ls] for ls in listy]
## Gets all sentences from a file
def get_sents(file):
    """Read `file` and return one sentence per line, each a list of
    lowercased space-split tokens.

    Strips trailing newlines with rstrip rather than slicing off the
    last character, so the final line is not truncated when the file
    lacks a trailing newline (the original `line[:-1]` dropped its last
    character in that case).
    """
    sentences = []
    with open(file, 'r', encoding='utf8') as f:
        for line in f:
            tokens = line.rstrip('\n').split(" ")
            sentences.append([t.lower() for t in tokens])
    return sentences
## Gets all candidate simplifications: one line per source sentence,
## tab-separated candidates, each tokenized by splitting on spaces.
def get_simple_sents(candidates_file):
    all_candidates = []
    with open(candidates_file, 'r', encoding='utf8') as f:
        for line in f:
            candidates = line[:-1].split("\t")
            all_candidates.append([c.split(" ") for c in candidates])
    return all_candidates
## Gets normalized language-model scores of the candidate simplifications.
def get_perplexities(sentences, lm):
    # Score each detokenized candidate with the kenlm model, keeping the
    # nested per-source-sentence structure, then min-max normalize.
    perplexities = [
        [lm.score(" ".join(sent)) for sent in sents]
        for sents in sentences
    ]
    return normalize(perplexities)
## Reads tab-separated complexity predictions from file and inverts them
## (4 - prediction) so that higher means simpler, then normalizes.
## The `sentences` parameter is unused but kept for interface stability.
def load_comp_preds(comp_pred_file, sentences):
    scores = []
    with open(comp_pred_file, 'r', encoding='utf8') as f:
        for line in f:
            preds = line[:-1].split("\t")
            scores.append([4 - float(p) for p in preds])
    return normalize(scores)
## Infers a doc2vec vector for each complex (source) sentence.
def get_complex_embeddings(sents, model, start_alpha, infer_epoch):
    embeddings = []
    for idx, sent in enumerate(sents):
        if idx % 100 == 0:
            print(idx)  # progress indicator
        embeddings.append(
            model.infer_vector(sent, alpha=start_alpha, steps=infer_epoch)
        )
    return embeddings
## Infers a doc2vec vector for every candidate of every sentence; each
## sentence's candidate vectors are stacked into one numpy array.
def get_embeddings(sents, model, start_alpha, infer_epoch):
    embeddings = []
    for idx, group in enumerate(sents):
        if idx % 100 == 0:
            print(idx)  # progress indicator
        vectors = [
            model.infer_vector(sent, alpha=start_alpha, steps=infer_epoch)
            for sent in group
        ]
        embeddings.append(np.asarray(vectors))
    return embeddings
## Calculates normalized cosine similarities between each complex sentence
## embedding and each of its candidates' embeddings.
def get_sims(complex_embeddings, embeddings, sentences):
    similarities = []
    for i, candidate_vectors in enumerate(embeddings):
        # The source vector is loop-invariant for a given i; reshape once.
        source_vector = complex_embeddings[i].reshape(1, -1)
        sims = [
            cosine_similarity(vec.reshape(1, -1), source_vector)[0][0]
            for vec in candidate_vectors
        ]
        similarities.append(sims)
    return normalize(similarities)
## Ranks candidates based on average of fluency, relevancy, and simplicity
def rank_candidates(sentences, dress_sents, perplexities, comp_preds, similarities, weights, diff):
    """For each source sentence, select one candidate simplification.

    Candidates are scored by a weighted sum of the three normalized
    component scores; the chosen candidate is the best-ranked one in the
    top half of the ranking whose token length is closest to the
    corresponding DRESS output length plus `diff`. Prints diagnostics
    (average chosen rank and a rank histogram) before returning.
    """
    top_sentences = []
    max_indices = []
    for i in range(len(sentences)):
        # Weighted combination of fluency, simplicity and relevancy.
        scores = []
        for j in range(len(sentences[i])):
            score = weights[0]*perplexities[i][j] + \
                    weights[1]*comp_preds[i][j] + \
                    weights[2]*similarities[i][j]
            scores.append(score)
        '''
        for c, sent in enumerate(sentences[i]):
            print(str(c) + "\t" + str(sent))
        print()
        '''
        # Candidate indices best-first, their token lengths, and the
        # target length derived from the DRESS baseline output.
        ranked_indices = sorted(range(len(scores)), key=lambda k: scores[k], reverse=True)
        sent_lengths = [len(sentences[i][s]) for s in ranked_indices]
        dress_length = len(dress_sents[i]) + diff
        '''
        for c in range(len(ranked_indices)):
            print(str(c) + "\t" + str(sentences[i][ranked_indices[c]]))
        print("SENTENCE LENGTHS FOR SENTENCE " + str(i))
        print(sent_lengths)
        print("TARGET LENGTH: " + str(dress_length))
        '''
        ## Gets the highest ranked sentence of the same length as dress output
        # Widen the accepted length window (temp_diff) one token at a
        # time until some candidate in the TOP HALF of the ranking
        # matches. NOTE(review): if a sentence has fewer than two
        # candidates, int(len/2) is 0 and this while-loop never
        # terminates -- confirm upstream always supplies >= 2 candidates.
        max_index = -1
        temp_diff = 0
        while max_index == -1:
            for ind in range(int(len(ranked_indices)/2)):
                '''
                print("Comparison: " + str(sent_lengths[ind]) + ", " + \
                      str(dress_length - diff) + ", " + \
                      str(dress_length + diff))
                '''
                if sent_lengths[ind] == dress_length - temp_diff \
                   or sent_lengths[ind] == dress_length + temp_diff:
                    max_index = ind
                    break
            temp_diff += 1
        '''
        print("RANK OF SENTENCE: " + str(max_index))
        print("ORIGINAL INDEX OF SENTENCE: " + str(ranked_indices[max_index]))
        print(sentences[i][ranked_indices[max_index]])
        print("\n")
        '''
        top_sentences.append(sentences[i][ranked_indices[max_index]])
        max_indices.append(max_index)
    # Diagnostics: average chosen rank, then "rank: count, fraction".
    print(sum(max_indices)/len(max_indices))
    ind_dict = dict(Counter(max_indices))
    for i in range(max(ind_dict.keys()) + 1):
        try:
            print(str(i) + ": " + str(ind_dict[i]) + ", " + \
                  str(round(ind_dict[i] / sum(list(ind_dict.values())), 3)))
        except KeyError:
            continue
    return top_sentences
## Writes one detokenized (space-joined) sentence per line to output_file.
def save_sentences(sentences, output_file):
    with open(output_file, 'w', encoding='utf8') as out:
        out.writelines(" ".join(sent) + "\n" for sent in sentences)
def main(doc2vec_file, lm_file, comp_pred_file, candidates_file, \
         complex_file, weights, dress_file, diff, output_file):
    """Run the full reranking pipeline: load sentences and models, score
    every candidate for fluency/simplicity/relevancy, and write the
    selected candidate per source sentence to `output_file`.

    The commented-out assignments below are cheap stand-ins used when
    debugging without the doc2vec model.
    """
    ## Get complex sentences
    print("Reading in complex sentences...")
    complex_sents = get_sents(complex_file)
    print(len(complex_sents))
    print(complex_sents[0])
    ## Get dress sentences
    print("Reading in DRESS sentences...")
    dress_sents = get_sents(dress_file)
    # NOTE(review): these two prints repeat complex_sents; presumably
    # dress_sents was intended (diagnostic output only).
    print(len(complex_sents))
    print(complex_sents[0])
    ## Gets candidate simplifications
    print("Reading in candidates...")
    sentences = get_simple_sents(candidates_file)
    print(len(sentences))
    print(len(sentences[0]))
    ## Loads language model
    print("Loading kenlm model...")
    lm = kenlm.Model(lm_file)
    ## Get perplexity scores for each candidate sentence
    print("Calculating perplexities...")
    perplexities = get_perplexities(sentences, lm)
    ## Loads sentence complexity predictions
    print("Getting complexity predictions...")
    comp_preds = load_comp_preds(comp_pred_file, sentences)
    ## Loads doc2vec model
    print("Loading doc2vec model...")
    doc2vec = g.Doc2Vec.load(doc2vec_file)
    ## Gets embeddings for test sentences
    print("Getting complex embeddings...")
    start_alpha = 0.01
    infer_epoch = 1000
    complex_embeddings = get_complex_embeddings(complex_sents, doc2vec, start_alpha, infer_epoch)
    #complex_embeddings = [[0 for i in range(300)] for sent in complex_sents]
    ## Gets embeddings for each sentence
    print("Getting embeddings...")
    embeddings = get_embeddings(sentences, doc2vec, start_alpha, infer_epoch)
    #embeddings = [[[0 for i in range(300)] for sent in sents] for sents in sentences]
    ## Calculate cosine similarities between complex and simple sentences
    print("Calculating similarities...")
    similarities = get_sims(complex_embeddings, embeddings, sentences)
    #similarities = [[1 for i in range(len(s))] for s in sentences]
    print("Rerank sentences...")
    ## Reranks sentences based on average of fluency, relevancy, and simplicity
    top_sentences = rank_candidates(sentences, dress_sents, perplexities, comp_preds, similarities, weights, diff)
    save_sentences(top_sentences, output_file)
if __name__ == '__main__':
    # CLI argument order (see usage example below):
    #   1 doc2vec model, 2 kenlm LM, 3 complexity predictions,
    #   4 candidate simplifications, 5 complex (source) sentences,
    #   6-8 scoring weights (fractions such as 1/3 accepted),
    #   9 DRESS baseline output, 10 length offset, 11 output path.
    # NOTE: the original named argv[4]/argv[5] the other way around and
    # compensated by swapping them again in the call to main(); the names
    # below match the actual meaning and main() receives identical values.
    doc2vec_file = sys.argv[1]
    lm_file = sys.argv[2]
    comp_pred_file = sys.argv[3]
    candidates_file = sys.argv[4]
    complex_file = sys.argv[5]
    weight1 = float(Fraction(sys.argv[6]))
    weight2 = float(Fraction(sys.argv[7]))
    weight3 = float(Fraction(sys.argv[8]))
    weights = [weight1, weight2, weight3]
    dress_file = sys.argv[9]
    diff = int(sys.argv[10])
    output_file = sys.argv[11]
    main(doc2vec_file, lm_file, comp_pred_file, candidates_file, \
         complex_file, weights, dress_file, diff, output_file)
'''
python ~/sockeye-recipes/new_scripts/cluster_rerank/rerank_match_dress.py \
/data2/text_simplification/embeddings/enwiki_dbow/doc2vec.bin \
/data2/text_simplification/models/lm/lm-merged.kenlm \
~/sockeye-recipes/egs/pretrained_embeddings/output/tokens.100beam.20centroids.preds \
~/sockeye-recipes/egs/pretrained_embeddings/output/tokens.100beam.20centroids \
~/sockeye-recipes/egs/pretrained_embeddings/data/newsela_Zhang_Lapata_splits/V0V4_V1V4_V2V4_V3V4_V0V3_V0V2_V1V3.aner.ori.test.src \
1 0 0 \
/data2/text_simplification/output/dress-ls.BASELINE \
0 \
~/sockeye-recipes/egs/pretrained_embeddings/output/tokens.100beam.top1reranked_perplexity
'''
| [
"[email protected]"
] | |
cdeb4e8690477000342cd59097c9cc23be076db6 | 4e5b233311bdb5872fd5f862f90b3c6e56f56bc2 | /Day_16_CoffeeMachine.py | d342630146136aa15eca126c5e01696d1bebea25 | [] | no_license | asher-lab/100daysofpython | 02b7c6703b82683f824e3a5b21d41233141827df | 046f16fddb1073195f320e0f0f6e091c35f10314 | refs/heads/main | 2023-07-01T17:55:57.654898 | 2021-08-11T02:38:59 | 2021-08-11T02:38:59 | 388,103,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 661 | py | from menu import Menu, MenuItem
from coffee_maker import CoffeeMaker
from money_machine import MoneyMachine
#declaring objects
menu = Menu()
coffee_maker = CoffeeMaker()
money_machine = MoneyMachine()
# Main REPL: loop until the (undocumented) "off" command is entered.
is_on = True
while is_on:
    options = menu.get_items()
    choice = input(f"What would you like? {options}:")
    if choice == "off":
        is_on = False
    elif choice == "report":
        # Print remaining resources and money collected so far.
        coffee_maker.report()
        money_machine.report()
    else:
        drink = menu.find_drink(choice)
        # Only brew when ingredients suffice and the payment clears.
        if coffee_maker.is_resource_sufficient(drink):
            if money_machine.make_payment(drink.cost):
                coffee_maker.make_coffee(drink)
| [
"[email protected]"
] | |
bc9ae930f9b3a8b12ccb821e89bb277f81c1a19d | 76d3ec49d75ca5cef755e612a97ddcbc34f62bf3 | /data_integration/parallel_tasks/sql.py | 724456ec4dcef6e187d023a7fe4298906fd58d8a | [
"MIT"
] | permissive | ierosodin/data-integration | 8f7e9500bda03c8316061452bc502d68cfcd33da | f3ee414e8b8994e5b740a374c0594e40862ff6e9 | refs/heads/master | 2021-01-02T14:58:10.752745 | 2020-02-13T00:58:58 | 2020-02-13T00:58:58 | 239,665,119 | 0 | 0 | MIT | 2020-02-11T03:11:37 | 2020-02-11T03:11:36 | null | UTF-8 | Python | false | false | 3,315 | py | import inspect
import re
import typing
from mara_page import _, html
from .. import config, pipelines
from ..commands import sql
class ParallelExecuteSQL(pipelines.ParallelTask, sql._SQLCommand):
    """Parallel pipeline task that runs the same SQL statement or file
    once per parameter tuple returned by `parameter_function`, with the
    tuple's values substituted for `parameter_placeholders` in the SQL.
    Exactly one of `sql_statement` / `file_name` must be provided."""
    def __init__(self, id: str, description: str, parameter_function: typing.Callable, parameter_placeholders: [str],
                 max_number_of_parallel_tasks: int = None, sql_statement: str = None, file_name: str = None,
                 commands_before: [pipelines.Command] = None, commands_after: [pipelines.Command] = None,
                 db_alias: str = None, echo_queries: bool = True, timezone: str = None,
                 replace: {str: str} = None) -> None:
        # Enforce "exactly one of sql_statement / file_name".
        if (not (sql_statement or file_name)) or (sql_statement and file_name):
            raise ValueError('Please provide either sql_statement or file_name (but not both)')
        pipelines.ParallelTask.__init__(self, id=id, description=description,
                                        max_number_of_parallel_tasks=max_number_of_parallel_tasks,
                                        commands_before=commands_before, commands_after=commands_after)
        sql._SQLCommand.__init__(self, sql_statement, file_name, replace)
        self.parameter_function = parameter_function
        self.parameter_placeholders = parameter_placeholders
        self._db_alias = db_alias
        self.timezone = timezone
        self.echo_queries = echo_queries
    @property
    def db_alias(self):
        # Fall back to the globally configured default database alias.
        return self._db_alias or config.default_db_alias()
    def add_parallel_tasks(self, sub_pipeline: 'pipelines.Pipeline') -> None:
        """Expand into one sub-task per tuple from parameter_function()."""
        parameters = self.parameter_function()
        if not isinstance(parameters, list) or not all(isinstance(item, tuple) for item in parameters):
            raise ValueError(f'parameter function should return a list of tuples, got "{repr(parameters)}"')
        for parameter_tuple in parameters:
            # Task id derived from the tuple: lower-cased, '-' mapped to
            # '_', anything outside [0-9a-z_-] stripped, parts joined by
            # '-'. NOTE(review): the pattern is not a raw string, so the
            # '\-' escape is deprecated; prefer r'[^0-9a-z\-_]+'.
            id = '-'.join([re.sub('[^0-9a-z\-_]+', '', str(x).lower().replace('-', '_')) for x in parameter_tuple])
            # Per-task replacements: base replaces plus placeholder->value
            # bindings for this tuple.
            replace = self.replace.copy()
            for placeholder, param in zip(self.parameter_placeholders, parameter_tuple):
                replace[placeholder] = param
            sub_pipeline.add(pipelines.Task(
                id=id, description=f'Execute SQL for parameters {repr(parameter_tuple)}',
                commands=[
                    sql.ExecuteSQL(sql_file_name=self.sql_file_name, db_alias=self.db_alias,
                                   echo_queries=self.echo_queries, timezone=self.timezone, replace=replace)
                    if self.sql_file_name else
                    sql.ExecuteSQL(sql_statement=self.sql_statement, db_alias=self.db_alias,
                                   echo_queries=self.echo_queries, timezone=self.timezone, replace=replace)]))
    def html_doc_items(self) -> [(str, str)]:
        """Key/value pairs rendered in the web UI documentation."""
        return [('db', _.tt[self.db_alias])] \
               + sql._SQLCommand.html_doc_items(self, self.db_alias) \
               + [('parameter function', html.highlight_syntax(inspect.getsource(self.parameter_function), 'python')),
                  ('parameter placeholders', _.tt[repr(self.parameter_placeholders)]),
                  ('echo queries', _.tt[str(self.echo_queries)]),
                  ('timezone', _.tt[self.timezone or ''])]
| [
"[email protected]"
] | |
0384d7dfb9a7e53271b18c52096a581c20d45dc2 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_46/71.py | 24e9421799bdf1037f657a345661908082b4f683 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,005 | py | input_file = "A-large.in";
output_file = "0.out";
# NOTE(review): this shadows the builtin input(); the `output` file is
# opened but never written to -- answers go to stdout via print().
input = open(input_file,"r");
output = open(output_file,"w+");
num_tests = int(input.readline());
for test in range(num_tests):
    size = int(input.readline());
    matrix = []
    a = []
    for i in range(size):
        gg = input.readline()
        matrix.append(gg.strip());
        # a[i]: effective length of row i = position of its last '1'
        # (trailing zeros and the newline stripped off).
        a.append(len(gg.rstrip('\n0')))
    if test != 2:
        #continue
        pass
    #print(matrix)
    n = 0
    # For each target position i, find the nearest following row that may
    # sit there (a[j] <= i+1) and rotate it up; the count of adjacent row
    # swaps equals the distance it moves.
    for i in range(size):
        idx = i + 1
        found = i;
        for j in range(i,size):
            if (a[j] <= idx):
                found = j
                break
        # swap
        if i != found:
            # Rotate a[i:found+1] right by moving a[found] to position i.
            s = a[i:found]
            a[i] = a[found]
            a[i+1:found+1] = s
            n = n + found - i
            #print("{0} -> {1}".format(i,found))
    print("Case #{0}: {1}".format(test+1,n));
| [
"[email protected]"
] | |
2c486fdff4147264b3a4d0404a6e8af18fa3daab | 2055c8b46b72e4fa19dc87fddf29faf0c81e703a | /Trackle/trackle/venv/lib/python3.6/encodings/iso8859_13.py | dad20781f270f4685c66642e733d6fd0ec0061e5 | [] | no_license | JMRLudan/Trackle2 | 2c72a3fff2e9bfe988874e00b64f56950043e825 | 2d2f486f076eb956903dabcc05434f1ecc6b6079 | refs/heads/master | 2021-06-20T08:56:56.973353 | 2020-01-19T14:28:59 | 2020-01-19T14:28:59 | 176,396,178 | 1 | 2 | null | 2021-06-10T21:20:09 | 2019-03-19T01:10:28 | Python | UTF-8 | Python | false | false | 61 | py | /Users/JLudan/anaconda3/lib/python3.6/encodings/iso8859_13.py | [
"[email protected]"
] | |
81fa627e293605e74746676da48b221cdaa59e9d | ce0a3a73c7825f7327b8319fb2593b6b01659bb0 | /webtest03/evaluation/migrations/0002_auto_20180802_0151.py | 5bb7bf88cabc2d1779cc24878b96578183d001a5 | [] | no_license | soccergame/deeplearning | 28b0a6ed85df12e362b3a451050fab5a2a994be7 | cbc65d3eba453992a279cfd96a9d3640d8fe6b9f | refs/heads/master | 2020-03-28T22:38:26.085464 | 2018-08-31T11:22:39 | 2018-08-31T11:22:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 441 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-08-02 01:51
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('evaluation', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='media',
name='name',
field=models.CharField(max_length=50),
),
]
| [
"[email protected]"
] | |
ffd9dc16f603d827f2770aa462b19116227f81ab | 947fa6a4a6155ffce0038b11f4d743603418ad68 | /.c9/metadata/environment/ib_miniprojects_backend/covid_dashboard/interactors/get_mandals_daily_report_day_wise_interactor.py | 391e90db9899d1ac8399b764ed3bda3330121e23 | [] | no_license | bharathi151/bharathi_diyyala | bd75e10639d7d22b332d5ce677e7799402dc4984 | 99f8657d010c790a0e4e4c9d6b57f81814784eb0 | refs/heads/master | 2022-11-21T12:43:48.401239 | 2020-07-23T09:05:52 | 2020-07-23T09:05:52 | 281,903,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,116 | py | {"filter":false,"title":"get_mandals_daily_report_day_wise_interactor.py","tooltip":"/ib_miniprojects_backend/covid_dashboard/interactors/get_mandals_daily_report_day_wise_interactor.py","undoManager":{"mark":16,"position":16,"stack":[[{"start":{"row":22,"column":8},"end":{"row":25,"column":9},"action":"remove","lines":["mandals_dtos = self._get_mandals_dtos(cumulative_dto=cumulative_dto)"," mandals_cumulative_dtos = DayWiseMandalCumulativeDtos("," mandals=mandals_dtos"," )"],"id":2},{"start":{"row":22,"column":4},"end":{"row":22,"column":8},"action":"remove","lines":[" "]},{"start":{"row":22,"column":0},"end":{"row":22,"column":4},"action":"remove","lines":[" "]},{"start":{"row":21,"column":72},"end":{"row":22,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":27,"column":0},"end":{"row":70,"column":24},"action":"remove","lines":[""," def _get_total_active_cases(self,cumulative_dto):"," total_confirmed_cases = cumulative_dto.total_confirmed_cases"," total_deaths = cumulative_dto.total_deaths"," total_recovered_cases = cumulative_dto.total_recovered_cases"," total_active_cases = total_confirmed_cases - ("," total_deaths + total_recovered_cases"," )"," return total_active_cases",""," def _get_mandals_dtos(self, cumulative_dto):"," mandals = []"," for mandal in cumulative_dto.mandals:"," mandals.append("," self._get_mandal_stats_dtos(mandal=mandal)"," )"," return mandals",""," def 
_get_mandal_stats_dtos(self, mandal):"," day_wise_statistics = []"," for mandal_dto in mandal.day_wise_statistics:"," total_active_cases = self._get_total_active_cases(mandal_dto)"," day_wise_statistics.append("," self._get_day_wise_cumulative_mandal_dto("," mandal=mandal_dto, total_active_cases=total_active_cases"," )"," )"," mandal_dtos = DayWiseMandalCumulativeDto("," mandal_name=mandal.mandal_name,"," mandal_id=mandal.mandal_id,"," day_wise_statistics=day_wise_statistics"," )"," return mandal_dtos","",""," def _get_day_wise_cumulative_mandal_dto(self, mandal, total_active_cases):"," state_dto = DayWiseCumulativeDto("," total_recovered_cases=mandal.total_recovered_cases,"," total_confirmed_cases=mandal.total_confirmed_cases,"," total_deaths=mandal.total_deaths,"," total_active_cases=total_active_cases,"," date=mandal.date"," )"," return state_dto"],"id":3},{"start":{"row":26,"column":0},"end":{"row":27,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":25,"column":23},"end":{"row":26,"column":0},"action":"remove","lines":["",""],"id":4}],[{"start":{"row":5,"column":13},"end":{"row":5,"column":23},"action":"remove","lines":["Cumulative"],"id":5},{"start":{"row":5,"column":13},"end":{"row":5,"column":18},"action":"insert","lines":["daily"]},{"start":{"row":11,"column":29},"end":{"row":11,"column":39},"action":"remove","lines":["cumulative"]},{"start":{"row":11,"column":29},"end":{"row":11,"column":34},"action":"insert","lines":["daily"]},{"start":{"row":20,"column":8},"end":{"row":20,"column":18},"action":"remove","lines":["cumulative"]},{"start":{"row":20,"column":8},"end":{"row":20,"column":13},"action":"insert","lines":["daily"]},{"start":{"row":21,"column":33},"end":{"row":21,"column":43},"action":"remove","lines":["cumulative"]},{"start":{"row":21,"column":33},"end":{"row":21,"column":38},"action":"insert","lines":["daily"]},{"start":{"row":22,"column":55},"end":{"row":22,"column":65},"action":"remove","lines":["cumulative"]},{"start":{"row":22,"
column":55},"end":{"row":22,"column":60},"action":"insert","lines":["daily"]},{"start":{"row":23,"column":20},"end":{"row":23,"column":30},"action":"remove","lines":["cumulative"]},{"start":{"row":23,"column":20},"end":{"row":23,"column":25},"action":"insert","lines":["daily"]},{"start":{"row":23,"column":38},"end":{"row":23,"column":48},"action":"remove","lines":["cumulative"]},{"start":{"row":23,"column":38},"end":{"row":23,"column":43},"action":"insert","lines":["daily"]}],[{"start":{"row":5,"column":13},"end":{"row":5,"column":14},"action":"remove","lines":["d"],"id":6}],[{"start":{"row":5,"column":13},"end":{"row":5,"column":14},"action":"insert","lines":["D"],"id":7}],[{"start":{"row":21,"column":66},"end":{"row":21,"column":67},"action":"insert","lines":[","],"id":8}],[{"start":{"row":21,"column":67},"end":{"row":21,"column":68},"action":"insert","lines":[" "],"id":9},{"start":{"row":21,"column":68},"end":{"row":21,"column":69},"action":"insert","lines":["t"]},{"start":{"row":21,"column":69},"end":{"row":21,"column":70},"action":"insert","lines":["i"]},{"start":{"row":21,"column":70},"end":{"row":21,"column":71},"action":"insert","lines":["l"]},{"start":{"row":21,"column":71},"end":{"row":21,"column":72},"action":"insert","lines":["l"]}],[{"start":{"row":21,"column":68},"end":{"row":21,"column":72},"action":"remove","lines":["till"],"id":10},{"start":{"row":21,"column":68},"end":{"row":21,"column":77},"action":"insert","lines":["till_date"]}],[{"start":{"row":21,"column":77},"end":{"row":21,"column":78},"action":"insert","lines":["="],"id":11},{"start":{"row":21,"column":78},"end":{"row":21,"column":79},"action":"insert","lines":["t"]},{"start":{"row":21,"column":79},"end":{"row":21,"column":80},"action":"insert","lines":["o"]}],[{"start":{"row":21,"column":79},"end":{"row":21,"column":80},"action":"remove","lines":["o"],"id":12}],[{"start":{"row":21,"column":79},"end":{"row":21,"column":80},"action":"insert","lines":["i"],"id":13}],[{"start":{"row":21,"colum
n":78},"end":{"row":21,"column":80},"action":"remove","lines":["ti"],"id":14},{"start":{"row":21,"column":78},"end":{"row":21,"column":87},"action":"insert","lines":["till_date"]}],[{"start":{"row":11,"column":60},"end":{"row":11,"column":61},"action":"insert","lines":[","],"id":15}],[{"start":{"row":11,"column":61},"end":{"row":11,"column":62},"action":"insert","lines":[" "],"id":16},{"start":{"row":11,"column":62},"end":{"row":11,"column":63},"action":"insert","lines":["t"]},{"start":{"row":11,"column":63},"end":{"row":11,"column":64},"action":"insert","lines":["i"]}],[{"start":{"row":11,"column":62},"end":{"row":11,"column":64},"action":"remove","lines":["ti"],"id":17},{"start":{"row":11,"column":62},"end":{"row":11,"column":71},"action":"insert","lines":["till_date"]}],[{"start":{"row":20,"column":8},"end":{"row":20,"column":17},"action":"remove","lines":["daily_dto"],"id":18},{"start":{"row":20,"column":8},"end":{"row":20,"column":26},"action":"insert","lines":["mandals_daily_dtos"]}]]},"ace":{"folds":[],"scrolltop":0,"scrollleft":0,"selection":{"start":{"row":8,"column":30},"end":{"row":8,"column":30},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":0},"timestamp":1591359863713,"hash":"92a5c6c48ce1d23f4c144cb24c8d876222b11723"} | [
"[email protected]"
] | |
ab6185ba90c94448913c8279f6c0785763528b8d | 22946b79620a06405f5c1add7844ca7eb118b5ce | /templates/frontend/start-frontend.erb | d6bd3711d5735145a3e11a40aa7e7da2154afb3c | [
"BSD-2-Clause-Views"
] | permissive | SUNET/puppet-sunet | 91ae0c023f4e00dc863d5fe28ee0f660791506db | 39296bb5780218e28f816ee6f39ce99746d452a0 | refs/heads/main | 2023-08-28T03:11:10.798430 | 2023-08-21T13:31:40 | 2023-08-21T13:36:04 | 35,332,499 | 1 | 6 | NOASSERTION | 2023-09-04T09:39:22 | 2015-05-09T14:56:01 | Puppet | UTF-8 | Python | false | false | 10,659 | erb | #!/usr/bin/env python3
#
# Copyright 2019 SUNET. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are
# permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY SUNET ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SUNET OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those of the
# authors and should not be interpreted as representing official policies, either expressed
# or implied, of SUNET.
#
# Author: Fredrik Thulin <[email protected]>
#
"""
Start a frontend instance, with locking to not start simultaneously with another instance.
This is intended to help a systemd based frontend load balancer start more efficiently.
Systemd starts all frontend services in parallel, which causes a _lot_ of docker containers
to start simultaneously. This is really poor for performance, so better let the instances
compete for an exclusive lock and start one at a time.
Since systemd will follow the main process, that process should be the one that runs
docker-compose up in the foreground. This script therefor starts with forking off a controller
process, that will acquire the lock. When it acquires the lock, it will send a signal back
to the main process to start the frontend instance, and then start the network container
initalisation before releasing the lock again.
PARENT CHILD
|
fork |
| -------- acquire lock
| | |
| fail for 1min success
| | |
| terminate both |
| parent and child |
| using an 'exit' cmd |
| |
| <------- cmd 'compose_up' ------------|
| |
execv docker-compose |
|
run network init
|
release lock
|
exit
Ideas for further improvement:
- Start the network initialisation when the haproxy container is present
- Hang around watching the containers. If they malfunction, kill the service.
"""
import fcntl
import logging
import os
import sys
from fcntl import LOCK_EX, LOCK_NB
from logging.handlers import SysLogHandler
from multiprocessing import Process, Queue
import argparse
import datetime
import subprocess
import time
# Default values for the command-line options; parse_args() falls back to
# this dict when no explicit `defaults` mapping is supplied.
_defaults = {'syslog': True,
             'debug': True,
             'lockfile': '/var/lock/start-frontend',
             'lock_retry': 60,
             'compose_bin': '/usr/local/bin/docker-compose'
             }
def parse_args(defaults=None):
    """Parse command-line arguments.

    `defaults` may supply a dict of default option values; when omitted,
    the module-level `_defaults` dict is used.
    """
    if defaults is None:
        defaults = _defaults
    parser = argparse.ArgumentParser(
        description='Särimner instance starter',
        add_help=True,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    # Positional arguments
    parser.add_argument('basedir', metavar='DIR',
                        help='Base directory (should have subdir \'scripts\')')
    parser.add_argument('name', metavar='NAME',
                        help='Name of instance')
    parser.add_argument('compose_file', metavar='FILE',
                        help='Docker Compose file to \'up\'')
    # Optional arguments
    parser.add_argument('--debug', dest='debug', action='store_true',
                        default=defaults['debug'],
                        help='Enable debug operation')
    parser.add_argument('--syslog', dest='syslog', action='store_true',
                        default=defaults['syslog'],
                        help='Enable syslog output')
    parser.add_argument('--lockfile', dest='lockfile', type=str,
                        default=defaults['lockfile'],
                        help='Lock file to acquire')
    parser.add_argument('--lock_retry', dest='lock_retry', type=int,
                        default=defaults['lock_retry'],
                        help='Seconds to wait for a lock before giving up')
    parser.add_argument('--compose_bin', dest='compose_bin', type=str,
                        default=defaults['compose_bin'],
                        help='Full path to docker-compose binary')
    return parser.parse_args()
def get_logger(myname, args, logger_in=None):
    """Return `logger_in` if given, otherwise configure the root logger
    and return a logger named `myname`. In either case, optionally quiet
    non-TTY stderr output and attach a syslog handler per `args`."""
    if logger_in:
        logger = logger_in
    else:
        # Root log level: DEBUG when requested, INFO otherwise.
        root_level = logging.DEBUG if args.debug else logging.INFO
        logging.basicConfig(level=root_level, stream=sys.stderr,
                            format='%(asctime)s: %(name)s: %(threadName)s %(levelname)s %(message)s')
        logger = logging.getLogger(myname)
    # Demote the stderr StreamHandler(s) to WARNING when stderr is not a
    # TTY (e.g. running under systemd) and debug is off.
    if not sys.stderr.isatty() and not args.debug:
        for handler in logging.getLogger('').handlers:
            handler.setLevel(logging.WARNING)
    if args.syslog:
        syslog_handler = logging.handlers.SysLogHandler()
        syslog_handler.setFormatter(
            logging.Formatter('%(name)s: %(levelname)s %(message)s'))
        logger.addHandler(syslog_handler)
    return logger
def manager_process(script, args, q, logger):
    """
    This is the child process that will acquire the exclusive lock, signal the parent
    to start docker-compose, initialise the container networking and then release the lock.

    Commands sent to the parent over `q`: 'compose_up' (parent should
    exec docker-compose) or 'exit'. Returns False if the lock cannot be
    acquired within args.lock_retry seconds; on success it never returns
    (it calls sys.exit(0), releasing the lock as the file closes).
    """
    logger.debug('Starting manager process for frontend instance {!r}'.format(args.name))
    retry = args.lock_retry
    with open(args.lockfile, 'w') as lock_fd:
        logger.info('Acquiring lock for frontend instance {!r}'.format(args.name))
        locked = False
        while not locked:
            try:
                # Non-blocking exclusive lock; on failure, poll once per
                # second until the retry budget is exhausted.
                fcntl.lockf(lock_fd, LOCK_EX | LOCK_NB)
                locked = True
            except OSError:
                retry -= 1
                #logger.debug('Did not get a lock (retry {})'.format(retry))
                time.sleep(1)
                if retry and (retry % 10 == 0):
                    logger.info('Frontend instance {!r} still waiting for lock ({} seconds left)'.format(
                        args.name, retry
                    ))
            if not locked and not retry:
                logger.error('Failed to acquire lock in {} seconds - exiting'.format(args.lock_retry))
                q.put('exit')
                return False
        logger.info('Lock acquired (frontend instance {!r})'.format(args.name))
        # Tell the parent to exec docker-compose, give it a moment to do
        # so, then run the network configuration script while still
        # holding the lock so instances start one at a time.
        q.put('compose_up')
        logger.info('Sleeping three seconds')
        time.sleep(3)
        logger.info('Starting {} {}'.format(script, args.name))
        t1 = datetime.datetime.now()
        res = subprocess.run([script, args.name])
        t2 = datetime.datetime.now()
        logger.info('Frontend instance {!r} network configured in {:.2f} seconds'.format(
            args.name, (t2 - t1).total_seconds()))
        logger.debug('Network configuration script result: {}'.format(res))
        q.put('exit')
        logger.info('Releasing lock and exiting (frontend instance {!r} finished)'.format(args.name))
        # avoid becoming a zombie process in case the parent process has execv:d docker-compose
        os.setsid()
        sys.exit(0)
def main(myname='start-frontend', args=None, logger_in=None):
    """Start a frontend instance.

    Spawns a manager child process that serialises startup using a file
    lock, then waits for its commands: on 'compose_up' this process is
    replaced (execv) by docker-compose, on 'exit' it returns.
    """
    if not args:
        args = parse_args()
    logger = get_logger(myname, args, logger_in)

    # The network configuration script must exist under the basedir.
    script = os.path.join(args.basedir, 'scripts', 'configure-container-network')
    if not os.path.isfile(script):
        logger.error('Incorrect basedir, {} not found'.format(script))
        return False

    commands = Queue()
    manager = Process(target=manager_process,
                      args=(script, args, commands, logger.getChild('manager'),))
    manager.start()
    logger.debug('Started process {}'.format(manager))

    while True:
        logger.debug('Waiting for commands from the manager process...')
        cmd = commands.get()
        logger.debug('Got command {!r}'.format(cmd))
        if cmd == 'exit':
            logger.info('Received exit command')
            return True
        if cmd == 'compose_up':
            # Replace this process with docker-compose.  The manager child
            # calls os.setsid() so it is not left behind as a zombie.
            logger.info('Starting frontend instance {!r}'.format(args.name))
            os.execv(args.compose_bin, [
                args.compose_bin,
                '-f', args.compose_file,
                'up',
                '--force-recreate',
                '--no-deps',
            ])
        else:
            logger.info('Received command {!r} - exiting'.format(cmd))
            return False
# Script entry point: exit status 0 on success (or Ctrl-C), 1 on failure,
# otherwise the integer result returned by main().
if __name__ == '__main__':
    try:
        outcome = main(os.path.basename(sys.argv[0]))
    except KeyboardInterrupt:
        sys.exit(0)
    if outcome is True:
        sys.exit(0)
    elif outcome is False:
        sys.exit(1)
    else:
        sys.exit(int(outcome))
| [
"[email protected]"
] | |
66322e287c6b4586776e85303fbc8f20198de35f | cf6c67790393b4d1ad75f5f30fef00455bc9a541 | /icgc_dataobjects/run.py | c71e438d674f2c7beed3bc51313bfcf68ef4a785 | [
"MIT"
] | permissive | ohsu-comp-bio/icgc_dataobjects | fe3b231838875c9dadb9eacb17004fc59790689c | 3cbb676d2e1a9a21e03f3cfb8a3853752aa97916 | refs/heads/master | 2022-10-09T01:04:14.259700 | 2017-05-05T20:28:50 | 2017-05-05T20:28:50 | 90,410,194 | 0 | 0 | MIT | 2022-09-23T20:52:26 | 2017-05-05T19:33:26 | Python | UTF-8 | Python | false | false | 2,301 | py | #!/usr/bin/env python
"""
Proxy front end to the dcc server
"""
import os
from flask import request, jsonify, Response, abort, Flask
from flask_cors import CORS
# our utilities
import dcc_proxy
def _configure_app():
    """Create the Flask application with cross-origin (CORS) access enabled."""
    flask_app = Flask(__name__)
    CORS(flask_app)
    return flask_app
# Application instance shared by the route decorators below.
app = _configure_app()
# https://github.com/ohsu-comp-bio/data-object-schemas/blob/feature/gdc/proto/data_objects.proto
@app.route('/api/v1/data/object/search', methods=['POST'])
def data_object_search():
    """Handle POST data/object/search by delegating to the DCC proxy."""
    # Log the raw request payload for debugging.
    app.logger.debug(request.data)
    response = dcc_proxy.data_object_search()
    return response
# https://github.com/ohsu-comp-bio/data-object-schemas/blob/feature/gdc/proto/data_objects.proto
@app.route('/api/v1/data/object/<path:id>', methods=['GET'])
def data_object_get(id):
    """Handle GET for a single data object identified by *id*.

    Note: the parameter shadows the builtin ``id``; its name must match the
    ``<path:id>`` placeholder in the route, so it is kept for compatibility.
    """
    response = dcc_proxy.data_object_get(id)
    return response
# https://github.com/ohsu-comp-bio/data-object-schemas/blob/feature/gdc/proto/data_objects.proto
@app.route('/api/v1/data/object', methods=['POST'])
def data_object_post():
    """Handle POST of a new data object by delegating to the DCC proxy."""
    response = dcc_proxy.data_object_post()
    return response
# https://github.com/ohsu-comp-bio/data-object-schemas/blob/feature/gdc/proto/data_objects.proto
@app.route('/api/v1/datasets', methods=['POST'])
def datasets_post():
    """Handle POST of a new dataset by delegating to the DCC proxy."""
    response = dcc_proxy.datasets_post()
    return response
# https://github.com/ohsu-comp-bio/data-object-schemas/blob/feature/gdc/proto/data_objects.proto
@app.route('/api/v1/datasets/<path:id>', methods=['GET'])
def datasets_get_one(id):
    """Handle GET for a single dataset identified by *id*.

    Note: the parameter shadows the builtin ``id``; its name must match the
    ``<path:id>`` placeholder in the route, so it is kept for compatibility.
    """
    response = dcc_proxy.datasets_get_one(id)
    return response
# Log the full URL map at startup so the exposed routes are visible when
# debugging.
app.logger.debug('URL map {}'.format(app.url_map))
# Script entry point; server settings come from environment variables.
if __name__ == '__main__':  # pragma: no cover
    env = os.environ
    app.run(
        debug='API_DEBUG' in env,  # TODO does eve override?
        port=int(env.get('API_PORT', '5000')),
        host=env.get('API_TARGET', '0.0.0.0'),
        threaded=True,
    )
| [
"[email protected]"
] | |
e8fc1d761894c544fc9f36d9aa38ca5900bef47d | 3fda3ff2e9334433554b6cf923506f428d9e9366 | /hipeac/migrations/0014_auto_20190121_1724.py | ebf27a1920573587d68f431147ce919777e91b00 | [
"MIT"
] | permissive | CreativeOthman/hipeac | 12adb61099886a6719dfccfa5ce26fdec8951bf9 | 2ce98da17cac2c6a87ec88df1b7676db4c200607 | refs/heads/master | 2022-07-20T10:06:58.771811 | 2020-05-07T11:39:13 | 2020-05-07T11:44:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | # Generated by Django 2.1.5 on 2019-01-21 16:24
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: make ``Video.youtube_id`` a unique CharField."""

    # Must be applied after migration 0013 of the hipeac app.
    dependencies = [
        ("hipeac", "0013_auto_20190121_1650"),
    ]

    operations = [
        # Enforce uniqueness of YouTube IDs at the database level.
        migrations.AlterField(
            model_name="video", name="youtube_id", field=models.CharField(max_length=40, unique=True),
        ),
    ]
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.