blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
54b49280506de442fdd26700c04d43daacef03fb | 8dc84558f0058d90dfc4955e905dab1b22d12c08 | /third_party/android_sdk/public/platform-tools/systrace/catapult/devil/devil/android/valgrind_tools/base_tool.py | 135b050d20315bdbfc2dc8460220b3ec93c165a3 | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
]
| permissive | meniossin/src | 42a95cc6c4a9c71d43d62bc4311224ca1fd61e03 | 44f73f7e76119e5ab415d4593ac66485e65d700a | refs/heads/master | 2022-12-16T20:17:03.747113 | 2020-09-03T10:43:12 | 2020-09-03T10:43:12 | 263,710,168 | 1 | 0 | BSD-3-Clause | 2020-05-13T18:20:09 | 2020-05-13T18:20:08 | null | UTF-8 | Python | false | false | 136 | py | ../../../../../../../../../../../.cipd/pkgs/82/_current/platform-tools/systrace/catapult/devil/devil/android/valgrind_tools/base_tool.py | [
"[email protected]"
]
| |
06409b3f8d41206942c65aa5d15f4a9c29d20919 | 351175b725ac71e28b5e2811b3ad47052a352923 | /onnx/backend/test/case/node/sequenceinsert.py | 619043d45a333758b8fe047670bfb5f3327c6f6f | [
"Apache-2.0"
]
| permissive | take-cheeze/onnx | 99bf73cdde80da357a7eb3e18d542c2d58ec084d | ee7d2cdfa34b8b3c7e0b68b70daf72aaa48c23ac | refs/heads/master | 2023-08-24T05:36:45.737517 | 2022-12-15T17:24:47 | 2022-12-15T17:24:47 | 189,348,215 | 0 | 0 | MIT | 2019-05-30T04:58:40 | 2019-05-30T04:58:40 | null | UTF-8 | Python | false | false | 2,470 | py | # SPDX-License-Identifier: Apache-2.0
from typing import Any, List, Optional

import numpy as np

import onnx

from ..base import Base
from . import expect
def sequence_insert_reference_implementation(
    sequence: List[Any], tensor: np.ndarray, position: Optional[np.ndarray] = None
) -> List[Any]:
    """Return a copy of ``sequence`` with ``tensor`` inserted.

    Args:
        sequence: List of tensors; never mutated (a shallow copy is made).
        position: Optional one-element array ``np.array([pos_index])`` with
            ``pos_index`` in ``[-len(sequence), len(sequence)]``. When omitted,
            ``tensor`` is appended at the end.

    Returns:
        A new list containing the original elements plus ``tensor``.
    """
    # make a copy of input sequence so the caller's list is untouched
    seq = list(sequence)
    if position is not None:
        # The position argument is in the format np.array([pos_index]);
        # list.insert accepts numpy integer scalars directly.
        insert_position = position[0]
        seq.insert(insert_position, tensor)
    else:
        # Default position of insertion is at the end of the sequence.
        seq.append(tensor)
    return seq
class SequenceInsert(Base):
    """ONNX backend test cases for the SequenceInsert operator."""

    @staticmethod
    def export() -> None:
        """Emit one test per case: default append and explicit front insert."""
        base_sequence = [
            np.array([1, 2, 3, 4]).astype(np.int64),
            np.array([5, 6, 7]).astype(np.int64),
            np.array([8, 9]).astype(np.int64),
        ]
        # case name -> [tensor_to_insert, optional position array]
        test_cases = {
            "at_back": [np.array([10, 11, 12]).astype(np.int64)],
            "at_front": [np.array([-2, -1, 0]), np.array([0]).astype(np.int64)],
        }
        for case_name, case_inputs in test_cases.items():
            new_tensor = case_inputs[0].astype(np.int64)
            has_position = len(case_inputs) > 1
            input_names = ["sequence", "tensor"]
            if has_position:
                input_names.append("position")
            node = onnx.helper.make_node(
                "SequenceInsert",
                inputs=input_names,
                outputs=["output_sequence"],
            )
            if has_position:
                insert_at = case_inputs[1]
                expected = sequence_insert_reference_implementation(
                    base_sequence, new_tensor, insert_at
                )
                expect(
                    node,
                    inputs=[base_sequence, new_tensor, insert_at],
                    outputs=[expected],
                    name="test_sequence_insert_" + case_name,
                )
            else:
                expected = sequence_insert_reference_implementation(
                    base_sequence, new_tensor
                )
                expect(
                    node,
                    inputs=[base_sequence, new_tensor],
                    outputs=[expected],
                    name="test_sequence_insert_" + case_name,
                )
| [
"[email protected]"
]
| |
3a26364f1b038b02eb40aad2454fd6fb3cb36c07 | 7f52724110a12d7721f3bbb7a0fce0c4b1c3dd97 | /gameserver/ResPrice.py | 6e09ece6c606661689b5cf79c23b24e13872cfe9 | [
"MIT"
]
| permissive | cssp1/assignment1 | 896cb69e8ff43e26658c65ea16b079f87eebef9a | 0839fc589cb52e7384c446593db79e0c2ea737d5 | refs/heads/master | 2023-03-10T08:03:56.954064 | 2022-07-20T04:02:15 | 2022-07-20T04:02:15 | 29,496,198 | 0 | 2 | null | 2023-02-17T17:56:53 | 2015-01-19T20:52:53 | JavaScript | UTF-8 | Python | false | false | 3,885 | py | #!/usr/bin/env python
# Copyright (c) 2015 Battlehouse Inc. All rights reserved.
# Use of this source code is governed by an MIT-style license that can be
# found in the LICENSE file.
# this is a library for use by the game server and analytics code to calculate
# the price for a bundle of fungible resources.
# When using this library from a stand-alone tool, just pass None for the session.
import math
# In order to be callable from both inside server.py and from stand-alone analytics tools,
# this is an adaptor that handles calling get_any_abtest_value where appropriate to handle overrides.
def resolve_value(session, override_name, default_value):
    """Return a store parameter, honoring per-player A/B-test overrides.

    Stand-alone analytics tools pass session=None, in which case the
    plain gamedata default is returned unchanged.
    """
    if not session:
        return default_value
    return session.player.get_any_abtest_value(override_name, default_value)
# returns a parameter from store.json that might be overridden by an A/B test, and might also be a per-resource dictionary
def get_resource_parameter(gamedata, session, name, resname):
    """Look up a store.json parameter, resolving A/B-test overrides.

    Some parameters are stored as a per-resource dictionary; those are
    indexed by ``resname`` before being returned.
    """
    value = resolve_value(session, name, gamedata['store'][name])
    # exact-type check mirrors the store.json schema: per-resource values
    # are plain dicts keyed by resource name
    if type(value) is dict:
        return value[resname]
    return value
def cost_legacy_exp_log(gamedata, session, resname, amount, currency):
    """Price ``amount`` of ``resname`` using the legacy exponential-log curve.

    Bundles of two or fewer units always cost 1. When ``currency`` is
    'gamebucks' the FB-credit price is scaled by gamebucks_per_fbcredit.
    """
    if amount <= 2:
        return 1
    scale_factor = get_resource_parameter(gamedata, session, 'resource_price_formula_scale', resname)
    if currency == 'gamebucks':
        coeff = resolve_value(session, 'gamebucks_per_fbcredit', gamedata['store']['gamebucks_per_fbcredit'])
    else:
        coeff = 1
    log_amount = math.log10(amount)
    # empirically tuned pricing curve (same constants as the original formula)
    return scale_factor * coeff * 0.06 * math.exp(0.75 * (log_amount - 2.2 * math.pow(log_amount, -1.25)))
def cost_piecewise_linear(gamedata, session, resname, amount, currency):
    """Price ``amount`` of ``resname`` by linear interpolation between
    configured (quantity, price) points.

    The last segment is extrapolated for amounts beyond the final point.
    When ``currency`` is not 'gamebucks' the price is converted back to
    FB credits by dividing by gamebucks_per_fbcredit.
    """
    price_points = get_resource_parameter(gamedata, session, 'resource_price_formula_piecewise_linear_points', resname)
    # range (not the Python-2-only xrange) keeps this 2/3 compatible; the
    # point lists are tiny, so materializing them is free.
    for i in range(1, len(price_points)):
        if (amount < price_points[i][0] or i == len(price_points) - 1):
            scale_factor = get_resource_parameter(gamedata, session, 'resource_price_formula_scale', resname)
            coeff = (1 / resolve_value(session, 'gamebucks_per_fbcredit', gamedata['store']['gamebucks_per_fbcredit'])) if currency != 'gamebucks' else 1
            # cast to float so that we don't use integer division (Python 2)
            slope = float(price_points[i][1] - price_points[i - 1][1]) / (price_points[i][0] - price_points[i - 1][0])
            return scale_factor * coeff * (price_points[i - 1][1] + slope * (amount - price_points[i - 1][0]))
    raise Exception('Unhandled case while calculating piecewise_linear prices. This should never happen.')
def cost_by_townhall_level(gamedata, session, resname, amount, currency):
    """Price ``amount`` of ``resname`` using a per-townhall-level rate table.

    Requires a live session (the player's townhall level selects the
    resources-per-gamebuck rate). Raises if called without one.
    """
    scale_factor = get_resource_parameter(gamedata, session, 'resource_price_formula_scale', resname)
    per_level_rates = get_resource_parameter(gamedata, session, 'resource_price_formula_by_townhall_level', resname)
    if not session:
        raise Exception('must have session to compute townhall level')
    townhall_level = session.player.get_townhall_level()
    assert 1 <= townhall_level <= len(per_level_rates)
    res_per_gamebuck = per_level_rates[townhall_level - 1]
    if currency != 'gamebucks':
        coeff = 1 / resolve_value(session, 'gamebucks_per_fbcredit', gamedata['store']['gamebucks_per_fbcredit'])
    else:
        coeff = 1
    # float() guards against Python 2 integer division
    return scale_factor * coeff * amount / float(res_per_gamebuck)
# Dispatch table: maps the 'resource_price_formula' store setting to the
# implementation that computes the price for that formula.
price_formulas = {
    'legacy_exp_log': cost_legacy_exp_log,
    'piecewise_linear': cost_piecewise_linear,
    'by_townhall_level': cost_by_townhall_level,
}
# returns the price of an arbitrary amount of fungible resources
def get_resource_price(gamedata, session, resname, amount, currency):
    """Price ``amount`` units of ``resname`` in ``currency``, rounded up.

    Non-positive amounts are free. The formula is selected per-resource
    via the 'resource_price_formula' store parameter.
    """
    if amount <= 0:
        return 0
    formula_name = get_resource_parameter(gamedata, session, 'resource_price_formula', resname)
    formula = price_formulas[formula_name]
    return math.ceil(formula(gamedata, session, resname, amount, currency))
| [
"[email protected]"
]
| |
9569faaeef0944b297f019a3868299475553cfa7 | 3468fe20cd1128eb8e18354c30490421e504e4af | /portal/apps/videologue/templatetags/videologue_tags.py | 3b8b0d5eac75b16567487b0cda7de58bf28361bb | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
]
| permissive | djpeluca/utopia-cms | 7da45422ffc4f1f397f385ea37243f2745a758de | 1e444afea565fdc734abf449b8ebe9b7c2c47d80 | refs/heads/main | 2023-08-19T23:04:44.666527 | 2021-10-27T01:55:11 | 2021-10-27T01:55:11 | 387,323,009 | 0 | 0 | BSD-3-Clause | 2021-07-19T03:03:48 | 2021-07-19T03:03:48 | null | UTF-8 | Python | false | false | 1,663 | py | # -*- coding: utf-8 -*-
from videologue.models import YouTubeVideo
from django.template import (Context, Library, loader, Node, TemplateSyntaxError)
from string import lower
# Module-level registry picked up by Django's template engine ({% load ... %}).
register = Library()
# Base directory of the per-video-class module templates used by render_video.
TPL_DIR = 'videologue/templates/'
class RenderLatestVideoNode(Node):
    """Template node that stores the most recent YouTubeVideo into the
    template context under the variable name given at parse time."""
    def __init__(self, kwcontext):
        # Name of the context variable to populate.
        self.kw = kwcontext
    def render(self, context):
        # Narrowed from a bare except: a bare except would also swallow
        # KeyboardInterrupt/SystemExit. Any lookup failure (e.g. an empty
        # table raising DoesNotExist) still yields None.
        try:
            video = YouTubeVideo.objects.latest()
        except Exception:
            video = None
        context.update({self.kw: video})
        # Tags that only mutate context render no output.
        return ''
class RenderVideoNode(Node):
    """Template node that stores the YouTubeVideo with a given id into the
    template context under the variable name given at parse time."""
    def __init__(self, kwcontext, vid):
        # Name of the context variable to populate.
        self.kw = kwcontext
        # Primary key of the video to fetch.
        self.vid = vid
    def render(self, context):
        # Narrowed from a bare except: a bare except would also swallow
        # KeyboardInterrupt/SystemExit. Missing/invalid ids yield None.
        try:
            video = YouTubeVideo.objects.get(id=self.vid)
        except Exception:
            video = None
        context.update({self.kw: video})
        # Tags that only mutate context render no output.
        return ''
@register.tag
def get_latest_video(parser, token):
    """Usage: {% get_latest_video as video_object %}"""
    pieces = token.contents.split()
    # Expect exactly: tag_name, 'as', variable_name
    if len(pieces) != 3 or pieces[1] != 'as':
        raise TemplateSyntaxError('Invalid arguments for %s' % pieces[0])
    return RenderLatestVideoNode(pieces[2])
@register.tag
def get_video(parser, token):
    """Usage: {% get_video id as video_object %}"""
    pieces = token.contents.split()
    # Expect exactly: tag_name, video_id, 'as', variable_name
    if len(pieces) != 4 or pieces[2] != 'as':
        raise TemplateSyntaxError('Invalid arguments for %s' % pieces[0])
    return RenderVideoNode(pieces[3], pieces[1])
@register.filter
def render_video(video):
    """Render *video* with its class-specific module template.

    Returns '' for a falsy video so templates can pipe optional values
    through this filter safely.
    """
    if not video:
        return ''
    # str.lower() replaces string.lower(), which was removed in Python 3;
    # behavior is identical on Python 2.
    tpl = loader.get_template(
        TPL_DIR + '%s/module.html' % video.__class__.__name__.lower())
    return tpl.render({'video': video})
| [
"[email protected]"
]
| |
d4acfecc03cbaee58f18dace5a929be206713d9f | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_248/ch76_2020_04_12_20_47_41_177110.py | 048734f0d57c078395eafd03c2ef252a0ed91c86 | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 189 | py | def aniversariantes_de_setembro(dicionario):
dicionario2={}
for i in dicionario:
if dicionario[i][2:4]=='09':
dicionario2[i]=dicionario[i]
return dicionario2 | [
"[email protected]"
]
| |
1fec1f837a84009fd62bc40d7396ed04597bd931 | 76b7eb861bdaf84dc5cb7e8945de95c7605ae4bf | /campfin/efilings/management/commands/process_skede_lines.py | 815f82f0150ea0446563f6152d1ada3d1e64e1c2 | []
| no_license | jsfenfen/paper_fec | 60e4587d41b183be9a2a714d326bbcbe69657aa0 | f05d37ef78a22e2f0c071408914e6667f4d4b988 | refs/heads/master | 2018-12-28T04:20:16.589236 | 2015-09-01T06:28:07 | 2015-09-01T06:28:07 | 27,084,007 | 5 | 1 | null | 2015-08-19T16:19:43 | 2014-11-24T16:27:23 | Python | UTF-8 | Python | false | false | 4,917 | py | from django.core.management.base import BaseCommand, CommandError
from datetime import date
from fec_alerts.models import new_filing
from formdata.models import SkedE
from summary_data.models import Candidate_Overlay
from reconciliation.fec_reconciler import match_by_name, run_fec_query
from add_committees_to_skede import attach_committee_to_skedeline
from shared_utils.cycle_utils import get_cycle_from_date
def set_data_from_self(skedeline):
    """Fallback: copy the candidate fields already on the SkedE row into
    the *_checked columns, building 'Last, First [Middle]' for the name,
    then save the row."""
    last = skedeline.candidate_last_name
    first = skedeline.candidate_first_name
    middle = skedeline.candidate_middle_name
    if middle:
        full_name = "%s, %s %s" % (last, first, middle)
    else:
        full_name = "%s, %s" % (last, first)
    skedeline.candidate_district_checked = skedeline.candidate_district
    skedeline.candidate_office_checked = skedeline.candidate_office
    skedeline.candidate_state_checked = skedeline.candidate_state
    skedeline.candidate_name_checked = full_name
    skedeline.support_oppose_checked = skedeline.support_oppose_code
    skedeline.save()
def set_data_from_candidate_id(skedeline, candidate_id):
    # Populate the *_checked columns from the Candidate_Overlay row matching
    # this FEC candidate id in the filing's cycle. Returns True on success,
    # False when no overlay row exists (the caller then falls back).
    cycle_date = skedeline.effective_date
    THIS_CYCLE = None
    if cycle_date:
        THIS_CYCLE = get_cycle_from_date(cycle_date)
    # NOTE(review): if effective_date is unset, the lookup runs with
    # cycle=None -- confirm that an overlay row can match in that case.
    try:
        this_candidate = Candidate_Overlay.objects.get(fec_id=candidate_id, cycle=(THIS_CYCLE))
        skedeline.candidate_id_checked = this_candidate.fec_id
        skedeline.candidate_checked = this_candidate
        skedeline.candidate_district_checked = this_candidate.office_district
        skedeline.district_checked = this_candidate.district
        skedeline.candidate_office_checked = this_candidate.office
        skedeline.candidate_party_checked = this_candidate.party
        skedeline.candidate_state_checked = this_candidate.state
        skedeline.candidate_name_checked = this_candidate.name
        skedeline.support_oppose_checked = skedeline.support_oppose_code
        skedeline.save()
        return True
    except Candidate_Overlay.DoesNotExist:
        # Missing overlay rows are expected occasionally; log and report failure.
        print "Missing candidate overlay for %s filing %s" % (candidate_id, skedeline.filing_number)
        return False
def fuzzy_match_candidate(skedeline):
    # Try to identify the IE target by fuzzy name matching against the FEC
    # candidate list, constrained by state/office/cycle. On a confident
    # match, delegates to set_data_from_candidate_id; returns False otherwise.
    state = skedeline.candidate_state
    name_to_check = "%s, %s" % (skedeline.candidate_last_name, skedeline.candidate_first_name)
    office = skedeline.candidate_office
    # NOTE(review): duplicate assignment -- `state` was already set above.
    state = skedeline.candidate_state
    cycle_date = skedeline.effective_date
    THIS_CYCLE = None
    if cycle_date:
        THIS_CYCLE = get_cycle_from_date(cycle_date)
    result = run_fec_query(name_to_check, state=state, office=office, cycle=THIS_CYCLE, fuzzy=True)
    if result:
        # Only the top-ranked result is considered.
        if result[0]['match']:
            print "Fuzzy matching matched %s, %s, %s to %s with id %s" % (name_to_check, state, office, result[0]['name'], result[0]['id'])
            # Success still depends on the overlay lookup inside
            # set_data_from_candidate_id.
            return set_data_from_candidate_id(skedeline, result[0]['id'])
    print "Fuzzy matching couldn't match %s, %s, %s" % (name_to_check, state, office)
    return False
def attach_ie_target(skedeline):
    # Resolve which candidate this independent expenditure targets, trying
    # (1) the filer-supplied candidate id, (2) fuzzy name matching, and
    # (3) falling back to the raw data on the line itself.
    candidate_id = skedeline.candidate_id_number
    # If there's a candidate id, enter the data from the overlay
    if candidate_id:
        result = set_data_from_candidate_id(skedeline, candidate_id)
        if result:
            return True
    else:
        # if we're still here, try a fuzzy match
        fuzzy_match_result = fuzzy_match_candidate(skedeline)
        if fuzzy_match_result:
            return True
    # NOTE(review): when a candidate id is present but the overlay lookup
    # fails, fuzzy matching is NOT attempted -- confirm this is intended.
    # fall back on data that's already there.
    set_data_from_self(skedeline)
    return False
class Command(BaseCommand):
    """Management command: attach committee and candidate-target data to the
    Schedule E (independent expenditure) lines of processed filings."""
    help = "Set the name and details of the candidate targetted"
    requires_model_validation = False

    def handle(self, *args, **options):
        """Walk unprocessed filings in filing-number order; filings with no
        'E' entry in lines_present are skipped entirely (left unmarked)."""
        pending = new_filing.objects.filter(
            data_is_processed=True, body_rows_superceded=True
        ).exclude(ie_rows_processed=True).order_by('filing_number')
        for filing in pending:
            try:
                # KeyError (no 'E' key) skips the filing without marking it.
                sked_e_count = int(filing.lines_present['E'])
            except KeyError:
                continue
            if sked_e_count > 0:
                for line in SkedE.objects.filter(filing_number=filing.filing_number):
                    attach_committee_to_skedeline(line)
                    attach_ie_target(line)
            # mark that we've been processed (even when the count was zero).
            filing.ie_rows_processed = True
            filing.save()
"[email protected]"
]
| |
2ed31607d5a3eb7e3e91fa0eba51db90b0af3268 | a81c1492783e7cafcaf7da5f0402d2d283b7ce37 | /google/ads/google_ads/v6/services/transports/campaign_bid_modifier_service_grpc_transport.py | 9c627ba77a44921e7a3dd9698c767432afb6e484 | [
"Apache-2.0"
]
| permissive | VincentFritzsche/google-ads-python | 6650cf426b34392d1f58fb912cb3fc25b848e766 | 969eff5b6c3cec59d21191fa178cffb6270074c3 | refs/heads/master | 2023-03-19T17:23:26.959021 | 2021-03-18T18:18:38 | 2021-03-18T18:18:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,322 | py | # -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.api_core.grpc_helpers
from google.ads.google_ads.v6.proto.services import campaign_bid_modifier_service_pb2_grpc
class CampaignBidModifierServiceGrpcTransport(object):
    """gRPC transport for the google.ads.googleads.v6.services
    CampaignBidModifierService API.

    Exposes the raw gRPC stubs directly, so callers can take advantage of
    advanced gRPC features when they need to.
    """
    # OAuth scopes required by every RPC defined on this service.
    _OAUTH_SCOPES = (
        'https://www.googleapis.com/auth/adwords',
    )

    def __init__(self, channel=None, credentials=None,
                 address='googleads.googleapis.com:443'):
        """Instantiate the transport.

        Args:
            channel (grpc.Channel): An existing channel to make calls through.
                Mutually exclusive with ``credentials``.
            credentials (google.auth.credentials.Credentials): Credentials
                identifying this application to the service; ascertained from
                the environment when omitted.
            address (str): The address where the service is hosted.

        Raises:
            ValueError: If both ``channel`` and ``credentials`` are given.
        """
        # A channel comes with credentials baked in already, so accepting
        # both would be ambiguous.
        if channel is not None and credentials is not None:
            raise ValueError(
                'The `channel` and `credentials` arguments are mutually '
                'exclusive.',
            )
        if channel is None:
            # Unlimited message sizes in both directions.
            channel = self.create_channel(
                address=address,
                credentials=credentials,
                options={
                    'grpc.max_send_message_length': -1,
                    'grpc.max_receive_message_length': -1,
                }.items(),
            )
        self._channel = channel
        # gRPC "stubs" bind the service's RPC methods to the channel.
        self._stubs = {
            'campaign_bid_modifier_service_stub': campaign_bid_modifier_service_pb2_grpc.CampaignBidModifierServiceStub(channel),
        }

    @classmethod
    def create_channel(cls, address='googleads.googleapis.com:443',
                       credentials=None, **kwargs):
        """Create and return a gRPC channel object.

        Args:
            address (str): The host for the channel to use.
            credentials (~.Credentials): Credentials identifying this
                application; ascertained from the environment when omitted.
            kwargs (dict): Extra keyword arguments passed to channel creation.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        return google.api_core.grpc_helpers.create_channel(
            address,
            credentials=credentials,
            scopes=cls._OAUTH_SCOPES,
            **kwargs
        )

    @property
    def channel(self):
        """grpc.Channel: The gRPC channel used by the transport."""
        return self._channel

    @property
    def get_campaign_bid_modifier(self):
        """gRPC stub for :meth:`CampaignBidModifierServiceClient.get_campaign_bid_modifier`.

        Returns the requested campaign bid modifier in full detail.

        Returns:
            Callable: Accepts the deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['campaign_bid_modifier_service_stub'].GetCampaignBidModifier

    @property
    def mutate_campaign_bid_modifiers(self):
        """gRPC stub for :meth:`CampaignBidModifierServiceClient.mutate_campaign_bid_modifiers`.

        Creates, updates, or removes campaign bid modifiers. Operation
        statuses are returned.

        Returns:
            Callable: Accepts the deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['campaign_bid_modifier_service_stub'].MutateCampaignBidModifiers
| |
08f1767b3ce113050caf9f37a0fb334135b09bb5 | 2b0a565a2e3078c53ecdfb7ad16d38516b965746 | /src/bilm/encoder_base.py | 889b7cfd43e2b0c87d6b421fb180030199de42d2 | []
| no_license | Oneplus/ELMo | 8f2093c23fdf77205ac7a0ddc4dce829832b4850 | 8a9dfe987dd0551641bf4d023d61014ce9640a0b | refs/heads/master | 2020-04-07T21:30:13.186554 | 2019-05-20T00:40:55 | 2019-05-20T00:40:55 | 158,729,674 | 10 | 3 | null | 2018-11-22T17:02:22 | 2018-11-22T17:02:21 | null | UTF-8 | Python | false | false | 16,500 | py | from typing import Tuple, Union, Optional, Callable
import torch
from torch.autograd import Variable
from torch.nn.utils.rnn import pack_padded_sequence, PackedSequence
from allennlp.nn.util import get_lengths_from_binary_sequence_mask, sort_batch_by_length
# We have two types here for the state, because storing the state in something
# which is Iterable (like a tuple, below), is helpful for internal manipulation
# - however, the states are consumed as either Tensors or a Tuple of Tensors, so
# returning them in this format is unhelpful.
# Type aliases: a hidden state is either a single tensor (e.g. GRU) or an
# (hidden, memory) pair (e.g. LSTM); the tuple form is the internal storage.
RnnState = Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]  # pylint: disable=invalid-name
RnnStateStorage = Tuple[torch.Tensor, ...]  # pylint: disable=invalid-name
class _EncoderBase(torch.nn.Module):
# pylint: disable=abstract-method
"""
This abstract class serves as a base for the 3 ``Encoder`` abstractions in AllenNLP.
- :class:`~allennlp.modules.seq2seq_encoders.Seq2SeqEncoders`
- :class:`~allennlp.modules.seq2vec_encoders.Seq2VecEncoders`
Additionally, this class provides functionality for sorting sequences by length
so they can be consumed by Pytorch RNN classes, which require their inputs to be
sorted by length. Finally, it also provides optional statefulness to all of it's
subclasses by allowing the caching and retrieving of the hidden states of RNNs.
"""
def __init__(self, stateful: bool = False) -> None:
super(_EncoderBase, self).__init__()
self.stateful = stateful
self._states: Optional[RnnStateStorage] = None
    def forward(self, inputs: torch.Tensor,
                mask: torch.Tensor):
        # Abstract hook: concrete Seq2Seq/Seq2Vec encoders implement the
        # actual encoding of ``inputs`` under ``mask`` here.
        raise NotImplementedError()
    def sort_and_run_forward(self,
                             module: Callable[[PackedSequence, Optional[RnnState]],
                                              Tuple[Union[PackedSequence, torch.Tensor], RnnState]],
                             inputs: torch.Tensor,
                             mask: torch.Tensor,
                             hidden_state: Optional[RnnState] = None):
        """
        This function exists because Pytorch RNNs require that their inputs be sorted
        before being passed as input. As all of our Seq2xxxEncoders use this functionality,
        it is provided in a base class. This method can be called on any module which
        takes as input a ``PackedSequence`` and some ``hidden_state``, which can either be a
        tuple of tensors or a tensor.

        As all of our Seq2xxxEncoders have different return types, we return `sorted`
        outputs from the module, which is called directly. Additionally, we return the
        indices into the batch dimension required to restore the tensor to it's correct,
        unsorted order and the number of valid batch elements (i.e the number of elements
        in the batch which are not completely masked). This un-sorting and re-padding
        of the module outputs is left to the subclasses because their outputs have different
        types and handling them smoothly here is difficult.

        Parameters
        ----------
        module : ``Callable[[PackedSequence, Optional[RnnState]],
                            Tuple[Union[PackedSequence, torch.Tensor], RnnState]]``, required.
            A function to run on the inputs. In most cases, this is a ``torch.nn.Module``.
        inputs : ``torch.Tensor``, required.
            A tensor of shape ``(batch_size, sequence_length, embedding_size)`` representing
            the inputs to the Encoder.
        mask : ``torch.Tensor``, required.
            A tensor of shape ``(batch_size, sequence_length)``, representing masked and
            non-masked elements of the sequence for each element in the batch.
        hidden_state : ``Optional[RnnState]``, (default = None).
            A single tensor of shape (num_layers, batch_size, hidden_size) representing the
            state of an RNN with or a tuple of
            tensors of shapes (num_layers, batch_size, hidden_size) and
            (num_layers, batch_size, memory_size), representing the hidden state and memory
            state of an LSTM-like RNN.

        Returns
        -------
        module_output : ``Union[torch.Tensor, PackedSequence]``.
            A Tensor or PackedSequence representing the output of the Pytorch Module.
            The batch size dimension will be equal to ``num_valid``, as sequences of zero
            length are clipped off before the module is called, as Pytorch cannot handle
            zero length sequences.
        final_states : ``Optional[RnnState]``
            A Tensor representing the hidden state of the Pytorch Module. This can either
            be a single tensor of shape (num_layers, num_valid, hidden_size), for instance in
            the case of a GRU, or a tuple of tensors, such as those required for an LSTM.
        restoration_indices : ``torch.LongTensor``
            A tensor of shape ``(batch_size,)``, describing the re-indexing required to transform
            the outputs back to their original batch order.
        """
        # In some circumstances you may have sequences of zero length. ``pack_padded_sequence``
        # requires all sequence lengths to be > 0, so remove sequences of zero length before
        # calling self._module, then fill with zeros.
        # First count how many sequences are empty.
        batch_size = mask.size(0)
        # A row whose first timestep is masked is treated as fully padded
        # (presumably masks are left-aligned -- TODO confirm upstream).
        num_valid = torch.sum(mask[:, 0]).int().item()
        sequence_lengths = get_lengths_from_binary_sequence_mask(mask)
        sorted_inputs, sorted_sequence_lengths, restoration_indices, sorting_indices =\
            sort_batch_by_length(inputs, sequence_lengths)
        # Now create a PackedSequence with only the non-empty, sorted sequences.
        packed_sequence_input = pack_padded_sequence(sorted_inputs[:num_valid, :, :],
                                                     sorted_sequence_lengths[:num_valid].data.tolist(),
                                                     batch_first=True)
        # Prepare the initial states.
        if not self.stateful:
            # Caller-provided states must be re-ordered to match the sorted
            # batch and clipped to the non-empty rows.
            if hidden_state is None:
                initial_states = hidden_state
            elif isinstance(hidden_state, tuple):
                initial_states = [state.index_select(1, sorting_indices)[:, :num_valid, :]
                                  for state in hidden_state]
            else:
                initial_states = hidden_state.index_select(1, sorting_indices)[:, :num_valid, :]
        else:
            # Stateful encoders pull (and resize/sort) their cached states.
            initial_states = self._get_initial_states(batch_size, num_valid, sorting_indices)
        # Actually call the module on the sorted PackedSequence.
        module_output, final_states = module(packed_sequence_input, initial_states)
        return module_output, final_states, restoration_indices
    def _get_initial_states(self,
                            batch_size: int,
                            num_valid: int,
                            sorting_indices: torch.LongTensor) -> Optional[RnnState]:
        """
        Returns an initial state for use in an RNN. Additionally, this method handles
        the batch size changing across calls by mutating the state to append initial states
        for new elements in the batch. Finally, it also handles sorting the states
        with respect to the sequence lengths of elements in the batch and removing rows
        which are completely padded. Importantly, this `mutates` the state if the
        current batch size is larger than when it was previously called.

        Parameters
        ----------
        batch_size : ``int``, required.
            The batch size can change size across calls to stateful RNNs, so we need
            to know if we need to expand or shrink the states before returning them.
            Expanded states will be set to zero.
        num_valid : ``int``, required.
            The batch may contain completely padded sequences which get removed before
            the sequence is passed through the encoder. We also need to clip these off
            of the state too.
        sorting_indices ``torch.LongTensor``, required.
            Pytorch RNNs take sequences sorted by length. When we return the states to be
            used for a given call to ``module.forward``, we need the states to match up to
            the sorted sequences, so before returning them, we sort the states using the
            same indices used to sort the sequences.

        Returns
        -------
        This method has a complex return type because it has to deal with the first time it
        is called, when it has no state, and the fact that types of RNN have heterogeneous
        states.

        If it is the first time the module has been called, it returns ``None``, regardless
        of the type of the ``Module``.

        Otherwise, for LSTMs, it returns a tuple of ``torch.Tensors`` with shape
        ``(num_layers, num_valid, state_size)`` and ``(num_layers, num_valid, memory_size)``
        respectively, or for GRUs, it returns a single ``torch.Tensor`` of shape
        ``(num_layers, num_valid, state_size)``.
        """
        # We don't know the state sizes the first time calling forward,
        # so we let the module define what it's initial hidden state looks like.
        if self._states is None:
            return None
        # Otherwise, we have some previous states.
        if batch_size > self._states[0].size(1):
            # This batch is larger than the all previous states.
            # If so, resize the states.
            num_states_to_concat = batch_size - self._states[0].size(1)
            resized_states = []
            # state has shape (num_layers, batch_size, hidden_size)
            for state in self._states:
                # This _must_ be inside the loop because some
                # RNNs have states with different last dimension sizes.
                zeros = state.data.new(state.size(0),
                                       num_states_to_concat,
                                       state.size(2)).fill_(0)
                zeros = Variable(zeros)
                resized_states.append(torch.cat([state, zeros], 1))
            # NOTE: this mutates the cached state in place, as documented above.
            self._states = tuple(resized_states)
            correctly_shaped_states = self._states
        elif batch_size < self._states[0].size(1):
            # This batch is smaller than the previous one.
            correctly_shaped_states = tuple(state[:, :batch_size, :] for state in self._states)
        else:
            correctly_shaped_states = self._states
        # At this point, our states are of shape (num_layers, batch_size, hidden_size).
        # However, the encoder uses sorted sequences and additionally removes elements
        # of the batch which are fully padded. We need the states to match up to these
        # sorted and filtered sequences, so we do that in the next two blocks before
        # returning the state/s.
        if len(self._states) == 1:
            # GRUs only have a single state. This `unpacks` it from the
            # tuple and returns the tensor directly.
            correctly_shaped_state = correctly_shaped_states[0]
            sorted_state = correctly_shaped_state.index_select(1, sorting_indices)
            return sorted_state[:, :num_valid, :]
        else:
            # LSTMs have a state tuple of (state, memory).
            sorted_states = [state.index_select(1, sorting_indices)
                             for state in correctly_shaped_states]
            return tuple(state[:, :num_valid, :] for state in sorted_states)
def _update_states(self,
final_states: RnnStateStorage,
restoration_indices: torch.LongTensor) -> None:
"""
After the RNN has run forward, the states need to be updated.
This method just sets the state to the updated new state, performing
several pieces of book-keeping along the way - namely, unsorting the
states and ensuring that the states of completely padded sequences are
not updated. Finally, it also detatches the state variable from the
computational graph, such that the graph can be garbage collected after
each batch iteration.
Parameters
----------
final_states : ``RnnStateStorage``, required.
The hidden states returned as output from the RNN.
restoration_indices : ``torch.LongTensor``, required.
The indices that invert the sorting used in ``sort_and_run_forward``
to order the states with respect to the lengths of the sequences in
the batch.
"""
# TODO(Mark): seems weird to sort here, but append zeros in the subclasses.
# which way around is best?
new_unsorted_states = [state.index_select(1, restoration_indices)
for state in final_states]
if self._states is None:
# We don't already have states, so just set the
# ones we receive to be the current state.
self._states = tuple([torch.autograd.Variable(state.data)
for state in new_unsorted_states])
else:
# Now we've sorted the states back so that they correspond to the original
# indices, we need to figure out what states we need to update, because if we
# didn't use a state for a particular row, we want to preserve its state.
# Thankfully, the rows which are all zero in the state correspond exactly
# to those which aren't used, so we create masks of shape (new_batch_size,),
# denoting which states were used in the RNN computation.
current_state_batch_size = self._states[0].size(1)
new_state_batch_size = final_states[0].size(1)
# Masks for the unused states of shape (1, new_batch_size, 1)
used_new_rows_mask = [(state[0, :, :].sum(-1)
!= 0.0).float().view(1, new_state_batch_size, 1)
for state in new_unsorted_states]
new_states = []
if current_state_batch_size > new_state_batch_size:
# The new state is smaller than the old one,
# so just update the indices which we used.
for old_state, new_state, used_mask in zip(self._states,
new_unsorted_states,
used_new_rows_mask):
# zero out all rows in the previous state
# which _were_ used in the current state.
masked_old_state = old_state[:, :new_state_batch_size, :] * (1 - used_mask)
# The old state is larger, so update the relevant parts of it.
old_state[:, :new_state_batch_size, :] = new_state + masked_old_state
# Detatch the Variable.
new_states.append(torch.autograd.Variable(old_state.data))
else:
# The states are the same size, so we just have to
# deal with the possibility that some rows weren't used.
new_states = []
for old_state, new_state, used_mask in zip(self._states,
new_unsorted_states,
used_new_rows_mask):
# zero out all rows which _were_ used in the current state.
masked_old_state = old_state * (1 - used_mask)
# The old state is larger, so update the relevant parts of it.
new_state += masked_old_state
# Detatch the Variable.
new_states.append(torch.autograd.Variable(new_state.data))
# It looks like there should be another case handled here - when
# the current_state_batch_size < new_state_batch_size. However,
# this never happens, because the states themeselves are mutated
# by appending zeros when calling _get_inital_states, meaning that
# the new states are either of equal size, or smaller, in the case
# that there are some unused elements (zero-length) for the RNN computation.
self._states = tuple(new_states)
def reset_states(self):
self._states = None
| [
"[email protected]"
]
| |
db3f0149082015ad2e316bc5ffa3e4203d75ed58 | 9edaf93c833ba90ae9a903aa3c44c407a7e55198 | /netex/models/tram_submode_enumeration.py | 21e98e1f17bfaf27cc43a6cf15270a4c5b5075be | []
| no_license | tefra/xsdata-samples | c50aab4828b8c7c4448dbdab9c67d1ebc519e292 | ef027fe02e6a075d8ed676c86a80e9647d944571 | refs/heads/main | 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 | Python | UTF-8 | Python | false | false | 355 | py | from enum import Enum
__NAMESPACE__ = "http://www.netex.org.uk/netex"
class TramSubmodeEnumeration(Enum):
UNKNOWN = "unknown"
UNDEFINED = "undefined"
CITY_TRAM = "cityTram"
LOCAL_TRAM = "localTram"
REGIONAL_TRAM = "regionalTram"
SIGHTSEEING_TRAM = "sightseeingTram"
SHUTTLE_TRAM = "shuttleTram"
TRAIN_TRAM = "trainTram"
| [
"[email protected]"
]
| |
47cc84638073c784a1f807c048912002bf7587f6 | 780b01976dad99c7c2ed948b8473aa4e2d0404ba | /scripts/alphas_archive/zs_callspread/alpha_ichimokucloud_long_bullish_dec13.py | 61e52350abd7d1776b0b7786ea4b29869a6c59b1 | []
| no_license | trendmanagement/tmqrexo_alexveden | a8ad699c2c3df4ce283346d287aff4364059a351 | 4d92e2ee2bc97ea2fcf075382d4a5f80ce3d72e4 | refs/heads/master | 2021-03-16T08:38:00.518593 | 2019-01-23T08:30:18 | 2019-01-23T08:30:18 | 56,336,692 | 1 | 1 | null | 2019-01-22T14:21:03 | 2016-04-15T17:05:53 | Python | UTF-8 | Python | false | false | 1,425 | py | #
#
# Automatically generated file
# Created at: 2016-12-16 11:13:41.634827
#
from backtester.strategy import OptParam
from backtester.swarms.rebalancing import SwarmRebalance
from backtester.strategy import OptParamArray
from backtester.swarms.rankingclasses import RankerBestWithCorrel
from backtester.costs import CostsManagerEXOFixed
from strategies.strategy_ichimokucloud import StrategyIchimokuCloud
STRATEGY_NAME = StrategyIchimokuCloud.name
STRATEGY_SUFFIX = "_Bullish_Dec13"
STRATEGY_CONTEXT = {
'strategy': {
'class': StrategyIchimokuCloud,
'opt_params': [
OptParamArray('Direction', [1]),
OptParam('conversion_line_period', 9, 5, 5, 13),
OptParam('base_line_period', 26, 26, 26, 13),
OptParam('leading_spans_lookahead_period', 26, 26, 26, 10),
OptParam('leading_span_b_period', 52, 13, 13, 10),
OptParamArray('RulesIndex', [0]),
OptParam('MedianPeriod', 5, 45, 45, 10),
],
'exo_name': 'ZS_CallSpread',
},
'costs': {
'context': {
'costs_options': 3.0,
'costs_futures': 3.0,
},
'manager': CostsManagerEXOFixed,
},
'swarm': {
'rebalance_time_function': SwarmRebalance.every_friday,
'members_count': 1,
'ranking_class': RankerBestWithCorrel(window_size=-1, correl_threshold=-0.5),
},
}
| [
"[email protected]"
]
| |
c3259cc6b10289095af347d864f6e9ffaaad2ed2 | c3feebac5afce89b0261168286cc5052c20a89b7 | /gui/imageViewWidget.py | 6a4a745cec118d0a796e5de7fd5846cca2c62e7f | []
| no_license | iHaD/meShaderEd | 6252337ba8d152f89854186b468ff3ce226a254e | f305ae7aaf669317eb0470af18ee82b4b62a3e7d | refs/heads/master | 2021-01-18T02:31:06.974282 | 2014-07-14T11:05:52 | 2014-07-14T11:05:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,902 | py | #===============================================================================
# imageViewWidget.py
#
#
#
#===============================================================================
import os, sys
from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import QDir, QString, QModelIndex
from PyQt4.QtGui import QFileSystemModel
from PyQt4.QtGui import QFileIconProvider
from ui_imageViewWidget import Ui_imageViewWidget
import gui.ui_settings as UI
from core.node import Node
from core.nodeLibrary import NodeLibrary
#
# ImageViewWidget
#
class ImageViewWidget ( QtGui.QWidget ) :
#
# __init__
#
def __init__ ( self ) :
#
QtGui.QWidget.__init__ ( self )
# This is always the same
self.ui = Ui_imageViewWidget ()
self.ui.setupUi ( self )
self.ui.selector.setMinimumSize ( QtCore.QSize ( UI.COMBO_WIDTH, UI.COMBO_HEIGHT ) )
self.ui.selector.setMaximumSize ( QtCore.QSize( UI.MAX, UI.COMBO_HEIGHT ) )
self.imageNodes = []
#self.ui.treeView.setDragEnabled ( True )
#self.ui.treeView.setRootIsDecorated( True )
QtCore.QObject.connect ( self.ui.imageArea, QtCore.SIGNAL ( 'mouseDoubleClickEvent' ), self.updateViewer )
QtCore.QObject.connect ( self.ui.selector, QtCore.SIGNAL ( 'currentIndexChanged(int)' ), self.onViewerChanged )
#QtCore.QObject.connect( self.ui, QtCore.SIGNAL( 'paramChanged()' ), self.onParamChanged )
#self.updateGui()
#self.emit( QtCore.SIGNAL( 'onGfxNodeParamChanged(QObject,QObject)' ), self, param.name )
#
# currentImageNode
#
def currentImageNode ( self ) :
gfxNode = None
idx = self.ui.selector.currentIndex ()
if len ( self.imageNodes ) > 0 :
gfxNode = self.imageNodes [ idx ]
return gfxNode
#
# addViewer
#
def addViewer ( self, gfxNode ) :
#
self.imageNodes.append ( gfxNode )
self.ui.selector.addItem ( gfxNode.node.label )
#
# removeAllViewers
#
def removeAllViewers ( self ) :
#
self.imageNodes = []
self.ui.selector.clear()
#
# removeViewer
#
def removeViewer ( self, gfxNode ) :
#
for i in range ( 0, len ( self.imageNodes ) ) :
if gfxNode == self.imageNodes [ i ] :
self.imageNodes.pop ( i )
self.ui.selector.removeItem ( i )
#QtCore.QObject.disconnect ( gfxNode.node, QtCore.SIGNAL( 'onNodeParamChanged(QObject,QObject)' ), self.onNodeParamChanged )
break
#
# onViewerChanged
#
def onViewerChanged ( self, idx ) :
#
if len ( self.imageNodes ) > 0 :
print ">> ImageViewWidget.onViewerChanged to %s" % self.imageNodes [ idx ].node.label
#QtCore.QObject.connect( self.imageNodes[ idx ].node, QtCore.SIGNAL( 'onNodeParamChanged(QObject,QObject)' ), self.onNodeParamChanged )
self.updateViewer ( compute = False )
#
# updateViewer
#
def updateViewer ( self, compute = True ) :
#
print ">> ImageViewWidget.updateViewer"
RenderViewMode = False
idx = self.ui.selector.currentIndex ()
if len ( self.imageNodes ) > 0 :
gfxNode = self.imageNodes [ idx ]
print ">> ImageViewWidget.getImageName on %s" % gfxNode.node.label
imageInputParam = gfxNode.node.getInputParamByName ( 'image' )
if imageInputParam is not None :
if gfxNode.node.isInputParamLinked ( imageInputParam ):
link = gfxNode.node.inputLinks [ imageInputParam ]
displayParam = link.srcNode.getInputParamByName ( 'DisplayDriver' )
if displayParam is not None :
print '>> Display driver = %s' % displayParam.value
if displayParam.value != 'tiff' :
RenderViewMode = True
if compute :
imageName = gfxNode.node.computeNode ()
else :
imageName = gfxNode.node.imageName
print ">> ImageViewWidget: imageName = %s" % imageName
if not RenderViewMode :
self.ui.imageArea.setImage ( imageName )
#imageParam = None
#for param in gfxNode.node.inputParams :
# if param.name == 'image' :
# imageParam = param
# break
#if imageParam is not None :
# print ">> ImageViewWidget: image = %s" % imageParam.value
# self.ui.imageArea.setImage ( imageParam.value )
#
# autoUpdate
#
def autoUpdate ( self ) : return self.ui.chk_auto.isChecked ()
#
# onNodeParamChanged
#
def onNodeParamChanged ( self, node, param ) :
#
print ">> ImageViewWidget.onNodeParamChanged %s %s" % ( node.label, param.name )
if node == self.currentImageNode().node :
self.updateViewer ()
#
# onNodeLabelChanged
#
def onNodeLabelChanged ( self, gfxNode, newLabel ) :
#
print ">> ImageViewWidget.onNodeLabelChanged %s %s" % ( gfxNode.node.label, newLabel )
i = 0
for i in range ( len ( self.imageNodes ) ) :
if gfxNode == self.imageNodes [ i ] :
self.ui.selector.setItemText ( i, newLabel )
break
i += 1
| [
"[email protected]"
]
| |
bb775d1214a866c4b577069f0c4dc8e59ea5672e | 90386753276ced3360e76f5551d25f6618613a23 | /Python 100例/44.py | 4a489a499ad6870560b2f7f32b7732f19f290c0b | []
| no_license | yflfly/funny-python | 5d69dbcafd1c98c5e4046b85f8678e4bcf53870c | ff42b84b46152234e3bc824ae8016f354af450c4 | refs/heads/master | 2023-03-09T16:58:27.116227 | 2021-03-01T14:07:46 | 2021-03-01T14:07:46 | 280,602,099 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 731 | py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
'''
两个 3 行 3 列的矩阵,实现其对应位置的数据相加,并返回一个新矩阵:
X = [[12,7,3],
[4 ,5,6],
[7 ,8,9]]
Y = [[5,8,1],
[6,7,3],
[4,5,9]]
程序分析:创建一个新的 3 行 3 列的矩阵,使用 for 迭代并取出 X 和 Y 矩阵中对应位置的值,相加后放到新矩阵的对应位置中。
'''
X = [[12, 7, 3],
[4, 5, 6],
[7, 8, 9]]
Y = [[5, 8, 1],
[6, 7, 3],
[4, 5, 9]]
result = [[0, 0, 0],
[0, 0, 0],
[0, 0, 0]]
# 迭代输出行
for i in range(len(X)):
# 迭代输出列
for j in range(len(X[0])):
result[i][j] = X[i][j] + Y[i][j]
for r in result:
print(r) | [
"[email protected]"
]
| |
7aff24db643aa477df397dca2c7229896579646e | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03032/s079897721.py | f427a2d0ce6513e4da77d12dc104cf67a5636e74 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 625 | py | import sys
input = sys.stdin.readline
def main():
N, K = map(int, input().split())
V = list(map(int, input().split()))
ans = -float("inf")
for t in range(min(N, K) + 1):
s = K - t
for l in range(t + 1):
r = t - l
gem = V[:l]
gem += V[-r:] if r != 0 else []
gem.sort()
value = sum(gem)
for i in range(min(s, t)):
if gem[i] < 0:
value -= gem[i]
else:
break
ans = max(ans, value)
print(ans)
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
3a96c3ae0d08fa37f2a0225887bc6118277e5fdc | 4eee7b0e53818bd21ca009d742ac8391202620ba | /home/three/num_nd.py | e73b8254580d1b8166605ff2b113fb765d64cf50 | []
| no_license | mysqlf/python | e7c44bafee5abefc1356da9fb123fe3d6b3d2e7c | e8aacf30e046d71681a93a5f333de72e48410ebf | refs/heads/master | 2020-05-21T13:33:28.289668 | 2017-11-09T02:13:48 | 2017-11-09T02:13:48 | 61,173,293 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 658 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @author Greedywolf
print(round(1.23, 1))
# 1.2
print(round(11.27, -1))
# 10.0
print(round(16.27, -1))
# 20.0
print(round(-1.27, -1))
#-0.0
tmpr = round(1.25361, 3)
print(tmpr)
# 1.254
tmp = 1.25361
tmpf = format(tmp, '0.3f')
print(tmpf)
print(tmp)
a = 1627731
# 传给 round() 函数的 ndigits 参数可以是负数,这种情况下,
# 舍入运算会作用在十位、百位、千位等上面
print(round(a, -1))
# 1627730
print(round(a, -2))
# 1627700
print(round(a, -4))
# 1630000
a = 2.1
b = 4.2
c = a + b
print(c)
# 6.300000000000001
print(round(c, 2))
# 6.3
# 这个与输出一定宽度的
| [
"[email protected]"
]
| |
342c27c478778c9c198081a0a88745f5c731d310 | c61b87703a2e385815cf4abc7cf62221fe6f0d70 | /build/lib/tugalinhas/util.py | 139f04d0127d749bd90670555bc2e5a9db0d1052 | []
| no_license | griselgrisel/pytuga | 39f89c24826e46685da34ec912e56c735d4b786e | e9596ac81a0c563130462ac71bfe96dd01120001 | refs/heads/master | 2020-12-28T20:41:48.815642 | 2015-11-11T20:15:30 | 2015-11-11T20:15:30 | 45,982,808 | 0 | 0 | null | 2015-11-11T13:17:41 | 2015-11-11T13:17:41 | null | UTF-8 | Python | false | false | 7,838 | py | '''A random collection of utility functions'''
import os
import math
from random import randrange
from math import copysign
from PyQt4 import QtSvg, QtGui, QtCore
from tugalinhas import SHARED_ART_PATH, NODEFAULT
#
# Custom errors
#
class TooManyPens(RuntimeError):
pass
#
# Custom functions
#
def as_qpoint(pt):
'''enforce that pt is an instance of QPointF'''
if isinstance(pt, QtCore.QPointF):
return pt
else:
return QtCore.QPointF(*pt)
def sign(x):
'return 1 if x is positive, -1 if negative, or zero'
return copysign(1, x)
def plist(f):
'''given function object,
return a list of [(arg name: default value or None), ...]
'''
parameter_defaults = []
defaults = f.__defaults__
if defaults is not None:
defaultcount = len(defaults)
else:
defaultcount = 0
argcount = f.__code__.co_argcount
for i in range(f.__code__.co_argcount):
name = f.__code__.co_varnames[i]
value = NODEFAULT
if i >= argcount - defaultcount:
value = defaults[i - (argcount - defaultcount)]
parameter_defaults.append((name, value))
return parameter_defaults
class SvgRenderer(object):
'factory for svg renderer objects'
def __init__(self, app):
self.app = app
def getrend(self, filepath=None):
'''Return a handle to the shared SVG renderer for the given svg file.
If no filepath is given, return the renderer for the default svg file.
'''
if filepath is None:
filepath = os.path.join(SHARED_ART_PATH)
return QtSvg.QSvgRenderer(filepath, self.app)
def choose_color(r=None, g=None, b=None, a=None):
'''Normalize input to a tuple of (r, g, b, a)'''
if a is None:
a = 255
elif not (0 <= a <= 255):
raise ValueError('Alpha value must be between 0 and 255')
# Random colors
if r == 'random':
r, g, b = [randrange(256) for _ in range(3)]
elif r == 'rlight':
r, g, b = [randrange(200, 256) for _ in range(3)]
elif r == 'rmedium':
r, g, b = [randrange(100, 200) for _ in range(3)]
elif r == 'rdark':
r, g, b = [randrange(100) for _ in range(3)]
elif r == 'ralpha':
r, g, b = [randrange(256) for _ in range(3)]
a = randrange(100, 200)
# Null colors (shouldn't raise an error?)
elif r is g is b is None:
return None, None, None, None
# From RGB components
elif g is not None and b is not None:
if not (0 <= r <= 255 and 0 <= g <= 255 and 0 <= b <= 255):
raise ValueError('Color components must be between 0 and 255')
c = QtGui.QColor.fromRgb(r, g, b, a)
r, g, b, a = c.red(), c.green(), c.blue(), c.alpha()
# From a tuple or sequence
elif r is not None:
try:
if len(r) == 4:
rr, gg, bb, aa = r
rr, gg, bb, aa = int(rr), int(gg), int(bb), int(aa)
elif len(r) == 3:
rr, gg, bb = r
rr, gg, bb = int(rr), int(gg), int(bb)
aa = 255
else:
raise ValueError
except ValueError:
try:
ci = int(r)
c = QtGui.QColor.fromRgba(ci)
except ValueError:
if not QtGui.QColor.isValidColor(r):
raise ValueError
c = QtGui.QColor(r)
r, g, b, a = c.red(), c.green(), c.blue(), c.alpha()
else:
r, g, b, a = rr, gg, bb, aa
# Bad input...
elif r is None or g is None or b is None:
raise TypeError
return r, g, b, a
def nudge_color(color, r=None, g=None, b=None, a=None):
"""Change the color (a 3-element tuple) by given amounts,
return the new RGB tuple.
Clamps the RGB return values such that 0 <= RGB <= 255
but does not necessarily return only integer values.
Not returning strictly integers allows for smoother color
variations, but note that when the values are passed
to the tugalinhas color() function the values will be
converted to integers. So in order to take advantage
of the more precise values you will need to keep those
separately from the actual tugalinhas color values.
The function's r, g, b parameters can be either:
numbers to be added to or subtracted from the RGB tuple
components, or
percentages (as strings) that will be multiplied by the component
to increase or decrease that component by given the given
percent.
>>> color = (100, 100, 100)
>>> nudge_color(color, g=15)
(100, 115, 100)
>>> color = (100, 100, 100)
>>> nudge_color(color, r=-12.5)
(87.5, 100, 100)
>>> color = (100, 100, 100)
>>> color = nudge_color(color, b='75%')
>>> color
(100, 100, 75.0)
>>> nudge_color(color, b='75%')
(100, 100, 57.25)
>>> color = (100, 100, 100)
>>> nudge_color(color, r=50, g='105%', b=-10)
(150, 105, 90)
"""
if len(color) == 3:
rc, gc, bc = color
ac = 255
elif len(color) == 4:
rc, gc, bc, ac = color
else:
raise ValueError
if r is not None:
try:
rc += r
except TypeError:
rc *= (float(r[:-1]) / 100.0)
if g is not None:
try:
gc += g
except TypeError:
gc *= (float(g[:-1]) / 100.0)
if b is not None:
try:
bc += b
except TypeError:
bc *= (float(b[:-1]) / 100.0)
if a is not None:
try:
ac += a
except TypeError:
ac *= (float(a[:-1]) / 100.0)
rc = min(rc, 255)
gc = min(gc, 255)
bc = min(bc, 255)
ac = min(ac, 255)
rc = max(rc, 0)
gc = max(gc, 0)
bc = max(bc, 0)
ac = max(ac, 0)
return (rc, gc, bc, ac)
def docfrom(function, decorated=None):
'''Creates a decorator that saves documentation from the given
function.
>>> @docfrom(sum)
... def my_sum(args):
... return sum(args, 0.0)
'''
if decorated is not None:
decorated.__doc__ = function.__doc__
return decorated
else:
def decorator(func):
return docfrom(function, func)
return decorator
#
# From Python's turtle module
#
class Vec2D(tuple):
"""Simple 2D vector arithmetic
Provides (for a, b vectors, k number):
a + b vector addition
a - b vector subtraction
a * b inner product
k * a and a * k multiplication with scalar
abs(a) absolute value of a
a.rotate(angle) rotation
"""
def __new__(cls, x, y):
return tuple.__new__(cls, (x, y))
def __add__(self, other):
return Vec2D(self[0] + other[0], self[1] + other[1])
def __mul__(self, other):
if isinstance(other, Vec2D):
return self[0] * other[0] + self[1] * other[1]
return Vec2D(self[0] * other, self[1] * other)
def __rmul__(self, other):
if isinstance(other, int) or isinstance(other, float):
return Vec2D(self[0] * other, self[1] * other)
def __sub__(self, other):
return Vec2D(self[0] - other[0], self[1] - other[1])
def __neg__(self):
return Vec2D(-self[0], -self[1])
def __abs__(self):
return (self[0] ** 2 + self[1] ** 2) ** 0.5
def rotate(self, angle):
"""rotate self counterclockwise by angle
"""
perp = Vec2D(-self[1], self[0])
angle = angle * math.pi / 180.0
c, s = math.cos(angle), math.sin(angle)
return Vec2D(self[0] * c + perp[0] * s, self[1] * c + perp[1] * s)
def __getnewargs__(self):
return (self[0], self[1])
def __repr__(self):
return "(%.2f,%.2f)" % self
| [
"[email protected]"
]
| |
2ee0e724dff3317c1dfc707c560983a8e24f7cb5 | f4434c85e3814b6347f8f8099c081ed4af5678a5 | /sdk/communication/azure-communication-administration/samples/phone_number_capabilities_sample.py | 44b9dcb1a8796731fbe8b24846fef0450db96aee | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
]
| permissive | yunhaoling/azure-sdk-for-python | 5da12a174a37672ac6ed8e3c1f863cb77010a506 | c4eb0ca1aadb76ad892114230473034830116362 | refs/heads/master | 2022-06-11T01:17:39.636461 | 2020-12-08T17:42:08 | 2020-12-08T17:42:08 | 177,675,796 | 1 | 0 | MIT | 2020-03-31T20:35:17 | 2019-03-25T22:43:40 | Python | UTF-8 | Python | false | false | 2,811 | py | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: phone_number_capabilities_sample.py
DESCRIPTION:
This sample demonstrates how to get number capabilities via a connection string, capabilities update id and phone number for capabilities.
USAGE:
python phone_number_capabilities_sample.py
Set the environment variables with your own values before running the sample:
1) AZURE_COMMUNICATION_SERVICE_CONNECTION_STRING - The endpoint of your Azure Communication Service
2) AZURE_COMMUNICATION_SERVICE_PHONENUMBERS_CAPABILITIES_ID - The capabilities id you want to get
3) AZURE_COMMUNICATION_SERVICE_PHONENUMBERS_PHONENUMBER_FOR_CAPABILITIES - The phone number you want to update capabilities to
"""
import os
from azure.communication.administration import (
PhoneNumberAdministrationClient,
NumberUpdateCapabilities
)
connection_str = os.getenv('AZURE_COMMUNICATION_SERVICE_CONNECTION_STRING')
phone_number_administration_client = PhoneNumberAdministrationClient.from_connection_string(connection_str)
capabilities_id = os.getenv('AZURE_COMMUNICATION_SERVICE_PHONENUMBERS_CAPABILITIES_ID', "capabilities-id")
phonenumber_for_capabilities = os.getenv('AZURE_COMMUNICATION_SERVICE_PHONENUMBERS_PHONENUMBER_FOR_CAPABILITIES', "+17771234567")
def list_all_phone_numbers():
# [START list_all_phone_numbers]
list_all_phone_numbers_response = phone_number_administration_client.list_all_phone_numbers()
# [END list_all_phone_numbers]
print('list_all_phone_numbers_response:')
for phone_number in list_all_phone_numbers_response:
print(phone_number)
def get_capabilities_update():
# [START get_capabilities_update]
capabilities_response = phone_number_administration_client.get_capabilities_update(
capabilities_update_id=capabilities_id
)
# [END get_capabilities_update]
print('capabilities_response:')
print(capabilities_response)
def update_capabilities():
# [START update_capabilities]
update = NumberUpdateCapabilities(add=iter(["InboundCalling"]))
phone_number_capabilities_update = {
phonenumber_for_capabilities: update
}
capabilities_response = phone_number_administration_client.update_capabilities(
phone_number_capabilities_update=phone_number_capabilities_update
)
# [END update_capabilities]
print('capabilities_response:')
print(capabilities_response)
if __name__ == '__main__':
list_all_phone_numbers()
get_capabilities_update()
update_capabilities()
| [
"[email protected]"
]
| |
22623047d1de9e48c8ead3eabedeac514d5c8f48 | f4c4e131ce63ce795822d1ff27c9294e4b00887d | /modules/common.py | 6acd27d318860603203386fe30940b0eaa62dcb4 | []
| no_license | shundev/ruscorpora_tagging | d77196b75808743a41f5ecfb3ee5ef204db72c44 | b6d41d1a33ad25cf6a1f24e6fe94c0816e378fa0 | refs/heads/master | 2021-05-30T17:18:37.065460 | 2015-10-20T13:41:37 | 2015-10-20T13:41:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 450 | py | # All rights belong to Non-commercial Partnership "Russian National Corpus"
# http://ruscorpora.ru
# editor marks inside a word
editor_brackets = ur'\[\]\<\>'
def quotetext(s):
if not s:
return u""
return s.replace(u'&', u'&').replace(u'<', u'<').replace(u'>', u'>')
def quoteattr(s):
return quotetext(s).replace(u"'", u''').replace(u'"', u'"').replace(u'\n', u'
').replace(u'\r', u'
').replace(u'\t', u'	') | [
"[email protected]"
]
| |
5852301d44f5755d54598d0a6b389ab9759a8f16 | 636ba2700eaf3a151b73144b510f38c75ab1919d | /Kaggle2/02_image.py | 6fbe71238c67a7659f02cd543a3d79a6841ab069 | []
| no_license | Taerimmm/ML | 17997f388e18c28dfd9de83af98a6d4bebe7e1f0 | 6147cede81ebcc95f21adebf75731fbbb11edfab | refs/heads/master | 2023-06-10T14:26:45.335219 | 2021-07-05T15:30:47 | 2021-07-05T15:30:47 | 324,874,959 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 199 | py | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from PIL import Image
import tifffile as tiff
import cv2
import os
from tqdm.notebook import tqdm
import zipfile
import rasterio | [
"[email protected]"
]
| |
7095b4a9a2e2976486cba6762f2b417aa67ed27e | 910d4dd8e56e9437cf09dd8b9c61167673140a1f | /0521/경로찾기.py | 812e5313e58abf8a17e66ba6912e44000ace33d7 | []
| no_license | nopasanadamindy/Algorithms | 10825b212395680401b200a37ab4fde9085bc61f | 44b82d2f129c4cc6e811b651c0202a18719689cb | refs/heads/master | 2022-09-28T11:39:54.630487 | 2020-05-29T09:49:56 | 2020-05-29T09:49:56 | 237,923,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 693 | py | import sys
sys.stdin = open('경로찾기.txt')
def dfs(v):
stack = []
visited = [0 for _ in range(N)]
while 1 :
for w in range(len(G)):
if G[v][w] == 1 and visited[w] == 0:
stack.append(w)
visited[w] = 1
if len(stack) == 0:
return visited
else:
v = stack.pop()
def prin(a):
for i in range(len(a)):
print(*a[i])
T = int(input())
for test_case in range(1, T+1):
N = int(input())
G = []
result = []
for i in range(N):
temp = list(map(int, input().split()))
G.append(temp)
for i in range(N):
result.append(dfs(i))
prin(result) | [
"[email protected]"
]
| |
9738d62381adf8b93b9b3f7c29b65d7326b5ba7e | 312dc11233a147e01b01ad42166eca7e8ebf3c66 | /testing/parabolic.py | a0ecc42c218bbf69486a64e488db627605a33d55 | []
| no_license | byronwasti/GuitarTuner | 622f64f21f913f537fffe8c0fd04970fac99af75 | 1a748c436b01b399cc57d24070bddfb61d2f61f8 | refs/heads/master | 2016-09-05T12:24:45.663439 | 2015-04-21T17:50:45 | 2015-04-21T17:50:45 | 32,682,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,670 | py | # -*- coding: utf-8 -*-
#
from __future__ import division
from numpy import polyfit, arange
def parabolic(f, x):
"""Quadratic interpolation for estimating the true position of an
inter-sample maximum when nearby samples are known.
f is a vector and x is an index for that vector.
Returns (vx, vy), the coordinates of the vertex of a parabola that goes
through point x and its two neighbors.
Example:
Defining a vector f with a local maximum at index 3 (= 6), find local
maximum if points 2, 3, and 4 actually defined a parabola.
In [3]: f = [2, 3, 1, 6, 4, 2, 3, 1]
In [4]: parabolic(f, argmax(f))
Out[4]: (3.2142857142857144, 6.1607142857142856)
"""
xv = 1/2. * (f[x-1] - f[x+1]) / (f[x-1] - 2 * f[x] + f[x+1]) + x
yv = f[x] - 1/4. * (f[x-1] - f[x+1]) * (xv - x)
return (xv, yv)
def parabolic_polyfit(f, x, n):
"""Use the built-in polyfit() function to find the peak of a parabola
f is a vector and x is an index for that vector.
n is the number of samples of the curve used to fit the parabola.
"""
a, b, c = polyfit(arange(x-n//2, x+n//2+1), f[x-n//2:x+n//2+1], 2)
xv = -0.5 * b/a
yv = a * xv**2 + b * xv + c
return (xv, yv)
if __name__=="__main__":
from numpy import argmax
import matplotlib.pyplot as plt
y = [2, 1, 4, 8, 11, 10, 7, 3, 1, 1]
xm, ym = argmax(y), y[argmax(y)]
xp, yp = parabolic(y, argmax(y))
plot = plt.plot(y)
plt.hold(True)
plt.plot(xm, ym, 'o', color='silver')
plt.plot(xp, yp, 'o', color='blue')
plt.title('silver = max, blue = estimated max')
| [
"[email protected]"
]
| |
654ba558a2d92208ff911b919fba2a9cb2e848c5 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03105/s703688480.py | 95f7be8e7125a8046c346b3209e2a73e83397228 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 91 | py | a, b, c = [int(i) for i in input().split()]
if b > a*c:
print(c)
else:
print(b//a) | [
"[email protected]"
]
| |
8adf40e302cbe5a0093ac6c8e47055eef1a47754 | 2d445c21e58e80841b0ac99cc678137812f0a670 | /lib/python/adjacency_matrix_graph_bfs.py | 8b98b321ead9a21fae4e993e7057258d2c35ff22 | []
| no_license | kajyuuen/programming_contest | 65c9bc7b7101d7b5fb35cd0cf3fdc6b4213fd0f6 | d2b29edf655f64d018947773a29c9a7e3c4a3456 | refs/heads/master | 2020-04-23T09:51:04.554152 | 2020-03-06T08:09:29 | 2020-03-06T08:09:29 | 171,083,409 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,074 | py | from collections import deque
INF = float("inf")
def bfs(G, i, visited = []):
if len(visited) == 0:
visited.append(i)
queue = deque()
queue.append(i)
while queue:
i = queue.popleft()
for j in range(len(G)):
if (G[i][j] != INF) and (G[i][j] != 0) and (j not in visited):
visited.append(j)
queue.append(j)
return visited
if __name__ == '__main__':
def i_inpl(): return int(input())
def s_inpl(): return map(int,input().split())
def l_inpl(): return list(map(int, input().split()))
# 入力例
# https://atcoder.jp/contests/abc016/tasks/abc016_3
N, M = s_inpl()
# 隣接行列 G, 存在しない辺はINF
G = [[INF] * N for _ in range(N)]
# 自身に向かうコストは0
for i in range(N):
G[i][i] = 0
for _ in range(M):
a, b = s_inpl()
a, b = a-1, b-1
G[a][b] = 1
G[b][a] = 1
# ある頂点から訪れることができる頂点の列挙
for i in range(N):
print(bfs(G, i, [])) | [
"[email protected]"
]
| |
c2dba4555aa853c5706f495c56459e3efea80e93 | 799a0af9c05deabe5d5250a10e480ec15ae0216e | /Xpath_test/xpath_test_2.py | a097fbf67eebc5c9494a6c7e014742b599bcfbbe | [
"MIT"
]
| permissive | waws520waws/waws_spider | 9b2be28834c08166463fe265e0f5c37a874369c8 | c6a5988121f32619a5c5134c09fdfd556c696fe7 | refs/heads/master | 2021-03-20T06:01:22.041937 | 2020-03-28T02:49:16 | 2020-03-28T02:49:16 | 247,183,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 733 | py | # encoding:utf-8
import lxml
"""
lxml.etree.HTML() 处理文本字符串
lxml.etree.parse() 处理的是文件内容
"""
import lxml.etree
html = lxml.etree.parse("1.html") # 处理文件
print(html)
print(type(html))
print(lxml.etree.tostring(html))
"""
报错:
lxml.etree.XMLSyntaxError: Opening and ending tag mismatch: meta line 4 and head, line 6, column 8
这个主要是标签不匹配的原因,将html中的meta标签去掉即可
"""
"""
知识点:lxml.etree.parse(html_file_path,解析器),使用tostring()得到的数据是bytes类型的,decode解码查看
from lxml import etree
html = etree.parse('./test.html', etree.HTMLParser())
result = etree.tostring(html)
print(result.decode('utf-8'))
""" | [
"[email protected]"
]
| |
9623ef12cdf511e3619a20585d60b7650ee1a19b | 7a1b08c64b29522d4bbb913475275c1bc8ad61a4 | /diag/doc_diag4/diag_read.py | a8fe6e0e0eca9a9bd598badc98c0dfb99f4dd8a9 | [
"MIT"
]
| permissive | erichilarysmithsr/time-track | 8f84d4cc92cebaedce550b3741982d204e734a6c | dc0a7b63c937d561309f9b1c84af65fb581a8e18 | refs/heads/master | 2023-03-27T08:07:46.717221 | 2021-03-30T16:45:50 | 2021-03-30T16:45:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,214 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
from tkinter import *
from tkinter import messagebox
import os
import subprocess
fen=Tk()
fen.title("Diagnostics and ATCD")
fen.configure(background='cyan')
# To place side by side labelo + entrylab
top = Frame(fen, bg='cyan')
bottom = Frame(fen, bg='cyan')
top.pack(side=TOP)
bottom.pack(side=BOTTOM, fill=BOTH, expand=YES)
labelo=Label(fen, text="Diagnostics and ATCD for : ",
font='Arial 18 bold', fg='navy', bg='cyan')
labelo.pack(in_=top, side=LEFT, padx=5, pady=20)
with open('./newpatient/entryfile4.txt', 'r') as filename:
line1=filename.readline()
entrytext=StringVar()
entrytext.set(line1)
entryName=Entry(fen, textvariable=entrytext)
entryName.pack(in_=top, side=LEFT, padx=10, pady=20)
labelallergy=Label(fen, text="Allergy",
font='Arial 18 bold', fg='coral', bg='cyan')
labelallergy.pack(padx=5, pady=10)
with open('./allergy/allergyfile4.txt', 'r') as filename:
lineA1=filename.readline()
lineA2=filename.readline()
lineA3=filename.readline()
lineA4=filename.readline()
lineA5=filename.readline()
lineA6=filename.readline()
lineA7=filename.readline()
entrytext=StringVar()
entrytext.set(lineA1 + ', ' + lineA3 + ', ' + lineA5 + ', ' + lineA7)
entryName=Entry(fen, textvariable=entrytext, width=60)
entryName.pack(padx=10, pady=10)
def importationFile(fichier, encodage="Utf-8"):
file = open(fichier, 'r', encoding=encodage)
content=file.readlines()
file.close()
for li in content:
textBox.insert(END, li)
textBox=Text(fen, height=15, width=60, font=18, relief=SUNKEN)
textBox.pack(padx=30, pady=30)
buttonClose=Button(fen, text="Quit", fg='white', width=10, bd=3,
bg='navy', activebackground='dark turquoise', activeforeground='navy',
highlightbackground='grey17', command=quit)
buttonClose.pack(side='right', padx=10, pady=10)
try:
if os.path.getsize('./diag/doc_diag4/diagrecap4.txt'):
importationFile('./diag/doc_diag4/diagrecap4.txt',
encodage="Utf-8")
except FileNotFoundError as err_file:
print("+ File not found !", err_file)
messagebox.showwarning("WARNING", "File does not exist or "
"file not found !")
fen.mainloop()
| [
"[email protected]"
]
| |
82b7d1c5796f6c1174bf3088591602482f1c4054 | fd21d6384ba36aa83d0c9f05f889bdbf8912551a | /a10sdk/core/network/network_vlan_global.py | 0777512fc463e065ad6d609790980c39208eecad | [
"Apache-2.0"
]
| permissive | 0xtobit/a10sdk-python | 32a364684d98c1d56538aaa4ccb0e3a5a87ecd00 | 1ea4886eea3a1609b2ac1f81e7326758d3124dba | refs/heads/master | 2021-01-18T03:08:58.576707 | 2014-12-10T00:31:52 | 2014-12-10T00:31:52 | 34,410,031 | 0 | 0 | null | 2015-04-22T19:05:12 | 2015-04-22T19:05:12 | null | UTF-8 | Python | false | false | 1,242 | py | from a10sdk.common.A10BaseClass import A10BaseClass
class VlanGlobal(A10BaseClass):
"""Class Description::
Configure global options for vlan.
Class vlan-global supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param l3_vlan_fwd_disable: {"default": 0, "optional": true, "type": "number", "description": "Disable L3 forwarding between VLANs", "format": "flag"}
:param enable_def_vlan_l2_forwarding: {"default": 0, "optional": true, "type": "number", "description": "Enable layer 2 forwarding on default vlan", "format": "flag"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/network/vlan-global`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "vlan-global"
self.a10_url="/axapi/v3/network/vlan-global"
self.DeviceProxy = ""
self.l3_vlan_fwd_disable = ""
self.enable_def_vlan_l2_forwarding = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
| [
"[email protected]"
]
| |
c3798284253165b3bbdb13b0f739fe26d1a2a201 | 495cbf24ca6db5702b023d5476de91f881f477bf | /bulletin_board/bboard/views.py | 833e6fa25452d5b70d450d57bc2b891a16c6cb8e | []
| no_license | Nikola1001/bulletin_board_django | 29b08368848137628534c3c305c8890632708021 | 32a6e89cc2257329aa314deec58e05531c52c9a0 | refs/heads/master | 2022-12-10T13:18:28.205733 | 2020-09-13T09:46:57 | 2020-09-13T09:46:57 | 295,108,347 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 977 | py | from django.shortcuts import render
from django.http import HttpResponse
from django.views.generic.edit import CreateView
from .models import Bb, Rubric
from .forms import BbForm
def index(request):
bbs = Bb.objects.all()
rubrics = Rubric.objects.all()
context = {'bbs': bbs, 'rubrics': rubrics}
return render(request, 'bboard/index.html', context)
def by_rubric(request, rubric_id):
bbs = Bb.objects.filter(rubric = rubric_id)
rubrics = Rubric.objects.all()
current_rubric = Rubric.objects.get(pk=rubric_id)
context = {'bbs': bbs, 'rubrics': rubrics, 'current_rubric': current_rubric}
return render(request, 'bboard/by_rubric.html', context)
class BbCreateView(CreateView):
template_name = 'bboard/create.html'
form_class = BbForm
success_url = '/bboard/'
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['rubrics'] = Rubric.objects.all()
return context
| [
"[email protected]"
]
| |
0c28571a296e24bae9794871056eb8085fb3f316 | f8ac0d9c2954ee131bb7358441974a6809c7a323 | /tests/compute/lorentz/test_Mt2.py | 140237939a75b259c2d2c83a8923653d292139bf | [
"BSD-3-Clause"
]
| permissive | scikit-hep/vector | 38d285deae1ef9b9cae14eec3cf38e9b5c8d35ae | a79ccfb9574421870029506895dcb9c2162ac59d | refs/heads/main | 2023-08-31T10:49:57.311014 | 2023-08-29T17:55:09 | 2023-08-29T17:55:09 | 217,698,030 | 64 | 25 | BSD-3-Clause | 2023-09-12T05:13:17 | 2019-10-26T11:20:23 | Python | UTF-8 | Python | false | false | 4,032 | py | # Copyright (c) 2019-2023, Jonas Eschle, Jim Pivarski, Eduardo Rodrigues, and Henry Schreiner.
#
# Distributed under the 3-clause BSD license, see accompanying file LICENSE
# or https://github.com/scikit-hep/vector for details.
from __future__ import annotations
import pytest
import vector.backends.object
def test_xy_z_t():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectXY(3, 4),
vector.backends.object.LongitudinalObjectZ(10),
vector.backends.object.TemporalObjectT(20),
)
assert vec.Mt2 == pytest.approx(300)
def test_xy_z_tau():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectXY(3, 4),
vector.backends.object.LongitudinalObjectZ(10),
vector.backends.object.TemporalObjectTau(16.583123951777),
)
assert vec.Mt2 == pytest.approx(300)
def test_xy_theta_t():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectXY(3, 4),
vector.backends.object.LongitudinalObjectTheta(0.4636476090008061),
vector.backends.object.TemporalObjectT(20),
)
assert vec.Mt2 == pytest.approx(300)
def test_xy_theta_tau():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectXY(3, 4),
vector.backends.object.LongitudinalObjectTheta(0.4636476090008061),
vector.backends.object.TemporalObjectTau(16.583123951777),
)
assert vec.Mt2 == pytest.approx(300)
def test_xy_eta_t():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectXY(3, 4),
vector.backends.object.LongitudinalObjectEta(1.4436354751788103),
vector.backends.object.TemporalObjectT(20),
)
assert vec.Mt2 == pytest.approx(300)
def test_xy_eta_tau():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectXY(3, 4),
vector.backends.object.LongitudinalObjectEta(1.4436354751788103),
vector.backends.object.TemporalObjectTau(16.583123951777),
)
assert vec.Mt2 == pytest.approx(300)
def test_rhophi_z_t():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectRhoPhi(5, 0),
vector.backends.object.LongitudinalObjectZ(10),
vector.backends.object.TemporalObjectT(20),
)
assert vec.Mt2 == pytest.approx(300)
def test_rhophi_z_tau():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectRhoPhi(5, 0),
vector.backends.object.LongitudinalObjectZ(10),
vector.backends.object.TemporalObjectTau(16.583123951777),
)
assert vec.Mt2 == pytest.approx(300)
def test_rhophi_theta_t():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectRhoPhi(5, 0),
vector.backends.object.LongitudinalObjectTheta(0.4636476090008061),
vector.backends.object.TemporalObjectT(20),
)
assert vec.Mt2 == pytest.approx(300)
def test_rhophi_theta_tau():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectRhoPhi(5, 0),
vector.backends.object.LongitudinalObjectTheta(0.4636476090008061),
vector.backends.object.TemporalObjectTau(16.583123951777),
)
assert vec.Mt2 == pytest.approx(300)
def test_rhophi_eta_t():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectRhoPhi(5, 0),
vector.backends.object.LongitudinalObjectEta(1.4436354751788103),
vector.backends.object.TemporalObjectT(20),
)
assert vec.Mt2 == pytest.approx(300)
def test_rhophi_eta_tau():
vec = vector.backends.object.MomentumObject4D(
vector.backends.object.AzimuthalObjectRhoPhi(5, 0),
vector.backends.object.LongitudinalObjectEta(1.4436354751788103),
vector.backends.object.TemporalObjectTau(16.583123951777),
)
assert vec.Mt2 == pytest.approx(300)
| [
"[email protected]"
]
| |
1fb584988286f1cd09e42691e0dfa71985ff579d | 74912c10f66e90195bf87fd71e9a78fa09f017ec | /execroot/syntaxnet/bazel-out/local-opt/bin/dragnn/python/evaluation_test.runfiles/org_tensorflow/tensorflow/contrib/keras/python/keras/layers/convolutional_recurrent.py | 549a540f9dbae47ca107b6cfa2a8b9d8061abacb | []
| no_license | koorukuroo/821bda42e7dedbfae9d936785dd2d125- | 1f0b8f496da8380c6e811ed294dc39a357a5a8b8 | 237fcc152ff436f32b2b5a3752a4181d279b3a57 | refs/heads/master | 2020-03-17T03:39:31.972750 | 2018-05-13T14:35:24 | 2018-05-13T14:35:24 | 133,244,956 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 159 | py | /root/.cache/bazel/_bazel_root/821bda42e7dedbfae9d936785dd2d125/external/org_tensorflow/tensorflow/contrib/keras/python/keras/layers/convolutional_recurrent.py | [
"k"
]
| k |
d5e8384f5d233327669f8bdad3a26c03cbc9eac4 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/eventgrid/azure-mgmt-eventgrid/generated_samples/domains_list_by_subscription.py | 5f604daa1c5b83f0174a44c3eebdcc116e2d7681 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
]
| permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 1,543 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.eventgrid import EventGridManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-eventgrid
# USAGE
python domains_list_by_subscription.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = EventGridManagementClient(
credential=DefaultAzureCredential(),
subscription_id="8f6b6269-84f2-4d09-9e31-1127efcd1e40",
)
response = client.domains.list_by_subscription()
for item in response:
print(item)
# x-ms-original-file: specification/eventgrid/resource-manager/Microsoft.EventGrid/preview/2023-06-01-preview/examples/Domains_ListBySubscription.json
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
89a56284bc3930a3a7e104fbbcddb3349ebd68bb | 4c601eaa346e660c296e270cc2d79aea9a3721fe | /homeassistant/components/monoprice/__init__.py | 9bceff1531c142933a839999f0e1fedc053c9141 | [
"Apache-2.0"
]
| permissive | basnijholt/home-assistant | f55110af9ff602274c0a929c7298ef97a0ef282f | ba55b4b8338a2dc0ba3f1d750efea49d86571291 | refs/heads/dev | 2023-01-21T11:53:52.621353 | 2020-08-08T15:03:06 | 2020-08-08T15:03:06 | 220,313,680 | 5 | 1 | Apache-2.0 | 2023-01-13T06:04:49 | 2019-11-07T19:29:54 | Python | UTF-8 | Python | false | false | 2,378 | py | """The Monoprice 6-Zone Amplifier integration."""
import asyncio
import logging
from pymonoprice import get_monoprice
from serial import SerialException
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PORT
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from .const import (
CONF_NOT_FIRST_RUN,
DOMAIN,
FIRST_RUN,
MONOPRICE_OBJECT,
UNDO_UPDATE_LISTENER,
)
PLATFORMS = ["media_player"]
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the Monoprice 6-Zone Amplifier component."""
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up Monoprice 6-Zone Amplifier from a config entry."""
port = entry.data[CONF_PORT]
try:
monoprice = await hass.async_add_executor_job(get_monoprice, port)
except SerialException:
_LOGGER.error("Error connecting to Monoprice controller at %s", port)
raise ConfigEntryNotReady
# double negative to handle absence of value
first_run = not bool(entry.data.get(CONF_NOT_FIRST_RUN))
if first_run:
hass.config_entries.async_update_entry(
entry, data={**entry.data, CONF_NOT_FIRST_RUN: True}
)
undo_listener = entry.add_update_listener(_update_listener)
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = {
MONOPRICE_OBJECT: monoprice,
UNDO_UPDATE_LISTENER: undo_listener,
FIRST_RUN: first_run,
}
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]()
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
async def _update_listener(hass: HomeAssistant, entry: ConfigEntry):
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
| [
"[email protected]"
]
| |
3b75be4e050249e7eaafd24415499319c3bce4d2 | c236fd1f3d54fa79ac79d5154d31f220cfd63ace | /setup.py | 87bb3b45e3d99a7461e84cc32ae233928061cfd2 | [
"MIT"
]
| permissive | mmmika/fcn | 46b6e7959d39f68280ca00626dfb1c6ca44ebcd1 | 876ce009d37ca36f65a3c2128102ac04fca47898 | refs/heads/master | 2020-06-10T05:38:16.261089 | 2016-11-27T16:46:13 | 2016-11-27T16:46:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,209 | py | #!/usr/bin/env python
import shlex
import subprocess
import sys
from setuptools import find_packages
from setuptools import setup
version = '5.3.0'
if sys.argv[-1] == 'release':
commands = [
'python setup.py sdist',
'twine upload dist/fcn-{0}.tar.gz'.format(version),
'git tag v{0}'.format(version),
'git push origin master --tag',
]
for cmd in commands:
subprocess.call(shlex.split(cmd))
sys.exit(0)
setup(
name='fcn',
version=version,
packages=find_packages(),
scripts=[
'scripts/fcn_infer.py',
'scripts/fcn_learning_curve.py',
],
install_requires=open('requirements.txt').readlines(),
description='Fully Convolutional Networks',
long_description=open('README.rst').read(),
author='Kentaro Wada',
author_email='[email protected]',
url='http://github.com/wkentaro/fcn',
license='MIT',
keywords='machine-learning',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Topic :: Internet :: WWW/HTTP',
],
)
| [
"[email protected]"
]
| |
6b5c04de0172a279af1ce7e32c289d8968fe2ed4 | e8ae11e5017507da59e2e92d423b6a1994490de4 | /env/lib/python2.7/site-packages/azure/mgmt/network/operations/express_route_circuits_operations.py | e8da1a662f5ed4d4643510020ff1f0b0f86f9693 | []
| no_license | teopeurt/ansible-ubuntu-server | 613d00cea28bc6531acf4a39aeeb9cd0baa2a391 | b5b6127d2ee9723c5088443efe2ffb8ae30cfea7 | refs/heads/master | 2021-06-28T12:49:50.935753 | 2017-07-31T17:34:33 | 2017-07-31T17:34:33 | 98,912,808 | 0 | 1 | null | 2020-07-24T00:05:31 | 2017-07-31T17:32:56 | Makefile | UTF-8 | Python | false | false | 38,321 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
import uuid
from .. import models
class ExpressRouteCircuitsOperations(object):
"""ExpressRouteCircuitsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An objec model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def delete(
self, resource_group_name, circuit_name, custom_headers=None, raw=False, **operation_config):
"""
The delete ExpressRouteCircuit operation deletes the specified
ExpressRouteCircuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route Circuit.
:type circuit_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
def long_running_send():
request = self._client.delete(url, query_parameters)
return self._client.send(request, header_parameters, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [204, 202, 200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def get(
self, resource_group_name, circuit_name, custom_headers=None, raw=False, **operation_config):
"""
The Get ExpressRouteCircuit operation retreives information about the
specified ExpressRouteCircuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`ExpressRouteCircuit
<azure.mgmt.network.models.ExpressRouteCircuit>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuit', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, circuit_name, parameters, custom_headers=None, raw=False, **operation_config):
"""
The Put ExpressRouteCircuit operation creates/updates a
ExpressRouteCircuit
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param parameters: Parameters supplied to the create/delete
ExpressRouteCircuit operation
:type parameters: :class:`ExpressRouteCircuit
<azure.mgmt.network.models.ExpressRouteCircuit>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns :class:`ExpressRouteCircuit
<azure.mgmt.network.models.ExpressRouteCircuit>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'ExpressRouteCircuit')
# Construct and send request
def long_running_send():
request = self._client.put(url, query_parameters)
return self._client.send(
request, header_parameters, body_content, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [201, 200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 201:
deserialized = self._deserialize('ExpressRouteCircuit', response)
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuit', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def list_arp_table(
self, resource_group_name, circuit_name, peering_name, device_path, custom_headers=None, raw=False, **operation_config):
"""
The ListArpTable from ExpressRouteCircuit opertion retrieves the
currently advertised arp table associated with the
ExpressRouteCircuits in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns :class:`ExpressRouteCircuitsArpTableListResult
<azure.mgmt.network.models.ExpressRouteCircuitsArpTableListResult>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/arpTables/{devicePath}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
def long_running_send():
request = self._client.post(url, query_parameters)
return self._client.send(request, header_parameters, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def list_routes_table(
self, resource_group_name, circuit_name, peering_name, device_path, custom_headers=None, raw=False, **operation_config):
"""
The ListRoutesTable from ExpressRouteCircuit opertion retrieves the
currently advertised routes table associated with the
ExpressRouteCircuits in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns
:class:`ExpressRouteCircuitsRoutesTableListResult
<azure.mgmt.network.models.ExpressRouteCircuitsRoutesTableListResult>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routesTable/{devicePath}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
def long_running_send():
request = self._client.post(url, query_parameters)
return self._client.send(request, header_parameters, **operation_config)
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
if raw:
response = long_running_send()
return get_long_running_output(response)
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
    def list_routes_table_summary(
            self, resource_group_name, circuit_name, peering_name, device_path, custom_headers=None, raw=False, **operation_config):
        """
        The ListRoutesTableSummary from ExpressRouteCircuit operation
        retrieves the currently advertised routes table summary associated
        with the ExpressRouteCircuits in a resource group.  This is a
        long-running operation: unless ``raw=True``, a poller is returned.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param circuit_name: The name of the circuit.
        :type circuit_name: str
        :param peering_name: The name of the peering.
        :type peering_name: str
        :param device_path: The path of the device.
        :type device_path: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :rtype:
         :class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
         instance that returns
         :class:`ExpressRouteCircuitsRoutesTableSummaryListResult
         <azure.mgmt.network.models.ExpressRouteCircuitsRoutesTableSummaryListResult>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        """
        # Construct URL from the ARM path template.
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
            'devicePath': self._serialize.url("device_path", device_path, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct query parameters (API version only).
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
        # Construct headers: content type, optional client request id,
        # caller-supplied headers, configured accept-language.
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Closures that drive the long-running operation (initial POST,
        # polling GET, and final result extraction).
        def long_running_send():
            # Initial POST that starts the server-side operation.
            request = self._client.post(url, query_parameters)
            return self._client.send(request, header_parameters, **operation_config)
        def get_long_running_status(status_link, headers=None):
            # Poll the async-status link until the operation completes.
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            return self._client.send(
                request, header_parameters, **operation_config)
        def get_long_running_output(response):
            # 200 carries the result body; 202 means still in progress.
            if response.status_code not in [200, 202]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            deserialized = None
            if response.status_code == 200:
                deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', response)
            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response
            return deserialized
        if raw:
            # raw=True: send once and return immediately without polling.
            response = long_running_send()
            return get_long_running_output(response)
        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)
def get_stats(
self, resource_group_name, circuit_name, custom_headers=None, raw=False, **operation_config):
"""
The Liststats ExpressRouteCircuit opertion retrieves all the stats
from a ExpressRouteCircuits in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`ExpressRouteCircuitStats
<azure.mgmt.network.models.ExpressRouteCircuitStats>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/stats'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitStats', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_peering_stats(
self, resource_group_name, circuit_name, peering_name, custom_headers=None, raw=False, **operation_config):
"""
The Liststats ExpressRouteCircuit opertion retrieves all the stats
from a ExpressRouteCircuits in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`ExpressRouteCircuitStats
<azure.mgmt.network.models.ExpressRouteCircuitStats>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/stats'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitStats', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
    def list(
            self, resource_group_name, custom_headers=None, raw=False, **operation_config):
        """
        The List ExpressRouteCircuit operation retrieves all the
        ExpressRouteCircuits in a resource group.  Pages are fetched lazily
        as the returned paged collection is iterated.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ExpressRouteCircuitPaged
         <azure.mgmt.network.models.ExpressRouteCircuitPaged>`
        """
        def internal_paging(next_link=None, raw=False):
            # Fetches one page of results; invoked lazily by the Paged iterator.
            if not next_link:
                # First page: construct the URL from the ARM path template.
                url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits'
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct query parameters (API version only)
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
            else:
                # Follow-up pages: the service supplies a complete nextLink URL.
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            return response
        # Deserialize response into a lazily-evaluated paged collection.
        deserialized = models.ExpressRouteCircuitPaged(internal_paging, self._deserialize.dependencies)
        if raw:
            # raw=True: hand back a paged object that also captures headers.
            header_dict = {}
            client_raw_response = models.ExpressRouteCircuitPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
    def list_all(
            self, custom_headers=None, raw=False, **operation_config):
        """
        The List ExpressRouteCircuit operation retrieves all the
        ExpressRouteCircuits in a subscription.  Pages are fetched lazily
        as the returned paged collection is iterated.
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ExpressRouteCircuitPaged
         <azure.mgmt.network.models.ExpressRouteCircuitPaged>`
        """
        def internal_paging(next_link=None, raw=False):
            # Fetches one page of results; invoked lazily by the Paged iterator.
            if not next_link:
                # First page: construct the URL from the ARM path template.
                url = '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteCircuits'
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct query parameters (API version only)
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
            else:
                # Follow-up pages: the service supplies a complete nextLink URL.
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            return response
        # Deserialize response into a lazily-evaluated paged collection.
        deserialized = models.ExpressRouteCircuitPaged(internal_paging, self._deserialize.dependencies)
        if raw:
            # raw=True: hand back a paged object that also captures headers.
            header_dict = {}
            client_raw_response = models.ExpressRouteCircuitPaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
| [
"[email protected]"
]
| |
2e495fb99b1432842a9429eca1429da37cf0ff2a | 1e65ca80032b1b5a4ab3631044c3d41a9f3dd035 | /01_Jump_to_Python/Chapter07/321.py | 2c4c9f6ab079a7f22ee78e41b397caec449e2e37 | []
| no_license | bj730612/Bigdata | cdd398c56023c67a2e56c36151e9f2bca067a40a | 9bb38e30bb3728b4a4e75bc763fa858029414d4e | refs/heads/master | 2020-03-15T09:27:23.995217 | 2018-10-02T00:07:38 | 2018-10-02T00:07:38 | 132,075,198 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 161 | py | import re
# Demo: greedy match vs. lookahead on the same URL.
URL = 'http://google.com'
# Greedy '.+:' consumes up to and including the colon -> prints 'http:'
p = re.compile(r'.+:')
m = p.search(URL)
print(m.group())
# Lookahead '(?=:)' asserts the colon without consuming it -> prints 'http'
p = re.compile(r'.+(?=:)')
m = p.search(URL)
print(m.group())
| [
"[email protected]"
]
| |
65656cd0f8cace0bcc0fb1f5113f197aa36b12b7 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_benefactions.py | d23b224a84c6e58631f4000da88b4c70a68ff6df | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 242 | py |
# class header
class _BENEFACTIONS():
def __init__(self,):
self.name = "BENEFACTIONS"
self.definitions = benefaction
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['benefaction']
| [
"[email protected]"
]
| |
e91ddeb3e191e0e42abe3dff6ea872f61d02497f | a17bb550574747585f36ba159c4415d5e42835e7 | /handle/class_list_handle.py | 8c583d55aa8194a70a6d4b3be5275f6174fc0efc | []
| no_license | z1069867141/sjh | 36895969bc472c9608c14fee84f800adf6ff4af3 | 197b3a7fab135f7a277ba4260dcf3ca10bb7c53e | refs/heads/master | 2022-11-15T19:32:16.934370 | 2020-07-09T10:01:49 | 2020-07-09T10:01:49 | 261,260,925 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,673 | py | import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__),".."))
from base.FindElement import FindElement
from selenium import webdriver
import time
class class_list(object):
    """Page object for the category (class-list) page of the Sjh web app.

    Element locators are read from config/class_element.ini, node
    "class_list", via the project's FindElement helper.
    """
    def __init__(self, driver):
        self.driver = driver
        # Locator config lives next to the working directory, not the module.
        self.file_path = os.path.join(os.getcwd() + "/config/" + "class_element.ini")
        self.cl = FindElement(self.driver, file_path=self.file_path, node="class_list")
    def click_class_1(self):
        """Open the first category entry."""
        self.cl.get_element("class_1").click()
    def click_class_2(self):
        """Open the second category entry."""
        self.cl.get_element("class_2").click()
    def get_sorting_text(self):
        """Return True if the sort tab shows the default "综合排序" label."""
        try:
            time.sleep(2)  # crude render wait; TODO: replace with an explicit wait
            text = self.cl.get_element("sorting_text").text
            return text == "综合排序"
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; lookup failures still report False.
        except Exception:
            return False
    def click_QR_tabber(self):
        """Open the QR-code share dialog and return True if its title is "分 享"."""
        try:
            self.cl.get_element("QR_code").click()
            time.sleep(2)  # crude render wait; TODO: replace with an explicit wait
            text = self.cl.get_element("title_text").text
            return text == "分 享"
        except Exception:
            return False
# Ad-hoc manual smoke test: launches Chrome directly on the goods-list page.
# The commented-out lines are leftovers from earlier manual runs (login flow,
# category navigation, QR dialog check).
if __name__ == "__main__":
    driver = webdriver.Chrome()
    # base = class_list(driver)
    # driver.get("http://b2bsaas.qianyansoft.com/Sjh/#/pwdlogin?qythc=")
    # base.user_base()
    # time.sleep(1)
    # driver.get("http://b2bsaas.qianyansoft.com/Sjh/#/category")
    # a = class_list(driver)
    # # time.sleep(2)
    # a.click_QR_tabber()
    driver.get("http://b2bsaas.qianyansoft.com/Sjh/#/category/goods?gcId=303")
driver.find_element_by_name("销 量").click() | [
"[email protected]"
]
| |
bc28a05454e5f28444f62d7d5ce5b39db48ee9f3 | cb9f816c672a55d0e6b0109f368358a6276a11d9 | /noseapp/core/suite/performers/gevent.py | 59ee046682d668d64df451c72af4d48ec6386a7f | []
| no_license | noseapp/noseapp | ef3d361f6a7505d822b05c2dc6d40c662b3ba285 | 7c3e2e38b6b9fe027847a466615f7d72ed4ea334 | refs/heads/master | 2020-05-17T20:12:14.213873 | 2015-10-06T14:47:30 | 2015-10-06T14:47:30 | 30,531,499 | 3 | 2 | null | 2015-10-01T13:44:16 | 2015-02-09T10:54:26 | Python | UTF-8 | Python | false | false | 872 | py | # -*- coding: utf8 -*-
from __future__ import absolute_import
from multiprocessing import cpu_count
from gevent.pool import Pool
from noseapp.core.suite.base import BaseSuite
from noseapp.core.suite.base import SuitePerformer
class GeventSuitePerformer(SuitePerformer):
    """
    Performs a suite by scheduling each test on a gevent pool.
    """
    def __call__(self, pool=None):
        # Remember whether we created the pool ourselves: only then do we
        # join it at the end (a caller-supplied pool is joined by the caller).
        own_pool = not bool(pool)
        workers = self.suite.config.options.async_tests
        if workers <= 0:
            # No explicit worker count configured: use half the CPUs.
            workers = cpu_count() / 2
        pool = pool or Pool(int(round(workers)) or 2)
        for case in self.suite.tests:
            if self.result.shouldStop:
                break
            if isinstance(case, BaseSuite):
                # Nested suites run recursively, sharing the same pool.
                case.run(self.result, pool=pool)
            else:
                pool.spawn(self.run_one_test, case)
        if own_pool:
            pool.join()
| [
"[email protected]"
]
| |
49ce32e81a63bc46de29e7eb1cd17f6fe3f229f6 | 2ffd079c34cb07c738f7e5f703764fed68f2c8c0 | /Solutions/Evaluate_Reverse_Polish_Notation.py | 708a3f4dc84222f8e3a3a9ac462cf0dc57d6c3db | []
| no_license | WuIFan/LeetCode | bc96355022c875bdffb39c89a2088457b97d30ab | 689a100ada757bc20334d5f0084587af3039ca7b | refs/heads/master | 2022-05-24T07:13:01.023733 | 2022-04-03T15:26:23 | 2022-04-03T15:26:23 | 202,471,960 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 749 | py | from typing import List
class Solution:
    """Evaluate an arithmetic expression given in Reverse Polish Notation."""
    def evalRPN(self, tokens: List[str]) -> int:
        stack = []
        for tok in tokens:
            if tok.lstrip('-').isnumeric():
                # Operand (possibly negative): push it.
                stack.append(int(tok))
                continue
            # Operator: right-hand side is popped first.
            rhs = stack.pop()
            lhs = stack.pop()
            if tok == '+':
                res = lhs + rhs
            elif tok == '-':
                res = lhs - rhs
            elif tok == '*':
                res = lhs * rhs
            else:
                # Integer division truncating toward zero (Python's //
                # floors, so handle the sign explicitly).
                res = abs(lhs) // abs(rhs)
                if (lhs < 0) != (rhs < 0):
                    res = -res
            stack.append(res)
        return stack[-1]
"[email protected]"
]
| |
14a313d6bc063b870815b658c0cc5045efd8eae0 | 038e6e13ad4a81cee5dbbd6ccc322d48330d15d7 | /AnswerCode/083RemoveDuplicatesfromSortedList.py | 6ebe8ac3b56ba15d5779fcf189dd251a77daac8c | []
| no_license | aistoume/Leetcode | ad69dae6d9f41a03c883fc2582d0afd6997f83d6 | d8dc574b611d0e3d42367ccd47a44fd8443b0b27 | refs/heads/master | 2021-01-12T14:27:18.245818 | 2018-11-09T00:21:04 | 2018-11-09T00:21:04 | 70,066,007 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 589 | py | # 83. Remove Duplicates from Sorted List
# Definition for singly-linked list.
class ListNode(object):
    """A node of a singly linked list: a value plus a `next` pointer."""
    def __init__(self, x):
        self.val = x      # payload
        self.next = None  # successor; None terminates the list
class Solution(object):
    """LeetCode 83: remove consecutive duplicates from a sorted linked list."""
    def deleteDuplicates(self, head):
        """Collapse runs of equal values in place; returns the same head."""
        if not head:
            return None
        node = head
        while node.next:
            if node.val == node.next.val:
                # Unlink the duplicate successor.
                node.next = node.next.next
            else:
                node = node.next
        return head
# Ad-hoc driver: builds the list 1->1->2->3->3, dedupes it, prints 1 2 3.
So = Solution()
L = [1, 1, 2, 3, 3]
head = ListNode(L[0])
curr = head
for value in L[1:]:
    curr.next = ListNode(value)
    curr = curr.next
ans = So.deleteDuplicates(head)
while ans:
    print(ans.val)
    ans = ans.next
"[email protected]"
]
| |
80a0b1e0442bbe0ac5cd82a59fd965b1e3ee8dfd | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-8004.py | 9d19e79ac406f55e889ca9bcb3ccb2b5e5d58030 | []
| no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,758 | py | # A resizable list of integers
class Vector(object):
    # A resizable list of integers (ChocoPy benchmark code).
    # `items` is the backing storage, grown one slot at a time since the
    # language has no built-in resize; `size` counts the slots in use.
    items: [int] = None
    size: int = 0
    def __init__(self:"Vector"):
        self.items = [0]
    # Returns current capacity (allocated slots, always >= size)
    def capacity(self:"Vector") -> int:
        return len(self.items)
    # Increases capacity of vector by one element; returns the new capacity
    def increase_capacity(self:"Vector") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector, growing the storage when full
    def append(self:"Vector", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector, one at a time
    def append_all(self:"Vector", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector by shifting the tail left.
    # Negative indices are ignored.
    # NOTE(review): an idx >= size skips the shift loop but still decrements
    # size — confirm this is intended for out-of-range indices.
    def remove_at(self:"Vector", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index (no bounds check)
    def get(self:"Vector", idx: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector") -> int:
        return self.size
# A resizable list of integers
class Vector2(object):
    # Generated stress-test variant of Vector: fields and methods are
    # duplicated with numeric suffixes.  Every method still operates on the
    # primary `items`/`size` pair only; the extra `items2`/`size2` fields
    # and the extra parameters of the suffixed methods are never read.
    items: [int] = None
    items2: [int] = None
    size: int = 0
    size2: int = 0
    def __init__(self:"Vector2"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector2") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector2") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector2", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2 is ignored)
    def append2(self:"Vector2", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector2", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2 is ignored)
    def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector2", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2 is ignored)
    def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector2", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2 is ignored)
    def get2(self:"Vector2", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector2") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector2") -> int:
        return self.size
# A resizable list of integers
class Vector3(object):
    # Generated stress-test variant of Vector: fields and methods are
    # duplicated with numeric suffixes.  Every method still operates on the
    # primary `items`/`size` pair only; the extra `itemsN`/`sizeN` fields
    # and the extra parameters of the suffixed methods are never read.
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    def __init__(self:"Vector3"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector3") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector3") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity3(self:"Vector3") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector3", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2 is ignored)
    def append2(self:"Vector3", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2/item3 are ignored)
    def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector3", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2 is ignored)
    def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2/new_items3 are ignored)
    def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector3", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2 is ignored)
    def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2/idx3 are ignored)
    def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector3", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2 is ignored)
    def get2(self:"Vector3", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2/idx3 are ignored)
    def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector3") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector3") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length3(self:"Vector3") -> int:
        return self.size
# A resizable list of integers
class Vector4(object):
    # Generated stress-test variant of Vector: fields and methods are
    # duplicated with numeric suffixes.  Every method still operates on the
    # primary `items`/`size` pair only; the extra `itemsN`/`sizeN` fields
    # and the extra parameters of the suffixed methods are never read.
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0
    def __init__(self:"Vector4"):
        self.items = [0]
    # Returns current capacity
    def capacity(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity2(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity3(self:"Vector4") -> int:
        return len(self.items)
    # Returns current capacity
    def capacity4(self:"Vector4") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element
    def increase_capacity4(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector4", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2 is ignored)
    def append2(self:"Vector4", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2/item3 are ignored)
    def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (item2..item4 are ignored)
    def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector4", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2 is ignored)
    def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2/new_items3 are ignored)
    def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (new_items2..new_items4 are ignored)
    def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector
    def remove_at(self:"Vector4", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2 is ignored)
    def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2/idx3 are ignored)
    def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (idx2..idx4 are ignored)
    def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index
    def get(self:"Vector4", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2 is ignored)
    def get2(self:"Vector4", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2/idx3 are ignored)
    def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (idx2..idx4 are ignored)
    def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length2(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length3(self:"Vector4") -> int:
        return self.size
    # Retrieves the current size of the vector
    def length4(self:"Vector4") -> int:
        return self.size
# A resizable list of integers
class Vector5(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
items5: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
size5: int = 0
def __init__(self:"Vector5"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity5(self:"Vector5") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity5(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector5", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector5", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector5", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
item5:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector5", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector5", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector5", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length5(self:"Vector5") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
doubling_limit5:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity5(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [$Exp]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
v:Vector = None
v2:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
i:int = 0
j:int = 0
k:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve2(v:Vector, v2:Vector) -> object:
i:int = 0
i2:int = 0
j:int = 0
j2:int = 0
k:int = 0
k2:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
j:int = 0
j2:int = 0
j3:int = 0
k:int = 0
k2:int = 0
k3:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
j5:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
k5:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
# Input parameter
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
while i < v.length():
print(v.get(i))
i = i + 1
| [
"[email protected]"
]
| |
51938e7c55f9356b7ec6b93bcda53735d7801af7 | 4cc75836f13b9829afd59eb9b2ac3a5f6b85c543 | /models/final_experiment_scripts/tpc.py | 4caf041f1b06ce575758e8d7cf03393137faa64a | [
"MIT"
]
| permissive | TanmDL/eICU-LoS-prediction | 71316bf072b3bd47e61e22df71631f6d7996e583 | eb19fc84c5702595b052f436408570af314418a2 | refs/heads/master | 2022-11-29T04:14:11.929595 | 2020-08-15T10:33:46 | 2020-08-15T10:33:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 599 | py | from eICU_preprocessing.split_train_test import create_folder
from models.run_tpc import TPC
from models.initialise_arguments import initialise_tpc_arguments
if __name__=='__main__':
c = initialise_tpc_arguments()
c['mode'] = 'test'
c['exp_name'] = 'TPC'
c['model_type'] = 'tpc'
log_folder_path = create_folder('models/experiments/final', c.exp_name)
tpc = TPC(config=c,
n_epochs=c.n_epochs,
name=c.exp_name,
base_dir=log_folder_path,
explogger_kwargs={'folder_format': '%Y-%m-%d_%H%M%S{run_number}'})
tpc.run() | [
"[email protected]"
]
| |
debe0aa51bb68d9ac0a6d202b03091d9d134df10 | 18d51ac0a6ca14c8221c26f0dacd8d3721ca28e9 | /HUN105.py | 49c07262edd2b69c9d246bc431227e0a2f30f73a | []
| no_license | mahakalai/mahak | 05f96d52880ed7b2e5eb70dd1dbf14fc533236e8 | 613be9df7743ef59b1f0e07b7df987d29bb23ec7 | refs/heads/master | 2020-04-15T05:01:58.541930 | 2019-07-15T16:28:32 | 2019-07-15T16:28:32 | 164,406,486 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | n=int(input())
sum=0
s=str(n)
l=len(s)
while n>0:
rem=n%10
sq=rem**l
sum=sum+sq
n=n//10
print(sum)
| [
"[email protected]"
]
| |
ab9caac8043a3d31ccc062a0bc33b43c65f5d2e0 | d4e219c07379a08f37dff8ed9b889a1c75531e90 | /pasahero/commuters/migrations/0002_allowed_routes.py | 39780cec580f66546b71b54c3365d415dbe520f7 | []
| no_license | SanCampos/anti-covid | c9d306f584d61b9a1e1e1bc5cda4ac7497acee55 | 4f4ae5f63abfb1c59e29ad4bfc8a16a4e88ff6ad | refs/heads/master | 2021-05-22T16:30:55.933727 | 2020-04-01T14:55:14 | 2020-04-01T14:55:14 | 253,004,388 | 1 | 1 | null | 2020-04-04T14:38:33 | 2020-04-04T13:30:27 | null | UTF-8 | Python | false | false | 850 | py | # Generated by Django 2.2.5 on 2020-03-20 10:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('transportations', '0015_auto_20200320_0644'),
('commuters', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Allowed_Routes',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('commuter_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='allowed_routes', to='commuters.Commuters')),
('route_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='allowed_routes', to='transportations.Routes')),
],
),
]
| [
"[email protected]"
]
| |
04df278d4e43bf1c8ed2fdc7f1a049f8cacf1c9a | d6e65aa23ff8b2344dacac93fe00fcfcd64cc414 | /ac_div.py | a1266114bf5958a013167ab61cfa310c6a59d1fd | []
| no_license | diwadd/sport | c4b0ec3547cde882c549fa7b89e0132fdaf0c8fb | 220dfaf1329b4feea5b5ca490ffc17ef7fe76cae | refs/heads/master | 2023-05-29T13:17:23.516230 | 2023-05-20T22:08:28 | 2023-05-20T22:08:28 | 223,636,723 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 29 | py |
n = int(input())
print(n-1) | [
"[email protected]"
]
| |
1a3283004646eabd4e8de4243225147a2f252f8c | e5d83ede8521027b05d9b91c43be8cab168610e6 | /0x0A-python-inheritance/2-is_same_class.py | 21c3ccee88f486cb44c6c4084c015a32a21f6fc8 | []
| no_license | Danielo814/holbertonschool-higher_level_programming | 8918c3a6a9c136137761d47c5162b650708dd5cd | 832b692529198bbee44d2733464aedfe650bff7e | refs/heads/master | 2020-03-28T11:09:00.343055 | 2019-02-22T03:33:54 | 2019-02-22T03:33:54 | 148,181,433 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 307 | py | #!/usr/bin/python3
"""
2-is_same_class module that tests if an object is an
instance of the specified class
"""
def is_same_class(obj, a_class):
"""
returns True if obj is instance of a_class, False otherwise
"""
if type(obj) != a_class:
return False
else:
return True
| [
"[email protected]"
]
| |
8a9223c0a0896e8c858df8c838ce1237053174f0 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2951/60714/272996.py | 9cd79dd531efc7ba42812797b13c7e90adcf7421 | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 480 | py | a = input()
b = input()
ans1 = 0
for i in range(len(a) - 1, -1, -1):
ans1 += int(a[i]) * pow(2, len(a) - i - 1)
ans2 = 0
for i in range(len(b) - 1, -1, -1):
ans2 += int(b[i]) * pow(3, len(b) - i - 1)
flag = True
if ans1 > ans2:
temp = ans1 - ans2
else:
temp = ans2 - ans1
flag = False
while True:
i = 1
temp -= pow(2, i)
if temp % 3 is 0:
if flag:
print(ans2 + temp)
else:
print(ans2 - temp)
break
| [
"[email protected]"
]
| |
4706b93e3604fba7a00dd32cb2c085a8d838a2bd | f03155acea2660fb04576e3ed60f248b57f43d68 | /migrations/versions/2e1382ecc795_.py | 3c15acd95cf280dafae28d698527e449f60c6cc3 | []
| no_license | hreeder/SlackInSpace | 22ecb413fd31dad8707afd7ae968f895b425e452 | cc44ad0834343f0616f9d5bd5f6820546b105d77 | refs/heads/master | 2020-04-01T23:02:27.078901 | 2015-05-07T13:58:26 | 2015-05-07T13:58:26 | 33,796,690 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,072 | py | """empty message
Revision ID: 2e1382ecc795
Revises: 21ca2aa72535
Create Date: 2015-04-12 13:34:58.152000
"""
# revision identifiers, used by Alembic.
revision = '2e1382ecc795'
down_revision = '21ca2aa72535'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('team_member',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('team_id', sa.String(length=64), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('email_address', sa.String(length=128), nullable=True),
sa.Column('slack_user_id', sa.String(length=64), nullable=True),
sa.Column('status', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('team_member')
### end Alembic commands ###
| [
"[email protected]"
]
| |
3da62f1e1a192e84a4f600ef64a2e48ec708cb18 | fee9bae88bf4ed22b93e3e9df7806f8c897f5f25 | /manage/cli_utils.py | af579c32d5dec42317b115e3f7262a4a77f93dd8 | []
| no_license | fake-name/xA-Scraper | 4123378c5ed87896fc389a90dfd8836cca7a5923 | 9eae8d1220fd0c7cabc97cef9378e4efe3361061 | refs/heads/master | 2023-01-27T15:33:05.349679 | 2023-01-22T11:39:57 | 2023-01-22T11:39:57 | 23,865,307 | 76 | 12 | null | 2019-06-11T12:38:27 | 2014-09-10T07:39:43 | Python | UTF-8 | Python | false | false | 3,271 | py |
#pylint: disable-msg=F0401, W0142
import logging
import psycopg2
import urllib.parse
import traceback
from xascraper import db
from xascraper import database
from settings import settings
from plugins import JOBS
from plugins import JOBS_DISABLED
from plugins import JOBS_NO_CONF
PLUGINS = {
key : (cls_def, cls_def.pluginName)
for cls_def, dummy_interval, key in JOBS
}
DISABLED_PLUGINS = {
key : (cls_def, cls_def.pluginName)
for cls_def, dummy_interval, key in JOBS_DISABLED
}
UNRUNNABLE_PLUGINS = {
cls_def.pluginShortName : (cls_def, cls_def.pluginName)
for cls_def in JOBS_NO_CONF
}
def print_help():
print()
print("Manager interface")
print("Options")
print()
print(" help")
print(" print this message")
print(" reset-run-state")
print(" reset the run-state monitor flags. This is normally done")
print(" at start by main.py, but if you're using just the CLI fetch")
print(" calls, you can do it manually too.")
print(" reset-run-state [sitename]")
print(" reset the run-state for a specific plugin only.")
print(" reset-last-fetched-times")
print(" Reset the last fetch times for all the artists in the database.")
print(" this means the next fetch will re-walk all artists in the database")
print(" in random(ish) order")
print(" reset-last-fetched-times [sitename]")
print(" Reset the last-fetch time for all the artists associated with a specific site")
print(" 'rss-import'")
print(" Import tumblr feeds from a ttrss database instance.")
print(" 'tumblr-import'")
print(" Import the artists you follow on tumblr to your scraped-artists list.")
print(" 'upgrade-db'")
print(" Make any needed schema changes to the database, if needed.")
print(" 'name-clean'")
print(" Checks and does some cleanup of the artist-names in the database.")
print(" 'db-misrelink-clean'")
print(" Does release sanity checks on item URLs")
print(" fetch [sitename]")
print(" with no sitename, this executes all plugins in sequence.")
print(" With a sitename, executes the named plugin.")
print(" fetch-all")
print(" Executes all plugins in parallel.")
print(" import <sitename> <filename>")
print(" Open a text file <filename>, and import the names from")
print(" it into the monitored names database for site <sitename>.")
print(" The file <filename> must be a simple text file with")
print(" one artist name per-line.")
print(" Note that this does not support pixiv names, due to the ")
print(" different mechanism used for supporting pixiv namelist")
print(" tracking.")
print(" Note: this will call `name-clean` after execution automatically.")
print(" dump [export_path] [sitename]")
print(" Dump the database contents for users from a specific site to [export_path]")
print("")
print("Plugins (sitename -> Human-Readable name)")
print(" Available plugins (will be run by the scheduler):")
for key, tup in PLUGINS.items():
print(" {} -> {}".format(key.ljust(8), tup[1]))
print(" Disabled plugins (can be run manually, will not auto run):")
for key, tup in DISABLED_PLUGINS.items():
print(" {} -> {}".format(key.ljust(8), tup[1]))
print(" Unconfigured plugins (cannot be used):")
for key, tup in UNRUNNABLE_PLUGINS.items():
print(" {} -> {}".format(key.ljust(8), tup[1]))
| [
"[email protected]"
]
| |
648ef60602eb9eba9c4caeb8a2ac8a960cb43412 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=3.5_rd=0.5_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=10/sched.py | 6b1fdcf3ee82164a4757179b8107b4b603c7ba07 | []
| no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 316 | py | -X FMLP -Q 0 -L 5 119 400
-X FMLP -Q 0 -L 5 86 400
-X FMLP -Q 0 -L 5 86 400
-X FMLP -Q 1 -L 2 59 175
-X FMLP -Q 1 -L 2 44 150
-X FMLP -Q 2 -L 1 38 200
-X FMLP -Q 2 -L 1 31 200
-X FMLP -Q 3 -L 1 30 125
-X FMLP -Q 3 -L 1 30 125
29 150
25 100
24 175
18 125
17 175
14 100
12 100
12 100
11 100
| [
"[email protected]"
]
| |
4ddae7c4756405d61f6100fe0848351c7b1e0d1b | 7426522061b222e8d3336b18ff941bb98ff9626c | /qtoggleserver/core/api/funcs/backup.py | bf69f7ad08788ba2632330bd4b703b90bcaa9ddb | [
"Apache-2.0"
]
| permissive | DigitEgal/qtoggleserver | 82833aaeb6f0bdad5f28243f132a639f4b406001 | 54b6ac53742af9529fd349d4fc207b0dc8a38d3b | refs/heads/dev | 2023-05-07T14:49:11.273023 | 2021-04-30T20:40:08 | 2021-04-30T20:40:08 | 360,039,836 | 0 | 0 | Apache-2.0 | 2021-04-21T05:18:08 | 2021-04-21T05:13:07 | null | UTF-8 | Python | false | false | 862 | py |
import logging
from qtoggleserver.conf import settings
from qtoggleserver.core import api as core_api
from qtoggleserver.core.typing import GenericJSONList
from qtoggleserver.system import conf as system_conf
logger = logging.getLogger(__name__)
@core_api.api_call(core_api.ACCESS_LEVEL_ADMIN)
async def get_backup_endpoints(request: core_api.APIRequest) -> GenericJSONList:
endpoints = []
if system_conf.can_write_conf_file():
endpoints.append({
'path': '/system',
'display_name': 'System Configuration',
'restore_method': 'PUT',
'order': 5
})
if settings.frontend.enabled:
endpoints.append({
'path': '/frontend',
'display_name': 'App Configuration',
'restore_method': 'PUT',
'order': 45
})
return endpoints
| [
"[email protected]"
]
| |
fc19c1f114424cc9ab63e7e87a0966cc3ab775aa | 4a48593a04284ef997f377abee8db61d6332c322 | /python/graph_and_tree/tree_structure/iterative_dfs_left_right_tree_traversal.py | 56f0871fee7900fab3ae3d614ed12845a1c9b031 | [
"MIT"
]
| permissive | jeremiedecock/snippets | 8feaed5a8d873d67932ef798e16cb6d2c47609f0 | b90a444041c42d176d096fed14852d20d19adaa7 | refs/heads/master | 2023-08-31T04:28:09.302968 | 2023-08-21T07:22:38 | 2023-08-21T07:22:38 | 36,926,494 | 26 | 9 | MIT | 2023-06-06T02:17:44 | 2015-06-05T10:19:09 | Python | UTF-8 | Python | false | false | 2,119 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Depth-first search.
"""
from node import Node as Node
#from node import GraphvizNode as Node
def walk(start_node):
"""The tree traversal function."""
stack = [start_node]
while len(stack) > 0:
# Retrive the last element
node = stack.pop()
# Do something with node value...
print(node.getValue())
# Add child node into the stack
stack.extend(reversed(node.getChildNodes()))
#print([n.getValue() for n in stack])
def test():
r"""Main function
Build the following test tree and traverse it.
1
/|\
2 3 4
/ \
5 6
Top-down (left-right) traversal should print: 1, 2, 5, 6, 3, 4.
"""
# Build the test tree
n5 = Node(5)
n6 = Node(6)
n4 = Node(4)
n3 = Node(3)
n2 = Node(2, [n5, n6])
n1 = Node(1, [n2, n3, n4])
# Traverse the tree
walk(n1)
if __name__ == '__main__':
test()
| [
"[email protected]"
]
| |
b9908c2801383dd89cbb0c3a1b75c7acaeba368a | e4cae3759a053ca88a936e87e3329aec203608db | /sdk/communication/azure-communication-identity/tests/test_communication_identity_client_async.py | 0dafc2b5b76c122df008680dc360e1d5e5d4f9fd | [
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
]
| permissive | a-santamaria/azure-sdk-for-python | c9413858747ccfcec2fbbefd50922c515cb4f634 | 9dec418ad621ac75f217e56e901f15b6624800b0 | refs/heads/master | 2022-05-19T00:01:07.604118 | 2021-02-01T22:52:25 | 2021-02-01T22:52:25 | 202,599,021 | 0 | 0 | MIT | 2019-08-15T19:22:33 | 2019-08-15T19:22:32 | null | UTF-8 | Python | false | false | 6,215 | py | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import pytest
from azure.core.credentials import AccessToken
from azure.communication.identity.aio import CommunicationIdentityClient
from azure.communication.identity._shared.utils import parse_connection_str
from azure_devtools.scenario_tests import RecordingProcessor
from devtools_testutils import ResourceGroupPreparer
from _shared.helper import URIIdentityReplacer
from _shared.asynctestcase import AsyncCommunicationTestCase
from _shared.testcase import BodyReplacerProcessor
from _shared.communication_service_preparer import CommunicationServicePreparer
from azure.identity import DefaultAzureCredential
class FakeTokenCredential(object):
def __init__(self):
self.token = AccessToken("Fake Token", 0)
def get_token(self, *args):
return self.token
class CommunicationIdentityClientTestAsync(AsyncCommunicationTestCase):
    """Async tests for ``CommunicationIdentityClient`` (create / issue token /
    revoke / delete).

    Each scenario appears twice: once authenticating with an AAD credential
    built from the parsed endpoint ("..._from_managed_identity"), and once
    using the connection string directly.  In playback mode (``is_live()``
    false) ``FakeTokenCredential`` replaces ``DefaultAzureCredential``.
    """

    def setUp(self):
        super(CommunicationIdentityClientTestAsync, self).setUp()
        # Scrub identity ids and tokens out of recordings so no secrets are
        # persisted in the recorded test assets.
        self.recording_processors.extend([
            BodyReplacerProcessor(keys=["id", "token"]),
            URIIdentityReplacer()])

    @ResourceGroupPreparer(random_name_enabled=True)
    @CommunicationServicePreparer()
    async def test_create_user_from_managed_identity(self, connection_string):
        """create_user succeeds when authenticating with an AAD credential."""
        endpoint, access_key = parse_connection_str(connection_string)
        # Imported lazily: devtools_testutils decides live vs. playback mode.
        from devtools_testutils import is_live
        if not is_live():
            credential = FakeTokenCredential()
        else:
            credential = DefaultAzureCredential()
        identity_client = CommunicationIdentityClient(endpoint, credential)
        async with identity_client:
            user = await identity_client.create_user()
        assert user.identifier is not None

    @ResourceGroupPreparer(random_name_enabled=True)
    @CommunicationServicePreparer()
    async def test_create_user(self, connection_string):
        """create_user succeeds when the client is built from a connection string."""
        identity_client = CommunicationIdentityClient.from_connection_string(connection_string)
        async with identity_client:
            user = await identity_client.create_user()
        assert user.identifier is not None

    @ResourceGroupPreparer(random_name_enabled=True)
    @CommunicationServicePreparer()
    async def test_issue_token_from_managed_identity(self, connection_string):
        """issue_token returns a token for a freshly created user (AAD auth)."""
        endpoint, access_key = parse_connection_str(connection_string)
        from devtools_testutils import is_live
        if not is_live():
            credential = FakeTokenCredential()
        else:
            credential = DefaultAzureCredential()
        identity_client = CommunicationIdentityClient(endpoint, credential)
        async with identity_client:
            user = await identity_client.create_user()
            token_response = await identity_client.issue_token(user, scopes=["chat"])
        assert user.identifier is not None
        assert token_response.token is not None

    @ResourceGroupPreparer(random_name_enabled=True)
    @CommunicationServicePreparer()
    async def test_issue_token(self, connection_string):
        """issue_token returns a token for a freshly created user (conn-string auth)."""
        identity_client = CommunicationIdentityClient.from_connection_string(connection_string)
        async with identity_client:
            user = await identity_client.create_user()
            token_response = await identity_client.issue_token(user, scopes=["chat"])
        assert user.identifier is not None
        assert token_response.token is not None

    @ResourceGroupPreparer(random_name_enabled=True)
    @CommunicationServicePreparer()
    async def test_revoke_tokens_from_managed_identity(self, connection_string):
        """revoke_tokens succeeds after issuing a token (AAD auth)."""
        endpoint, access_key = parse_connection_str(connection_string)
        from devtools_testutils import is_live
        if not is_live():
            credential = FakeTokenCredential()
        else:
            credential = DefaultAzureCredential()
        identity_client = CommunicationIdentityClient(endpoint, credential)
        async with identity_client:
            user = await identity_client.create_user()
            token_response = await identity_client.issue_token(user, scopes=["chat"])
            await identity_client.revoke_tokens(user)
        assert user.identifier is not None
        assert token_response.token is not None

    @ResourceGroupPreparer(random_name_enabled=True)
    @CommunicationServicePreparer()
    async def test_revoke_tokens(self, connection_string):
        """revoke_tokens succeeds after issuing a token (conn-string auth)."""
        identity_client = CommunicationIdentityClient.from_connection_string(connection_string)
        async with identity_client:
            user = await identity_client.create_user()
            token_response = await identity_client.issue_token(user, scopes=["chat"])
            await identity_client.revoke_tokens(user)
        assert user.identifier is not None
        assert token_response.token is not None

    @ResourceGroupPreparer(random_name_enabled=True)
    @CommunicationServicePreparer()
    async def test_delete_user_from_managed_identity(self, connection_string):
        """delete_user succeeds for a freshly created user (AAD auth)."""
        endpoint, access_key = parse_connection_str(connection_string)
        from devtools_testutils import is_live
        if not is_live():
            credential = FakeTokenCredential()
        else:
            credential = DefaultAzureCredential()
        identity_client = CommunicationIdentityClient(endpoint, credential)
        async with identity_client:
            user = await identity_client.create_user()
            await identity_client.delete_user(user)
        assert user.identifier is not None

    @ResourceGroupPreparer(random_name_enabled=True)
    @CommunicationServicePreparer()
    async def test_delete_user(self, connection_string):
        """delete_user succeeds for a freshly created user (conn-string auth)."""
        identity_client = CommunicationIdentityClient.from_connection_string(connection_string)
        async with identity_client:
            user = await identity_client.create_user()
            await identity_client.delete_user(user)
        assert user.identifier is not None
| [
"[email protected]"
]
| |
702475d5c06c0afe6130323ed6491fb661057ae9 | 8d472f9facb895dda9e1df81f3bb6c2f81b9c357 | /master/bt5/slapos_wechat/SkinTemplateItem/portal_skins/slapos_wechat/PaymentTransaction_updateWechatPaymentStatus.py | faad8b2197c8500892082735e31629a8166b80c4 | []
# ERP5/Zope "Python Script": executed with an implicit `context` binding (the
# Payment Transaction document) and may legally use a top-level `return`.
# Checks whether this started WeChat payment is registered on the WeChat side
# and, if so, refreshes the local payment state through a WeChat event.
state = context.getSimulationState()
if (state != 'started') or (context.getPaymentMode() != 'wechat'):
  # Only started WeChat payments can be polled; report why we bailed out.
  return "state not started (%s)" % state
else:
  # Only the second element (the WeChat-side transaction id) matters here.
  _, transaction_id = context.PaymentTransaction_getWechatId()
  if transaction_id is not None:
    # so the payment is registered in wechat
    context.PaymentTransaction_createWechatEvent().updateStatus()
| [
"[email protected]"
]
| |
57a5992e8ab69bc124ae719a7a981da9a13774a3 | 8b3bc4efea5663b356acbabec231d1d647891805 | /214/Solution.py | cd011e9d4d6cb6afbfd0f914ff3c954c02a9140e | []
class Solution:
    def shortestPalindrome(self, s: str) -> str:
        """Return the shortest palindrome formed by adding characters in front of s.

        Finds the longest palindromic prefix of ``s`` (a prefix equal to the
        matching suffix of the reversal), then mirrors the remaining tail in
        front.  O(n^2) string comparisons in the worst case.
        """
        if not s:
            return ""
        reversed_s = s[::-1]
        length = len(s)
        # Longest k such that s[:k] is a palindrome; k = 1 always matches,
        # so the generator below is guaranteed to yield.
        k = next(i for i in range(length, 0, -1) if s[:i] == reversed_s[length - i:])
        return reversed_s[:length - k] + s
"[email protected]"
]
| |
b86f3709de8bb3479af18ab8d8a462010242978a | 4038af23324241fe74ef0604af395cac6da68b26 | /Hackerrank/minimum_time_required/min_time.py | a746548f3b8f028071988d16ee4c7389c30ffae3 | []
| no_license | narnat/algorithms_and_datastructures | 4fd5b0fa401d3f441c522f61eceecd4f8f17010d | f319f64f1774c92ce2a94cc6106eec68215f573b | refs/heads/master | 2020-09-29T05:38:41.099526 | 2019-12-22T21:12:53 | 2019-12-22T21:12:53 | 226,966,112 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 683 | py | #!/usr/bin/python3
import math
""" Minimum time required problem """
def minTime(machines, goal):
    """Return the minimum number of days for `machines` to produce `goal` items.

    Machine i produces one item every ``machines[i]`` days, so after T days it
    has produced ``T // machines[i]`` items.  Binary-searches the smallest T
    whose total production reaches `goal` (the total is non-decreasing in T).

    Bug fix: the previous upper bound ``ceil(max(machines) * goal / n)`` can
    undershoot the true answer because each machine's output is floored —
    e.g. machines=[7, 7], goal=3 needs 14 days, but the old bound (and hence
    the returned value) was 11.  The fastest machine alone always reaches the
    goal by ``min(machines) * goal`` days, which is a safe upper bound.
    """
    # Average-rate bound: even if every machine ran at the fastest rate,
    # the answer cannot be below this, so it is a valid lower bound.
    lo = math.ceil(min(machines) * goal / len(machines))
    hi = min(machines) * goal  # fastest machine alone finishes by this day
    while lo < hi:
        mid = (lo + hi) // 2
        produced = sum(mid // m for m in machines)
        if produced < goal:
            lo = mid + 1  # mid is too early; answer lies strictly above
        else:
            hi = mid      # mid suffices; answer is mid or earlier
    return hi
# Smoke test: run the four sample cases, echo each result, then verify all.
_samples = [
    ([2, 3], 5, 6),
    ([1, 3, 4], 10, 7),
    ([2, 3, 2], 10, 8),
    ([4, 5, 6], 12, 20),
]
_results = []
for _machines, _goal, _expected in _samples:
    _results.append(minTime(_machines, _goal))
    print(_results[-1])
assert _results == [_expected for _machines, _goal, _expected in _samples]
"[email protected]"
]
| |
1924cf501b9069a7dcfd9aa0c4af61bae2945b6d | 6093dca86097633d337a8a8d13c0f7513dd33db6 | /defining_classes_lecture1/LAB/02. Scope Mess.py | 0afb2155d65faddf8ed0ef0538a74e99e8c08802 | []
# https://judge.softuni.bg/Contests/Practice/Index/1934#1
# Demonstrates Python's three assignment scopes — local, `nonlocal`
# (enclosing function) and `global` (module level) — all on a name `x`.
x = "global"


def outer():
    x = "local"

    def inner():
        # Rebinds `x` in outer()'s scope (not a fresh local of inner()).
        nonlocal x
        x = "nonlocal"
        print("inner:", x)
        return x

    def change_global():
        # Rebinds the module-level `x`; outer()'s local `x` is untouched.
        global x
        x = "global: changed!"

    print("outer:", x)   # prints "outer: local"
    inner()              # prints "inner: nonlocal"
    print("outer:", x)   # prints "outer: nonlocal" (inner() rebound it)
    return change_global()


print(x)   # prints "global"
outer()
print(x)   # prints "global: changed!" (rebound by change_global())
| [
"[email protected]"
]
| |
590af1c251015ae9d7f6be2d779f44f1e6addb1c | 3fc4cac282465350d9b2983527140fc735a0d273 | /0903/12_updowntest.py | 3621b0dc8af94837fbda6ae7399450f1efa3502e | []
# Up/down number-guessing game: the computer picks an integer in 1..100 and
# the player keeps guessing until correct, getting "enter a larger/smaller
# number" hints.  All user-facing prompts are Korean and left as-is.
# Use the `import` keyword when you want to use a module.
import random # random module: used to draw random values

# (commented-out experiment: print ten random draws)
# for idx in range(1,11):
#     answer = random.randint(1,100);
#     print(answer)

# answer = the value the computer remembered
answer = random.randint(1,100)
print('정답: %d' % answer)
cnt=0 # number of attempts so far
while True:
    # su = the number the player typed in
    su = int(input('1부터 100사이의 정수 1개 입력:'))
    cnt += 1 # bump the attempt counter by one
    if answer > su:
        print('%d보다 큰 수를 입력하세요.' % su)
    elif answer < su:
        print('%d보다 작은 수를 입력하세요.' % su)
    else:
        print('정답입니다.')
        print('%d번만에 맞췄습니다.' % cnt)
        break
print('finished')
| [
"[email protected]"
]
| |
a4e78b137f746516856f31f699789563c337e5e2 | 444a9480bce2035565332d4d4654244c0b5cd47b | /official/recommend/Wide_and_Deep_Multitable/src/config.py | 7c268bc3ca56e954a9f8d298c9e674960db089b2 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license"
]
| permissive | mindspore-ai/models | 7ede9c6454e77e995e674628204e1c6e76bd7b27 | eab643f51336dbf7d711f02d27e6516e5affee59 | refs/heads/master | 2023-07-20T01:49:34.614616 | 2023-07-17T11:43:18 | 2023-07-17T11:43:18 | 417,393,380 | 301 | 92 | Apache-2.0 | 2023-05-17T11:22:28 | 2021-10-15T06:38:37 | Python | UTF-8 | Python | false | false | 4,013 | py | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" config. """
import argparse
def argparse_init():
    """Build the command-line parser for WideDeep training runs.

    Returns the configured ``argparse.ArgumentParser`` (not a parsed
    namespace); all defaults are documented next to their flags below.
    """
    parser = argparse.ArgumentParser(description='WideDeep')
    # (flag, add_argument keyword arguments) pairs, registered in one pass.
    specs = [
        ("--data_path", dict(type=str, default="./test_raw_data/")),   # location of the input data
        ("--epochs", dict(type=int, default=8)),                       # number of training epochs
        ("--batch_size", dict(type=int, default=131072)),              # train batch size
        ("--eval_batch_size", dict(type=int, default=131072)),         # evaluation batch size
        ("--deep_layers_dim", dict(type=int, nargs='+',
                                   default=[1024, 512, 256, 128])),    # MLP hidden-layer sizes
        ("--deep_layers_act", dict(type=str, default='relu')),         # MLP activation
        ("--keep_prob", dict(type=float, default=1.0)),                # dropout keep probability
        ("--adam_lr", dict(type=float, default=0.003)),                # Adam learning rate
        ("--ftrl_lr", dict(type=float, default=0.1)),                  # FTRL learning rate
        ("--l2_coef", dict(type=float, default=0.0)),                  # L2 coefficient
        ("--is_tf_dataset", dict(type=int, default=1)),                # input is a TF dataset (0/1)
        ("--dropout_flag", dict(type=int, default=1)),                 # enable dropout (0/1)
        ("--output_path", dict(type=str, default="./output/")),        # output directory
        ("--ckpt_path", dict(type=str, default="./")),                 # checkpoint directory
        ("--eval_file_name", dict(type=str, default="eval.log")),      # eval log file
        ("--loss_file_name", dict(type=str, default="loss.log")),      # loss log file
    ]
    for flag, kwargs in specs:
        parser.add_argument(flag, **kwargs)
    return parser
class WideDeepConfig():
    """Bag of WideDeep hyper-parameters, initialised to the code defaults.

    The ``argparse_init`` *method* overwrites the defaults with values parsed
    from the command line via the module-level ``argparse_init`` factory.
    """

    def __init__(self):
        # Data locations and run-output layout.
        self.data_path = ''
        self.output_path = "./output/"
        self.ckpt_path = "./"
        self.eval_file_name = "eval.log"
        self.loss_file_name = "loss.log"
        # Training schedule.
        self.epochs = 200
        self.batch_size = 131072
        self.eval_batch_size = 131072
        # Network shape and initialisation.
        self.deep_layers_act = 'relu'
        self.weight_bias_init = ['normal', 'normal']
        self.emb_init = 'normal'
        self.init_args = [-0.01, 0.01]
        self.input_emb_dim = 0
        # Regularisation and optimiser settings.
        self.dropout_flag = False
        self.keep_prob = 1.0
        self.l2_coef = 0.0
        self.adam_lr = 0.003
        self.ftrl_lr = 0.1
        # Input-pipeline flavour.
        self.is_tf_dataset = True

    def argparse_init(self):
        """Overwrite the defaults with values parsed from the command line."""
        args, _ = argparse_init().parse_known_args()
        # Fields copied verbatim from the parsed namespace.
        for name in ('data_path', 'epochs', 'batch_size', 'eval_batch_size',
                     'deep_layers_act', 'keep_prob', 'l2_coef', 'ftrl_lr',
                     'adam_lr', 'output_path', 'eval_file_name',
                     'loss_file_name', 'ckpt_path'):
            setattr(self, name, getattr(args, name))
        # Fields re-derived (or pinned) rather than copied directly.
        self.weight_bias_init = ['normal', 'normal']
        self.emb_init = 'normal'
        self.init_args = [-0.01, 0.01]
        self.is_tf_dataset = bool(args.is_tf_dataset)
        self.dropout_flag = bool(args.dropout_flag)
| [
"[email protected]"
]
| |
0f82bf000b4eb72c2fdb98ee606bbd51f9eb88b8 | ebbf6e6db50bc26326075b38d95e42859c48b271 | /ptpdb/__init__.py | c0341ee5c1fd4a103cc585fdd4ad10993b90d231 | []
| no_license | msabramo/ptpdb | 83b7f353325b47bc912a8b5abd07596b63c57232 | acb1345616ecd5422f029ab672023b7e74c82dfb | refs/heads/master | 2023-06-09T15:56:37.879305 | 2015-01-26T21:49:25 | 2015-01-26T21:49:25 | 30,851,304 | 0 | 0 | null | 2015-02-16T02:47:12 | 2015-02-16T02:47:12 | null | UTF-8 | Python | false | false | 9,308 | py | #!/usr/bin/env python
"""
Python debugger prompt.
Enhanced version of Pdb, using a prompt-toolkit front-end.
Usage::
from prompt_toolkit.contrib.pdb import set_trace
set_trace()
"""
from __future__ import unicode_literals, absolute_import
from pygments.lexers import PythonLexer
from prompt_toolkit import AbortAction, Exit
from prompt_toolkit.buffer import Buffer
from prompt_toolkit.contrib.regular_languages.completion import GrammarCompleter
from prompt_toolkit.contrib.regular_languages.validation import GrammarValidator
from prompt_toolkit.document import Document
from prompt_toolkit.filters import IsDone
from prompt_toolkit.history import FileHistory
from prompt_toolkit.key_binding.manager import KeyBindingManager
from prompt_toolkit.layout import HSplit, Window
from prompt_toolkit.layout.controls import BufferControl
from prompt_toolkit.completion import Completer
from prompt_toolkit.validation import Validator
from ptpython.completer import PythonCompleter
from ptpython.key_bindings import load_python_bindings
from ptpython.python_input import PythonCLISettings , PythonCommandLineInterface
from ptpython.utils import document_is_multiline_python
from ptpython.validator import PythonValidator
from .commands import commands_with_help, shortcuts
from .completers import PythonFileCompleter, PythonFunctionCompleter, BreakPointListCompleter, AliasCompleter, PdbCommandsCompleter
from .grammar import create_pdb_grammar
#from .key_bindings import load_custom_pdb_key_bindings
from .layout import PdbLeftMargin
from .toolbars import PdbShortcutsToolbar, FileLocationToolbar
from .completion_hints import CompletionHint
from .style import PdbStyle
import linecache
import os
import pdb
import sys
import weakref
__all__ = (
'PtPdb',
'set_trace',
)
class DynamicCompleter(Completer):
    """
    Completer proxy whose real completer can be swapped out at runtime.
    """
    def __init__(self, get_completer_func):
        # Zero-argument callable returning the completer to delegate to now.
        self.get_completer_func = get_completer_func

    def get_completions(self, document, complete_event):
        # Re-resolve the target on every request so swaps take effect
        # immediately; then forward its completions unchanged.
        delegate = self.get_completer_func()
        for completion in delegate.get_completions(document, complete_event):
            yield completion
class DynamicValidator(Validator):
    """
    Validator proxy whose real validator can be swapped out at runtime.
    """
    def __init__(self, get_validator_func):
        # Zero-argument callable returning the validator to delegate to now.
        self.get_validator_func = get_validator_func

    def validate(self, document):
        # Re-resolve the target each time so swaps take effect immediately.
        delegate = self.get_validator_func()
        return delegate.validate(document)
class PtPdb(pdb.Pdb):
    """Pdb subclass that reads its commands through a prompt-toolkit
    Python CLI (completion, validation, syntax-highlighted source sidebar)
    instead of the plain readline prompt."""

    def __init__(self):
        pdb.Pdb.__init__(self)

        # Cache for the grammar.
        self._grammar_cache = None # (current_pdb_commands, grammar) tuple.

        # NOTE(review): assigned but not referenced anywhere in this class —
        # presumably consumed by the CLI machinery or vestigial; confirm.
        self._cli_history = FileHistory(os.path.expanduser('~/.ptpdb_history'))

        self.python_cli_settings = PythonCLISettings()

        # Replaced with real Grammar* instances on every prompt in _get_input();
        # the Dynamic* proxies below re-read these attributes per request.
        self.completer = None
        self.validator = None

        # def is_multiline(document):
        #     if (self.python_cli_settings.paste_mode or
        #             self.python_cli_settings.currently_multiline):
        #         return True
        #     match = g.match_prefix(document.text)
        #     if match:
        #         for v in match.variables().getall('python_code'):
        #             if document_is_multiline_python(Document(v)):
        #                 return True
        #     return False

        # Build the prompt-toolkit front-end: the 'source_code' buffer feeds
        # the highlighted source sidebar refreshed in _get_input().
        self.cli = PythonCommandLineInterface(
            style=PdbStyle,
            get_locals=lambda: self.curframe.f_locals,
            get_globals=lambda: self.curframe.f_globals,
            _completer=DynamicCompleter(lambda: self.completer),
            _validator=DynamicValidator(lambda: self.validator),
            _python_prompt_control=PdbLeftMargin(self.python_cli_settings,
                                                 self._get_current_pdb_commands()),
            _extra_buffers={'source_code': Buffer()},
            _extra_buffer_processors=[CompletionHint()],
            _extra_sidebars=[
                HSplit([
                    FileLocationToolbar(weakref.ref(self)),
                    Window(
                        BufferControl(
                            buffer_name='source_code',
                            lexer=PythonLexer,
                        ),
                        filter=~IsDone(),
                    ),
                    PdbShortcutsToolbar(weakref.ref(self)),
                ]),
            ],
        )

        # XXX: TODO: add CompletionHint() after the input!!
        # XXX: TODO: Add PDB key bindings again.

        # # The key bindings manager. We reuse it between Pdb calls, in order to
        # # remember vi/emacs state, etc..)
        # self.key_bindings_manager = self._create_key_bindings_manager(self.python_cli_settings)
        #
        # def _create_key_bindings_manager(self, settings):
        #     key_bindings_manager = KeyBindingManager()
        #     load_custom_pdb_key_bindings(key_bindings_manager.registry)  # XXX: implement
        #     load_python_bindings(key_bindings_manager, settings, None, None) # XXX: pass create tab functions
        #
        #     return key_bindings_manager

    def cmdloop(self, intro=None):
        """
        Copy/Paste of pdb.Pdb.cmdloop. But using our own CommandLineInterface
        for reading input instead.
        """
        self.preloop()

        if intro is not None:
            self.intro = intro
        if self.intro:
            self.stdout.write(str(self.intro)+"\n")

        stop = None
        while not stop:
            if self.cmdqueue:
                # Queued commands (e.g. from 'commands') run before prompting.
                line = self.cmdqueue.pop(0)
            else:
                if self.use_rawinput:
                    line = self._get_input()

            # Standard cmd.Cmd dispatch: precmd -> onecmd -> postcmd.
            line = self.precmd(line)
            stop = self.onecmd(line)
            stop = self.postcmd(stop, line)
        self.postloop()

    def _get_current_pdb_commands(self):
        """Return every currently valid command name: built-ins, shortcuts
        and user-defined aliases (the latter change at runtime)."""
        return (
            list(commands_with_help.keys()) +
            list(shortcuts.keys()) +
            list(self.aliases.keys()))

    def _create_grammar(self):
        """
        Return the compiled grammar for this PDB shell.

        The grammar of PDB depends on the available list of PDB commands (which
        depends on the currently defined aliases.) Therefor we generate a new
        grammar when it changes, but cache it otherwise. (It's still expensive
        to compile.)
        """
        pdb_commands = self._get_current_pdb_commands()

        if self._grammar_cache is None or self._grammar_cache[0] != pdb_commands:
            self._grammar_cache = [
                pdb_commands,
                create_pdb_grammar(pdb_commands)]

        return self._grammar_cache[1]

    def _get_input(self):
        """
        Read PDB input. Return input text.
        """
        # Reset multiline/paste mode every time.
        self.python_cli_settings.paste_mode = False
        self.python_cli_settings.currently_multiline = False

        # Make sure not to start in Vi navigation mode.
        # self.key_bindings_manager.reset()

        # Set source code document.
        self.cli.cli.buffers['source_code'].document = Document(self._get_source_code())

        # Set up a new completer and validator for the new grammar.
        g = self._create_grammar()

        # Map each grammar variable to a dedicated completer; the Dynamic*
        # proxies installed in __init__ pick these up on the next keystroke.
        self.completer = GrammarCompleter(g, completers={
            'enabled_breakpoint': BreakPointListCompleter(only_enabled=True),
            'disabled_breakpoint': BreakPointListCompleter(only_disabled=True),
            'alias_name': AliasCompleter(self),
            'python_code': PythonCompleter(lambda: self.curframe.f_globals, lambda: self.curframe.f_locals),
            'breakpoint': BreakPointListCompleter(),
            'pdb_command': PdbCommandsCompleter(self),
            'python_file': PythonFileCompleter(),
            'python_function': PythonFunctionCompleter(self),
        })
        self.validator = GrammarValidator(g, {
            'python_code': PythonValidator()
        })

        try:
            return self.cli.cli.read_input(on_exit=AbortAction.RAISE_EXCEPTION).text
        except Exit:
            # Turn Control-D key press into a 'quit' command.
            return 'q'

    def _get_source_code(self):
        """
        Return source code around current line as string.
        (Partly taken from Pdb.do_list.)
        """
        filename = self.curframe.f_code.co_filename
        breaklist = self.get_file_breaks(filename)

        # Show a window starting 3 lines above the current line.
        first = max(1, self.curframe.f_lineno - 3)
        last = first + 12 # 6

        result = []
        for lineno in range(first, last+1):
            line = linecache.getline(filename, lineno, self.curframe.f_globals)
            if not line:
                # NOTE(review): '[EOF]' is assigned but the break skips the
                # append below, so it never reaches the output — confirm intent.
                line = '[EOF]'
                break
            else:
                # Gutter: right-aligned line number, 'B' for a breakpoint,
                # '->' marking the current line.
                s = repr(lineno).rjust(3)
                if len(s) < 4:
                    s = s + ' '
                if lineno in breaklist:
                    s = s + 'B'
                else:
                    s = s + ' '
                if lineno == self.curframe.f_lineno:
                    s = s + '->'
                else:
                    s = s + ' '
                result.append(s + ' ' + line)
        return ''.join(result)
def set_trace():
    """Start a ptpdb session at the caller's frame (drop-in for pdb.set_trace)."""
    caller_frame = sys._getframe().f_back
    PtPdb().set_trace(caller_frame)
| [
"[email protected]"
]
| |
f078e52815b8d620a3bc948a26f081c1548b83be | 20c4868b88fd20402ef4a6d589d7382122e48e26 | /python/L1PFProducer_cff.py | 315177ef15fdbf97b3a0fecf3b94d7cd6060f38f | []
import FWCore.ParameterSet.Config as cms

# CMSSW configuration fragment declaring the Phase-2 L1 particle-flow
# producer module ("PFObjectProducer" plugin) and wiring its inputs.
L1PFProducer = cms.EDProducer("PFObjectProducer",
                              debug = cms.untracked.bool(False),
                              # Presumably E-over-H / H-over-E selection
                              # thresholds used by the plugin — confirm their
                              # exact meaning in the PFObjectProducer source.
                              EoH_cut = cms.untracked.int32(50),
                              HoE_cut = cms.untracked.int32(2),
                              # Calorimeter clusters and L1 tracks consumed
                              # by the producer.
                              L1Clusters = cms.InputTag("L1CaloClusterProducer","L1Phase2CaloClusters"),
                              L1TrackInputTag = cms.InputTag("TTTracksFromTracklet", "Level1TTTracks"),
                              )
| [
"[email protected]"
]
| |
3c3e9c924344cc2a78174033eb88efdd0652695c | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03665/s598490894.py | 27c1418aa65f1383e6daecca8d1c52215b4074ec | []
# Subset-sum parity count: reads n and p (p in {0, 1}) plus n integers, then
# prints how many subsets of A have a sum with the same parity as p.
n, p = map(int, input().split())
A = list(map(int, input().split()))

# Largest reachable subset sum; presumably each element is <= 100 — TODO
# confirm against the problem's stated constraints.
U = n*100
dp = [0]*(U+1)  # dp[s] = number of subsets (of the processed prefix) summing to s
dp[0] = 1  # the empty subset
for a in A:
    # Walk sums downward so each element is used at most once (0/1 knapsack).
    for j in reversed(range(U+1)):
        if 0 <= j-a:
            dp[j] += dp[j-a]
# dp[p], dp[p+2], ... are exactly the counts for sums congruent to p mod 2.
print(sum(dp[p::2]))
| [
"[email protected]"
]
| |
ad70f7d8ca908f1273ae019f830e9ac792c4f170 | 1b30905742e50f8e45494c847c2bacdd43da21e7 | /src/test50_02.py | cc5f2ee905dd75bc496c56fcdfcb1f880495e42a | []
| no_license | choijaehoon1/baekjoon_workbook | e57e30f84fafa3ffcd8da9a2238260eab29f7d9f | 26966e9fc814c2099408a6b96906522f432aa602 | refs/heads/main | 2023-07-20T18:54:36.727559 | 2021-08-20T14:39:54 | 2021-08-20T14:39:54 | 352,350,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,745 | py | from collections import deque
import sys
def island(x, y, cnt):
    """BFS flood-fill: label every land cell connected to (x, y) with id `cnt`.

    Mutates the module-level grids: marks cells in `visit` and writes the
    island id into `new_board`.  Reads `board`, `dx`, `dy` and `N` globally.
    """
    frontier = deque([[x, y]])
    visit[x][y] = 1
    new_board[x][y] = cnt
    while frontier:
        cx, cy = frontier.popleft()
        for step_x, step_y in zip(dx, dy):
            nx, ny = cx + step_x, cy + step_y
            if not (0 <= nx < N and 0 <= ny < N):
                continue  # off the board
            if board[nx][ny] == 1 and visit[nx][ny] == 0:
                visit[nx][ny] = 1
                new_board[nx][ny] = cnt
                frontier.append([nx, ny])
def bfs(num):
    """Expand outward from island `num` over water; return the bridge length.

    Relies on module-level state prepared by the caller: the deque ``q`` is
    pre-seeded with every cell of island `num` (each with ``dist`` 0), ``dist``
    doubles as the visited grid (-1 = unseen), and ``new_board`` holds island
    ids.  Falls off the loop and returns None if no other island is reachable.
    """
    while q:
        x,y = q.popleft()
        for k in range(4):
            nx = x + dx[k]
            ny = y + dy[k]
            if 0<=nx<N and 0<=ny<N:
                if new_board[nx][ny] != num and board[nx][ny] == 1:
                    return dist[x][y] # returned the moment a different island is first touched
                if dist[nx][ny] == -1 and board[nx][ny] == 0:
                    # Unvisited water cell: extend the bridge by one.
                    dist[nx][ny] = dist[x][y] + 1
                    q.append([nx,ny])
# 4-neighbour offsets (up, down, left, right) shared by island() and bfs().
dx = [-1,1,0,0]
dy = [0,0,-1,1]

# Read the N x N grid of 0 (water) / 1 (land) cells from stdin.
N = int(sys.stdin.readline().rstrip())
new_board = [[0]*N for _ in range(N)]  # island id per cell (0 = water/unlabelled)
visit = [[0]*N for _ in range(N)]
board = []
for i in range(N):
    board.append(list(map(int,sys.stdin.readline().rstrip().split())))

# Phase 1: label every connected landmass with a distinct id 1..cnt.
cnt = 0
for i in range(N):
    for j in range(N):
        if board[i][j] == 1 and visit[i][j] == 0:
            cnt += 1
            island(i,j,cnt)
# print(new_board)

# Phase 2: for each island, multi-source BFS from all of its cells and keep
# the shortest bridge (water distance) found to any other island.
answer = int(1e9)
for k in range(1,cnt+1):
    dist = [[-1]*N for _ in range(N)]
    q = deque()
    for i in range(N):
        for j in range(N):
            if new_board[i][j] == k and board[i][j] == 1:
                q.append([i,j])
                dist[i][j] = 0
    tmp = bfs(k)
    # NOTE(review): bfs() returns None when only one island exists, and
    # min(None, int) raises on Python 3 — the problem presumably guarantees
    # at least two islands; confirm.
    answer = min(tmp,answer)
print(answer)
| [
"[email protected]"
]
| |
e548e0467e9821fefcefe8959ce76648d0c8d5b6 | 578db86c51d44ebddd0dc7b1738985b3dc69eb74 | /corehq/apps/sms/migrations/0034_auto_20191007_0756_noop.py | 2022476a223562183c944ac937d5515939d06c95 | [
"BSD-3-Clause"
]
| permissive | dimagi/commcare-hq | a43c7dd32b5f89c89fd5aa1b1359ab7301f4ff6b | e7391ddae1af1dbf118211ecb52c83fc508aa656 | refs/heads/master | 2023-08-16T22:38:27.853437 | 2023-08-16T19:07:19 | 2023-08-16T19:07:19 | 247,278 | 499 | 203 | BSD-3-Clause | 2023-09-14T19:03:24 | 2009-07-09T17:00:07 | Python | UTF-8 | Python | false | false | 3,062 | py | from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (note the ``_noop`` filename suffix).

    Only re-declares the ``choices`` metadata on several messaging fields;
    presumably no schema SQL results from applying it, since ``choices`` is
    validation-level metadata — confirm against the Django docs if in doubt.
    """

    dependencies = [
        ('sms', '0033_starfishbackend'),
    ]

    operations = [
        migrations.AlterField(
            model_name='messagingevent',
            name='content_type',
            field=models.CharField(choices=[('NOP', 'None'), ('SMS', 'SMS Message'), ('CBK', 'SMS Expecting Callback'), ('SVY', 'SMS Survey'), ('IVR', 'IVR Survey'), ('VER', 'Phone Verification'), ('ADH', 'Manually Sent Message'), ('API', 'Message Sent Via API'), ('CHT', 'Message Sent Via Chat'), ('EML', 'Email')], max_length=3),
        ),
        migrations.AlterField(
            model_name='messagingevent',
            name='recipient_type',
            field=models.CharField(choices=[('CAS', 'Case'), ('MOB', 'Mobile Worker'), ('WEB', 'Web User'), ('UGP', 'User Group'), ('CGP', 'Case Group'), ('MUL', 'Multiple Recipients'), ('LOC', 'Location'), ('LC+', 'Location (including child locations)'), ('VLC', 'Multiple Locations'), ('VL+', 'Multiple Locations (including child locations)'), ('UNK', 'Unknown Contact')], db_index=True, max_length=3, null=True),
        ),
        migrations.AlterField(
            model_name='messagingevent',
            name='status',
            field=models.CharField(choices=[('PRG', 'In Progress'), ('CMP', 'Completed'), ('NOT', 'Not Completed'), ('ERR', 'Error')], max_length=3),
        ),
        migrations.AlterField(
            model_name='messagingsubevent',
            name='content_type',
            field=models.CharField(choices=[('NOP', 'None'), ('SMS', 'SMS Message'), ('CBK', 'SMS Expecting Callback'), ('SVY', 'SMS Survey'), ('IVR', 'IVR Survey'), ('VER', 'Phone Verification'), ('ADH', 'Manually Sent Message'), ('API', 'Message Sent Via API'), ('CHT', 'Message Sent Via Chat'), ('EML', 'Email')], max_length=3),
        ),
        migrations.AlterField(
            model_name='messagingsubevent',
            name='recipient_type',
            field=models.CharField(choices=[('CAS', 'Case'), ('MOB', 'Mobile Worker'), ('WEB', 'Web User')], max_length=3),
        ),
        migrations.AlterField(
            model_name='messagingsubevent',
            name='status',
            field=models.CharField(choices=[('PRG', 'In Progress'), ('CMP', 'Completed'), ('NOT', 'Not Completed'), ('ERR', 'Error')], max_length=3),
        ),
        migrations.AlterField(
            model_name='selfregistrationinvitation',
            name='phone_type',
            field=models.CharField(choices=[('android', 'Android'), ('other', 'Other')], max_length=20, null=True),
        ),
        migrations.AlterField(
            model_name='sqlmobilebackend',
            name='backend_type',
            field=models.CharField(choices=[('SMS', 'SMS'), ('IVR', 'IVR')], default='SMS', max_length=3),
        ),
        migrations.AlterField(
            model_name='sqlmobilebackendmapping',
            name='backend_type',
            field=models.CharField(choices=[('SMS', 'SMS'), ('IVR', 'IVR')], max_length=3),
        ),
    ]
| [
"[email protected]"
]
| |
d74903f5c33364fcf7a60715f24a8920190c6ec7 | d3efc82dfa61fb82e47c82d52c838b38b076084c | /Autocase_Result/SjShObligationPut/YW_GGQQ_YWFSJHA_GU_082.py | 3a8a856158287639128a7453120a01d573d4397e | []
| no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,927 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
import json
sys.path.append("/home/yhl2/workspace/xtp_test/xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test/option/service")
from OptMainService import *
from OptQueryStkPriceQty import *
sys.path.append("/home/yhl2/workspace/xtp_test/service")
from log import *
from CaseParmInsertMysql import *
sys.path.append("/home/yhl2/workspace/xtp_test/option/mysql")
from Opt_SqlData_Transfer import *
sys.path.append("/home/yhl2/workspace/xtp_test/mysql")
from QueryOrderErrorMsg import queryOrderErrorMsg
sys.path.append("/home/yhl2/workspace/xtp_test/utils")
from env_restart import *
reload(sys)
sys.setdefaultencoding('utf-8')
class YW_GGQQ_YWFSJHA_GU_082(xtp_test_case):
    """Options-trading acceptance case: buy-to-close (obligation side),
    market order with remainder converted to limit, funds check with
    available funds < 0 — expects the order to be rejected (废单).

    NOTE: this suite targets Python 2 (the module calls reload(sys) /
    sys.setdefaultencoding above, and json.dumps is given ``encoding=``).
    """

    def setUp(self):
        # Reset the test account's fund/asset data for this case, restart the
        # SH environment, and re-establish the trading session.
        sql_transfer = Opt_SqlData_Transfer()
        sql_transfer.transfer_fund_asset('YW_GGQQ_YWFSJHA_GU_082')
        clear_data_and_restart_sh()
        Api.trade.Logout()
        Api.trade.Login()

    def test_YW_GGQQ_YWFSJHA_GU_082(self):
        title = '买平(义务方平仓):市价剩余转限价-验资(可用资金<0且下单导致可用资金减少)'
        # Define the expected outcome for this test case.
        # Possible expected states: initial / not traded / partially filled /
        # fully filled / partial-cancel reported / partially cancelled /
        # reported-pending-cancel / cancelled / rejected / cancel-rejected /
        # internally cancelled.
        # xtp_ID and cancel_xtpID default to 0 and need no change.
        case_goal = {
            '期望状态': '废单',
            'errorID': 11010120,
            'errorMSG': queryOrderErrorMsg(11010120),
            '是否生成报单': '是',
            '是否是撤废': '否',
            'xtp_ID': 0,
            'cancel_xtpID': 0,
        }
        logger.warning(title)

        # Define the order (entrust) parameters ------------------------------
        # Args: ticker, market, security type, security status, trading
        # status, side (B=buy, S=sell), expected state, Api.
        stkparm = QueryStkPriceQty('11002397', '1', '*', '1', '0', 'P', case_goal['期望状态'], Api)

        # If fetching the order parameters fails, the case itself fails.
        if stkparm['返回结果'] is False:
            rs = {
                '用例测试结果': stkparm['返回结果'],
                '测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
            }
            logger.error('查询结果为False,错误原因: {0}'.format(
                json.dumps(rs['测试错误原因'], encoding='UTF-8', ensure_ascii=False)))
            self.assertEqual(rs['用例测试结果'], True)
        else:
            # Place a buy/close market order ("reverse best" price type) for
            # one contract at the limit-up price fetched above.
            wt_reqs = {
                'business_type':Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_OPTION'],
                'order_client_id':1,
                'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
                'ticker': stkparm['证券代码'],
                'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_BUY'],
                'position_effect':Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_CLOSE'],
                'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_REVERSE_BEST_LIMIT'],
                'price': stkparm['涨停价'],
                'quantity': 1
            }
            # Initialise the service parameters, persist the case parameters
            # to MySQL, then run the order through the service layer.
            ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
            CaseParmInsertMysql(case_goal, wt_reqs)
            rs = serviceTest(Api, case_goal, wt_reqs)
            if rs['用例测试结果']:
                logger.warning('执行结果为{0}'.format(str(rs['用例测试结果'])))
            else:
                logger.warning('执行结果为{0},{1},{2}'.format(
                    str(rs['用例测试结果']), str(rs['用例错误源']),
                    json.dumps(rs['用例错误原因'], encoding='UTF-8', ensure_ascii=False)))
            self.assertEqual(rs['用例测试结果'], True) # 4
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
4fcee9011b31911038eb69bd7ab3d6f7f2911743 | 6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386 | /google/cloud/gaming/v1beta/gaming-v1beta-py/google/cloud/gaming_v1beta/services/game_server_clusters_service/transports/grpc_asyncio.py | e5aa514582076b0bea2b051bf03da0b7d8f279aa | [
"Apache-2.0"
]
| permissive | oltoco/googleapis-gen | bf40cfad61b4217aca07068bd4922a86e3bbd2d5 | 00ca50bdde80906d6f62314ef4f7630b8cdb6e15 | refs/heads/master | 2023-07-17T22:11:47.848185 | 2021-08-29T20:39:47 | 2021-08-29T20:39:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,342 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.gaming_v1beta.types import game_server_clusters
from google.longrunning import operations_pb2 # type: ignore
from .base import GameServerClustersServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import GameServerClustersServiceGrpcTransport
class GameServerClustersServiceGrpcAsyncIOTransport(GameServerClustersServiceTransport):
"""gRPC AsyncIO backend transport for GameServerClustersService.
The game server cluster maps to Kubernetes clusters running
Agones and is used to manage fleets within clusters.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(cls,
host: str = 'gameservices.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
def __init__(self, *,
host: str = 'gameservices.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
self._operations_client = None
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Sanity check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def list_game_server_clusters(self) -> Callable[
[game_server_clusters.ListGameServerClustersRequest],
Awaitable[game_server_clusters.ListGameServerClustersResponse]]:
r"""Return a callable for the list game server clusters method over gRPC.
Lists game server clusters in a given project and
location.
Returns:
Callable[[~.ListGameServerClustersRequest],
Awaitable[~.ListGameServerClustersResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_game_server_clusters' not in self._stubs:
self._stubs['list_game_server_clusters'] = self.grpc_channel.unary_unary(
'/google.cloud.gaming.v1beta.GameServerClustersService/ListGameServerClusters',
request_serializer=game_server_clusters.ListGameServerClustersRequest.serialize,
response_deserializer=game_server_clusters.ListGameServerClustersResponse.deserialize,
)
return self._stubs['list_game_server_clusters']
@property
def get_game_server_cluster(self) -> Callable[
[game_server_clusters.GetGameServerClusterRequest],
Awaitable[game_server_clusters.GameServerCluster]]:
r"""Return a callable for the get game server cluster method over gRPC.
Gets details of a single game server cluster.
Returns:
Callable[[~.GetGameServerClusterRequest],
Awaitable[~.GameServerCluster]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_game_server_cluster' not in self._stubs:
self._stubs['get_game_server_cluster'] = self.grpc_channel.unary_unary(
'/google.cloud.gaming.v1beta.GameServerClustersService/GetGameServerCluster',
request_serializer=game_server_clusters.GetGameServerClusterRequest.serialize,
response_deserializer=game_server_clusters.GameServerCluster.deserialize,
)
return self._stubs['get_game_server_cluster']
@property
def create_game_server_cluster(self) -> Callable[
[game_server_clusters.CreateGameServerClusterRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the create game server cluster method over gRPC.
Creates a new game server cluster in a given project
and location.
Returns:
Callable[[~.CreateGameServerClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_game_server_cluster' not in self._stubs:
self._stubs['create_game_server_cluster'] = self.grpc_channel.unary_unary(
'/google.cloud.gaming.v1beta.GameServerClustersService/CreateGameServerCluster',
request_serializer=game_server_clusters.CreateGameServerClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['create_game_server_cluster']
@property
def preview_create_game_server_cluster(self) -> Callable[
[game_server_clusters.PreviewCreateGameServerClusterRequest],
Awaitable[game_server_clusters.PreviewCreateGameServerClusterResponse]]:
r"""Return a callable for the preview create game server
cluster method over gRPC.
Previews creation of a new game server cluster in a
given project and location.
Returns:
Callable[[~.PreviewCreateGameServerClusterRequest],
Awaitable[~.PreviewCreateGameServerClusterResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'preview_create_game_server_cluster' not in self._stubs:
self._stubs['preview_create_game_server_cluster'] = self.grpc_channel.unary_unary(
'/google.cloud.gaming.v1beta.GameServerClustersService/PreviewCreateGameServerCluster',
request_serializer=game_server_clusters.PreviewCreateGameServerClusterRequest.serialize,
response_deserializer=game_server_clusters.PreviewCreateGameServerClusterResponse.deserialize,
)
return self._stubs['preview_create_game_server_cluster']
@property
def delete_game_server_cluster(self) -> Callable[
[game_server_clusters.DeleteGameServerClusterRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the delete game server cluster method over gRPC.
Deletes a single game server cluster.
Returns:
Callable[[~.DeleteGameServerClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_game_server_cluster' not in self._stubs:
self._stubs['delete_game_server_cluster'] = self.grpc_channel.unary_unary(
'/google.cloud.gaming.v1beta.GameServerClustersService/DeleteGameServerCluster',
request_serializer=game_server_clusters.DeleteGameServerClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['delete_game_server_cluster']
@property
def preview_delete_game_server_cluster(self) -> Callable[
[game_server_clusters.PreviewDeleteGameServerClusterRequest],
Awaitable[game_server_clusters.PreviewDeleteGameServerClusterResponse]]:
r"""Return a callable for the preview delete game server
cluster method over gRPC.
Previews deletion of a single game server cluster.
Returns:
Callable[[~.PreviewDeleteGameServerClusterRequest],
Awaitable[~.PreviewDeleteGameServerClusterResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'preview_delete_game_server_cluster' not in self._stubs:
self._stubs['preview_delete_game_server_cluster'] = self.grpc_channel.unary_unary(
'/google.cloud.gaming.v1beta.GameServerClustersService/PreviewDeleteGameServerCluster',
request_serializer=game_server_clusters.PreviewDeleteGameServerClusterRequest.serialize,
response_deserializer=game_server_clusters.PreviewDeleteGameServerClusterResponse.deserialize,
)
return self._stubs['preview_delete_game_server_cluster']
@property
def update_game_server_cluster(self) -> Callable[
[game_server_clusters.UpdateGameServerClusterRequest],
Awaitable[operations_pb2.Operation]]:
r"""Return a callable for the update game server cluster method over gRPC.
Patches a single game server cluster.
Returns:
Callable[[~.UpdateGameServerClusterRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_game_server_cluster' not in self._stubs:
self._stubs['update_game_server_cluster'] = self.grpc_channel.unary_unary(
'/google.cloud.gaming.v1beta.GameServerClustersService/UpdateGameServerCluster',
request_serializer=game_server_clusters.UpdateGameServerClusterRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs['update_game_server_cluster']
@property
def preview_update_game_server_cluster(self) -> Callable[
[game_server_clusters.PreviewUpdateGameServerClusterRequest],
Awaitable[game_server_clusters.PreviewUpdateGameServerClusterResponse]]:
r"""Return a callable for the preview update game server
cluster method over gRPC.
Previews updating a GameServerCluster.
Returns:
Callable[[~.PreviewUpdateGameServerClusterRequest],
Awaitable[~.PreviewUpdateGameServerClusterResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'preview_update_game_server_cluster' not in self._stubs:
self._stubs['preview_update_game_server_cluster'] = self.grpc_channel.unary_unary(
'/google.cloud.gaming.v1beta.GameServerClustersService/PreviewUpdateGameServerCluster',
request_serializer=game_server_clusters.PreviewUpdateGameServerClusterRequest.serialize,
response_deserializer=game_server_clusters.PreviewUpdateGameServerClusterResponse.deserialize,
)
return self._stubs['preview_update_game_server_cluster']
__all__ = (
'GameServerClustersServiceGrpcAsyncIOTransport',
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
]
| bazel-bot-development[bot]@users.noreply.github.com |
6b9fc9a69050a9b0c6db9f8b90649613a52e8654 | 077c91b9d5cb1a6a724da47067483c622ce64be6 | /fuzz_pyretic_mesh_proactive_firewall_no_close_check_loop_mcs/interreplay_53_l_6/openflow_replay_config.py | 451f2b5836e94a2683df01ee2a01405c012fc4bd | []
| no_license | Spencerx/experiments | 0edd16398725f6fd9365ddbb1b773942e4878369 | aaa98b0f67b0d0c0c826b8a1565916bf97ae3179 | refs/heads/master | 2020-04-03T10:11:40.671606 | 2014-06-11T23:55:11 | 2014-06-11T23:55:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,550 | py |
# STS (SDN Troubleshooting System) replay configuration: re-runs a recorded
# OpenFlow event trace against a Pyretic firewall controller on a 3-switch
# mesh topology to reproduce the loop invariant violation described below.
from config.experiment_config_lib import ControllerConfig
from sts.topology import *
from sts.control_flow import OpenFlowReplayer
from sts.simulation_state import SimulationConfig
from sts.input_traces.input_logger import InputLogger

# One Pyretic controller (label "c1") running the no-close firewall example.
# kill_cmd force-kills any leftover pox/pyretic processes between runs so
# each replay starts from a clean slate.
simulation_config = SimulationConfig(controller_configs=[ControllerConfig(start_cmd='./pyretic.py -m p0 pyretic.examples.firewall_for_sts_no_close', label='c1', address='127.0.0.1', cwd='../pyretic', kill_cmd='ps aux | grep -e pox -e pyretic | grep -v simulator | cut -c 9-15 | xargs kill -9')],
                     topology_class=MeshTopology,
                     topology_params="num_switches=3",
                     patch_panel_class=BufferedPatchPanel,
                     multiplex_sockets=False,
                     kill_controllers_on_exit=True)

# Replay the minimized (MCS) event trace recorded for this experiment.
control_flow = OpenFlowReplayer(simulation_config, "experiments/fuzz_pyretic_mesh_proactive_firewall_no_close_check_loop_mcs/interreplay_53_l_6/events.trace")
# wait_on_deterministic_values=False
# delay_flow_mods=False
# Invariant check: 'InvariantChecker.python_check_loops'
# Bug signature: '{'hs_history': [(x^L) - ([]), (dl_vlan:65535,dl_vlan_pcp:0,dl_type:2054,nw_src:123.123.1.3/32,nw_dst:123.123.3.3/32) - ([]), (dl_vlan:65535,dl_vlan_pcp:0,dl_type:2054,nw_src:123.123.1.3/32,nw_dst:123.123.3.3/32) - ([]), (dl_vlan:65535,dl_vlan_pcp:0,dl_type:2054,nw_src:123.123.1.3/32,nw_dst:123.123.3.3/32) - ([])], 'hdr': (dl_vlan:65535,dl_vlan_pcp:0,dl_type:2054,nw_src:123.123.1.3/32,nw_dst:123.123.3.3/32) - ([]), 'visits': [100004, 200002, 300001, 100001], 'port': 200002}'
| [
"[email protected]"
]
| |
f697f4f21e50a268bb9b96f4632268b6cd769f87 | a36eb4685fd050c8e1ecb4a333470724bd76df60 | /Leetcode/Jul20/260720/q3/q3.py | 1f406db0251d7cdda7716651a8d622110954fbbd | []
| no_license | phibzy/Contests | c9cff976909234cfafc51db9d9dde01c26123168 | 24aac4c81f34916945be03ed0b7c916dae4dbbb4 | refs/heads/master | 2023-01-20T06:23:06.837937 | 2020-11-30T06:54:58 | 2020-11-30T06:54:58 | 265,750,701 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | #!/usr/bin/python3
"""
@author : Chris Phibbs
@created : Sunday Jul 26, 2020 13:14:31 AEST
@file : q3
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def countPairs(self, root, distance):
        """Count pairs of leaf nodes whose path length is <= ``distance``.

        The path length between two leaves is the number of edges on the
        unique path connecting them (LeetCode 1530 "Number of Good Leaf
        Nodes Pairs").  The original stub returned None; this implements it
        with a post-order traversal that bubbles leaf depths upward.

        :param root: root node of a binary tree (objects with ``left`` /
                     ``right`` attributes), or None
        :param distance: maximum allowed number of edges between two leaves
        :return: number of qualifying leaf pairs
        """
        self._pair_count = 0
        self._leaf_depths(root, distance)
        return self._pair_count

    def _leaf_depths(self, node, distance):
        # Return the distances (in edges) from `node` to every leaf in its
        # subtree, pruned to depths that could still pair up higher in the
        # tree.  Pairs whose lowest common ancestor is `node` are counted
        # here, so each pair is counted exactly once.
        if node is None:
            return []
        if node.left is None and node.right is None:
            return [0]  # a leaf is at distance 0 from itself
        left = [d + 1 for d in self._leaf_depths(node.left, distance)]
        right = [d + 1 for d in self._leaf_depths(node.right, distance)]
        self._pair_count += sum(1 for a in left for b in right if a + b <= distance)
        # Depths already == distance cannot reach any other leaf via an
        # ancestor (that would add at least one more edge), so drop them.
        return [d for d in left + right if d < distance]
| [
"[email protected]"
]
| |
1d71e6128516fc6148c21ca11b9959d94edff31c | 1a166165ab8287d01cbb377a13efdb5eff5dfef0 | /sdk/communication/azure-communication-phonenumbers/test/_shared/testcase.py | fa09dc67deb019afa1bced230916cbee2dc65c51 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
]
| permissive | manoj0806/azure-sdk-for-python | 7a14b202ff80f528abd068bf50334e91001a9686 | aab999792db1132232b2f297c76800590a901142 | refs/heads/master | 2023-04-19T16:11:31.984930 | 2021-04-29T23:19:49 | 2021-04-29T23:19:49 | 363,025,016 | 1 | 0 | MIT | 2021-04-30T04:23:35 | 2021-04-30T04:23:35 | null | UTF-8 | Python | false | false | 3,389 | py |
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import re
import os
from devtools_testutils import AzureTestCase
from azure_devtools.scenario_tests import RecordingProcessor, ReplayableTest
from azure_devtools.scenario_tests.utilities import is_text_payload
from azure.communication.phonenumbers._shared.utils import parse_connection_str
class ResponseReplacerProcessor(RecordingProcessor):
    """Scrub phone-number data out of recorded HTTP responses.

    Used by the test recording pipeline so real phone numbers and resource
    ids never end up in checked-in recordings.
    """

    def __init__(self, keys=None, replacement="sanitized"):
        # `keys` is accepted for interface parity with other processors;
        # only `replacement` is actually used below.
        self._keys = keys if keys else []
        self._replacement = replacement

    def process_response(self, response):
        import json
        try:
            body = json.loads(response['body']['string'])
            if 'phoneNumbers' in body:
                for item in body["phoneNumbers"]:
                    if isinstance(item, str):
                        # Body is a plain list of number strings: collapse
                        # the whole list to one sanitized entry and stop.
                        body["phoneNumbers"] = [self._replacement]
                        break
                    # Otherwise each item is a dict; mask the sensitive
                    # fields in place.
                    if "phoneNumber" in item:
                        item['phoneNumber'] = self._replacement
                    if "id" in item:
                        item['id'] = self._replacement
            response['body']['string'] = json.dumps(body)
            # The URL can embed the phone number / resource id as well.
            response['url'] = self._replacement
            return response
        except (KeyError, ValueError, TypeError):
            # Non-JSON or unexpectedly shaped bodies are returned untouched.
            return response
class BodyReplacerProcessor(RecordingProcessor):
    """Mask configured sensitive fields inside request and response bodies.

    Any top-level JSON key listed in ``keys`` has its value replaced with
    ``replacement`` before the payload is written to a recording.
    """

    def __init__(self, keys=None, replacement="sanitized"):
        self._replacement = replacement
        self._keys = keys if keys else []

    def process_request(self, request):
        # Requests carry their payload as bytes; decode, scrub, re-encode.
        if is_text_payload(request):
            raw = request.body
            if raw:
                request.body = self._replace_keys(raw.decode()).encode()
        return request

    def process_response(self, response):
        # Recorded responses keep their payload under body/string as text.
        if is_text_payload(response):
            raw = response['body']['string']
            if raw:
                response['body']['string'] = self._replace_keys(raw)
        return response

    def _replace_keys(self, body):
        import json
        try:
            document = json.loads(body)
            for sensitive_key in self._keys:
                if sensitive_key in document:
                    document[sensitive_key] = self._replacement
        except (KeyError, ValueError):
            # Not JSON (or not shaped as expected): leave the body as-is.
            return body
        return json.dumps(document)
class CommunicationTestCase(AzureTestCase):
    """Base test case for Azure Communication service scenario tests."""

    # Extra volatile headers stripped from recordings on top of the defaults.
    FILTER_HEADERS = ReplayableTest.FILTER_HEADERS + ['x-azure-ref', 'x-ms-content-sha256', 'location']

    def __init__(self, method_name, *args, **kwargs):
        super(CommunicationTestCase, self).__init__(method_name, *args, **kwargs)

    def setUp(self):
        # Resolve the connection string for the current mode: playback uses a
        # fixed sanitized string so tests run without live credentials; live
        # mode reads it from the environment.
        super(CommunicationTestCase, self).setUp()
        if self.is_playback():
            self.connection_str = "endpoint=https://sanitized.communication.azure.com/;accesskey=fake==="
        else:
            self.connection_str = os.getenv('AZURE_COMMUNICATION_SERVICE_CONNECTION_STRING')
            endpoint, _ = parse_connection_str(self.connection_str)
            # First DNS label of the endpoint is the resource name; it is
            # registered with the scrubber below so recordings never leak it.
            self._resource_name = endpoint.split(".")[0]
self.scrubber.register_name_pair(self._resource_name, "sanitized") | [
"[email protected]"
]
| |
5bcf0dda20970a9d1a8e0b883d785cd389b0b7f1 | b853c16efafa74a9e1cb076008a17c9d85389fca | /HOME/笔记/待整理笔记/线程/12.23-t/alarm.py | f1cf5c62caf9f6032bd2f21d19cba926cd7085c8 | []
| no_license | Jason0221/backup | 14c48f1adb871b915d6f0ba49a26396e7cf0cd64 | dfd54cbcf7c27b0df6249104747e9a7ceffcb392 | refs/heads/master | 2020-06-03T13:14:39.751679 | 2017-05-15T08:50:38 | 2017-05-15T08:50:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | #!/usr/bin/python
# Demo of SIGALRM scheduling (Python 2 print syntax).
import signal
import time

signal.alarm(5)  # schedule a SIGALRM for 5 seconds from now
time.sleep(3)
# Re-arming the alarm returns the seconds remaining on the PREVIOUS alarm
# (about 2 here) and replaces it with a fresh 4-second alarm.
num = signal.alarm(4)
print num
#signal.pause()
# No SIGALRM handler is installed, so when the alarm fires the default
# action terminates the process, breaking out of this loop.
while True:
    time.sleep(1)
    print "wait....."
| [
"[email protected]"
]
| |
566258060889cb72a0fb1766a1d2280c6a668f14 | 3f1ba75a78568754f221988e69c17df20d69aa8d | /day07/03-函数嵌套.py | 2ebe517f1f2fe89e1c2c4612d6eee9c92c05275e | []
| no_license | itkasumy/LNHPython | 8d2a961c6446923cebc4e4bb99ed4631a90cf3d5 | 4a5c0e7991e167b0406c1e56bae73899dd90390b | refs/heads/master | 2020-04-15T10:16:51.981833 | 2019-01-25T08:34:07 | 2019-01-25T08:34:07 | 164,589,222 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 199 | py | def father(name):
print('from father %s' %name)
def son():
print('from son')
def grandson():
print('from grandson...')
grandson()
son()
father('ksm')
| [
"[email protected]"
]
| |
14bf7a46bf9a6a287a37ba5287a064e071cc029e | aaa204ad7f134b526593c785eaa739bff9fc4d2a | /tests/cli/commands/test_kubernetes_command.py | 8a0045f27d86d7adcb69635d21d26a3171d916f3 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
]
| permissive | cfei18/incubator-airflow | 913b40efa3d9f1fdfc5e299ce2693492c9a92dd4 | ffb2078eb5546420864229cdc6ee361f89cab7bd | refs/heads/master | 2022-09-28T14:44:04.250367 | 2022-09-19T16:50:23 | 2022-09-19T16:50:23 | 88,665,367 | 0 | 1 | Apache-2.0 | 2021-02-05T16:29:42 | 2017-04-18T20:00:03 | Python | UTF-8 | Python | false | false | 11,834 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import os
import tempfile
import unittest
from unittest import mock
from unittest.mock import MagicMock, call
import kubernetes
from dateutil.parser import parse
from airflow.cli import cli_parser
from airflow.cli.commands import kubernetes_command
class TestGenerateDagYamlCommand(unittest.TestCase):
    """CLI tests for ``airflow kubernetes generate-dag-yaml``."""

    @classmethod
    def setUpClass(cls):
        cls.parser = cli_parser.get_parser()

    def test_generate_dag_yaml(self):
        # The command should render one pod YAML per task instance of the
        # fixture DAG into <output-path>/airflow_yaml_output/.
        with tempfile.TemporaryDirectory("airflow_dry_run_test/") as directory:
            file_name = "miscellaneous_test_dag_run_after_loop_2020-11-03T00_00_00_plus_00_00.yml"
            kubernetes_command.generate_pod_yaml(
                self.parser.parse_args(
                    [
                        'kubernetes',
                        'generate-dag-yaml',
                        'miscellaneous_test_dag',
                        "2020-11-03",
                        "--output-path",
                        directory,
                    ]
                )
            )
            # Only the output sub-directory is created at the top level...
            assert len(os.listdir(directory)) == 1
            out_dir = directory + "/airflow_yaml_output/"
            # ...and the fixture DAG has 6 tasks, hence 6 rendered files.
            assert len(os.listdir(out_dir)) == 6
            assert os.path.isfile(out_dir + file_name)
            # The rendered YAML must be non-empty.
            assert os.stat(out_dir + file_name).st_size > 0
class TestCleanUpPodsCommand(unittest.TestCase):
    """CLI tests for ``airflow kubernetes cleanup-pods``."""

    # cleanup-pods only considers pods carrying all of the airflow-managed
    # labels, so every list call asserted below must use this selector.
    label_selector = ','.join(['dag_id', 'task_id', 'try_number', 'airflow_version'])

    @classmethod
    def setUpClass(cls):
        cls.parser = cli_parser.get_parser()
    @mock.patch('kubernetes.client.CoreV1Api.delete_namespaced_pod')
    def test_delete_pod(self, delete_namespaced_pod):
        """_delete_pod should issue a namespaced delete for the given pod."""
        kubernetes_command._delete_pod('dummy', 'awesome-namespace')
        delete_namespaced_pod.assert_called_with(body=mock.ANY, name='dummy', namespace='awesome-namespace')
    @mock.patch('airflow.cli.commands.kubernetes_command._delete_pod')
    @mock.patch('kubernetes.client.CoreV1Api.list_namespaced_pod')
    @mock.patch('airflow.kubernetes.kube_client.config.load_incluster_config')
    def test_running_pods_are_not_cleaned(self, load_incluster_config, list_namespaced_pod, delete_pod):
        """A pod still in the Running phase must never be deleted."""
        # Patch decorators apply bottom-up, so the mocks arrive in reverse
        # decorator order.
        pod1 = MagicMock()
        pod1.metadata.name = 'dummy'
        pod1.metadata.creation_timestamp = parse("2021-12-20T08:01:07Z")
        pod1.status.phase = 'Running'
        pod1.status.reason = None
        pods = MagicMock()
        pods.metadata._continue = None  # no continuation token: single page
        pods.items = [pod1]
        list_namespaced_pod.return_value = pods
        kubernetes_command.cleanup_pods(
            self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace'])
        )
        list_namespaced_pod.assert_called_once_with(
            namespace='awesome-namespace', limit=500, label_selector=self.label_selector
        )
        delete_pod.assert_not_called()
        load_incluster_config.assert_called_once()
    @mock.patch('airflow.cli.commands.kubernetes_command._delete_pod')
    @mock.patch('kubernetes.client.CoreV1Api.list_namespaced_pod')
    @mock.patch('airflow.kubernetes.kube_client.config.load_incluster_config')
    def test_cleanup_succeeded_pods(self, load_incluster_config, list_namespaced_pod, delete_pod):
        """A Succeeded pod is terminal and should be reaped."""
        pod1 = MagicMock()
        pod1.metadata.name = 'dummy'
        pod1.metadata.creation_timestamp = parse("2021-12-20T08:01:07Z")
        pod1.status.phase = 'Succeeded'
        pod1.status.reason = None
        pods = MagicMock()
        pods.metadata._continue = None
        pods.items = [pod1]
        list_namespaced_pod.return_value = pods
        kubernetes_command.cleanup_pods(
            self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace'])
        )
        list_namespaced_pod.assert_called_once_with(
            namespace='awesome-namespace', limit=500, label_selector=self.label_selector
        )
        delete_pod.assert_called_with('dummy', 'awesome-namespace')
        load_incluster_config.assert_called_once()
    @mock.patch('airflow.cli.commands.kubernetes_command._delete_pod')
    @mock.patch('kubernetes.client.CoreV1Api.list_namespaced_pod')
    @mock.patch('kubernetes.config.load_incluster_config')
    def test_no_cleanup_failed_pods_wo_restart_policy_never(
        self, load_incluster_config, list_namespaced_pod, delete_pod
    ):
        """A Failed pod with restart_policy != Never may still be restarted
        by the kubelet, so it must be left alone."""
        # NOTE(review): this patches kubernetes.config directly while the
        # first tests patch airflow.kubernetes.kube_client.config —
        # presumably both resolve to the same module object; confirm.
        pod1 = MagicMock()
        pod1.metadata.name = 'dummy2'
        pod1.metadata.creation_timestamp = parse("2021-12-20T08:01:07Z")
        pod1.status.phase = 'Failed'
        pod1.status.reason = None
        pod1.spec.restart_policy = 'Always'
        pods = MagicMock()
        pods.metadata._continue = None
        pods.items = [pod1]
        list_namespaced_pod.return_value = pods
        kubernetes_command.cleanup_pods(
            self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace'])
        )
        list_namespaced_pod.assert_called_once_with(
            namespace='awesome-namespace', limit=500, label_selector=self.label_selector
        )
        delete_pod.assert_not_called()
        load_incluster_config.assert_called_once()
    @mock.patch('airflow.cli.commands.kubernetes_command._delete_pod')
    @mock.patch('kubernetes.client.CoreV1Api.list_namespaced_pod')
    @mock.patch('kubernetes.config.load_incluster_config')
    def test_cleanup_failed_pods_w_restart_policy_never(
        self, load_incluster_config, list_namespaced_pod, delete_pod
    ):
        """Failed + restart_policy == Never means the pod is terminal and
        should be deleted."""
        pod1 = MagicMock()
        pod1.metadata.name = 'dummy3'
        pod1.metadata.creation_timestamp = parse("2021-12-20T08:01:07Z")
        pod1.status.phase = 'Failed'
        pod1.status.reason = None
        pod1.spec.restart_policy = 'Never'
        pods = MagicMock()
        pods.metadata._continue = None
        pods.items = [pod1]
        list_namespaced_pod.return_value = pods
        kubernetes_command.cleanup_pods(
            self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace'])
        )
        list_namespaced_pod.assert_called_once_with(
            namespace='awesome-namespace', limit=500, label_selector=self.label_selector
        )
        delete_pod.assert_called_with('dummy3', 'awesome-namespace')
        load_incluster_config.assert_called_once()
    @mock.patch('airflow.cli.commands.kubernetes_command._delete_pod')
    @mock.patch('kubernetes.client.CoreV1Api.list_namespaced_pod')
    @mock.patch('kubernetes.config.load_incluster_config')
    def test_cleanup_evicted_pods(self, load_incluster_config, list_namespaced_pod, delete_pod):
        """An evicted pod (Failed with reason Evicted) should be deleted."""
        pod1 = MagicMock()
        pod1.metadata.name = 'dummy4'
        pod1.metadata.creation_timestamp = parse("2021-12-20T08:01:07Z")
        pod1.status.phase = 'Failed'
        pod1.status.reason = 'Evicted'
        pod1.spec.restart_policy = 'Never'
        pods = MagicMock()
        pods.metadata._continue = None
        pods.items = [pod1]
        list_namespaced_pod.return_value = pods
        kubernetes_command.cleanup_pods(
            self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace'])
        )
        list_namespaced_pod.assert_called_once_with(
            namespace='awesome-namespace', limit=500, label_selector=self.label_selector
        )
        delete_pod.assert_called_with('dummy4', 'awesome-namespace')
        load_incluster_config.assert_called_once()
@mock.patch('airflow.cli.commands.kubernetes_command._delete_pod')
@mock.patch('kubernetes.client.CoreV1Api.list_namespaced_pod')
@mock.patch('kubernetes.config.load_incluster_config')
def test_cleanup_pending_pods(self, load_incluster_config, list_namespaced_pod, delete_pod):
pod1 = MagicMock()
pod1.metadata.name = 'dummy5'
pod1.metadata.creation_timestamp = parse("2021-12-20T08:01:07Z")
pod1.status.phase = 'Pending'
pod1.status.reason = 'Unschedulable'
pods = MagicMock()
pods.metadata._continue = None
pods.items = [pod1]
list_namespaced_pod.return_value = pods
kubernetes_command.cleanup_pods(
self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace'])
)
list_namespaced_pod.assert_called_once_with(
namespace='awesome-namespace', limit=500, label_selector=self.label_selector
)
delete_pod.assert_called_with('dummy5', 'awesome-namespace')
load_incluster_config.assert_called_once()
@mock.patch('airflow.cli.commands.kubernetes_command._delete_pod')
@mock.patch('kubernetes.client.CoreV1Api.list_namespaced_pod')
@mock.patch('kubernetes.config.load_incluster_config')
def test_cleanup_api_exception_continue(self, load_incluster_config, list_namespaced_pod, delete_pod):
delete_pod.side_effect = kubernetes.client.rest.ApiException(status=0)
pod1 = MagicMock()
pod1.metadata.name = 'dummy'
pod1.metadata.creation_timestamp = parse("2021-12-20T08:01:07Z")
pod1.status.phase = 'Succeeded'
pod1.status.reason = None
pods = MagicMock()
pods.metadata._continue = None
pods.items = [pod1]
list_namespaced_pod.return_value = pods
kubernetes_command.cleanup_pods(
self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace'])
)
list_namespaced_pod.assert_called_once_with(
namespace='awesome-namespace', limit=500, label_selector=self.label_selector
)
load_incluster_config.assert_called_once()
@mock.patch('airflow.cli.commands.kubernetes_command._delete_pod')
@mock.patch('kubernetes.client.CoreV1Api.list_namespaced_pod')
@mock.patch('kubernetes.config.load_incluster_config')
def test_list_pod_with_continue_token(self, load_incluster_config, list_namespaced_pod, delete_pod):
pod1 = MagicMock()
pod1.metadata.name = 'dummy'
pod1.metadata.creation_timestamp = parse("2021-12-20T08:01:07Z")
pod1.status.phase = 'Succeeded'
pod1.status.reason = None
pods = MagicMock()
pods.metadata._continue = 'dummy-token'
pods.items = [pod1]
next_pods = MagicMock()
next_pods.metadata._continue = None
next_pods.items = [pod1]
list_namespaced_pod.side_effect = [pods, next_pods]
kubernetes_command.cleanup_pods(
self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace'])
)
calls = [
call.first(namespace='awesome-namespace', limit=500, label_selector=self.label_selector),
call.second(
namespace='awesome-namespace',
limit=500,
label_selector=self.label_selector,
_continue='dummy-token',
),
]
list_namespaced_pod.assert_has_calls(calls)
delete_pod.assert_called_with('dummy', 'awesome-namespace')
load_incluster_config.assert_called_once()
| [
"[email protected]"
]
| |
74de8906c049b86432a83972f2d2a1cd447e69ad | 835db5ec0fc127df1de58a9a3af4a869a1a7cd84 | /assignments/functions/every_other_chr.py | 5720eb6ef06ff3a5f6025e13d3a57aff21b84fc7 | []
| no_license | thorhilduranna/2020-3-T-111-PROG | 3ba097e1b54d68bdd6efbf1d7f90911a9336fa5a | c9758b61256aa6e39a3308e576c8ad0bf2b6d027 | refs/heads/master | 2023-02-09T23:39:22.879653 | 2021-01-07T12:59:19 | 2021-01-07T12:59:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 228 | py | def every_other_chr(a_str):
'''Returns a new string containing every other character in a_str.'''
return a_str[::2]
input_str = input("Enter a string: ")
print("Every other character:", every_other_chr(input_str))
| [
"[email protected]"
]
| |
37f2ba7809ae2039b8567f6cd6be0f8f8ebe2447 | 88eeba6df8382687f36a4765bb298f76465c8e81 | /general/chainerrl/chainerrl/chainerrl/q_function.py | b47c78e34912c52abd9195222df91db7c7c75e7c | [
"MIT"
]
| permissive | daniellawson9999/quick_start | db0b6e382efd640754ca1e7800753c94e668423a | 947d61f118433dcd4cb845f27649ebfbc8062ecc | refs/heads/master | 2022-02-23T21:54:16.273530 | 2019-09-27T01:46:41 | 2019-09-27T01:46:41 | 197,873,032 | 0 | 0 | null | 2019-07-20T03:12:34 | 2019-07-20T03:12:31 | null | UTF-8 | Python | false | false | 673 | py | from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
from abc import ABCMeta
from abc import abstractmethod
from future.utils import with_metaclass
class StateQFunction(with_metaclass(ABCMeta, object)):
@abstractmethod
def __call__(self, x):
raise NotImplementedError()
class StateActionQFunction(with_metaclass(ABCMeta, object)):
@abstractmethod
def __call__(self, x, a):
raise NotImplementedError()
| [
"[email protected]"
]
| |
73f8720174f8e6518ef3716c337664d59c628864 | 82f6a6c50a1fef2d7522a43cc4f60e5ff80b37a8 | /solutions/Ambiguous Coordinates/solution.py | 309717524e6eb2befded94b0bafc3907f4e2069b | [
"MIT"
]
| permissive | nilax97/leetcode-solutions | ca0f9545ce70975617738f053e0935fac00b04d4 | d3c12f2b289662d199510e0431e177bbf3cda121 | refs/heads/master | 2023-05-14T02:21:48.893716 | 2021-06-08T13:16:53 | 2021-06-08T13:16:53 | 374,466,870 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 597 | py | class Solution:
def ambiguousCoordinates(self, S: str) -> List[str]:
def make(frag):
N = len(frag)
for d in range(1, N+1):
left = frag[:d]
right = frag[d:]
if ((not left.startswith('0') or left == '0')
and (not right.endswith('0'))):
yield left + ('.' if d != N else '') + right
S = S[1:-1]
return ["({}, {})".format(*cand)
for i in range(1, len(S))
for cand in itertools.product(make(S[:i]), make(S[i:]))]
| [
"[email protected]"
]
| |
4291ea8563309410ba811f227c3159ae6c856f88 | cc0e381fde5cc6870770396d990d2bad66a3186c | /PythonExercicios/ex006.py | 653eff93c4c8ecd980c2238d4ae97cf870bd2c40 | []
| no_license | jnthmota/Python-PySpark-Cursos | 2c7fac79867059e0dfe4f0c4b6b6e1d32260530f | 680a4c422e14a26036379f49f0de6b5e73d7e431 | refs/heads/main | 2023-08-15T00:22:59.189649 | 2021-09-12T23:00:39 | 2021-09-12T23:00:39 | 373,610,471 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | #06)CRIE UM ALGORITMO QUE LEIA UM NÚMERO E MOSTRE O SEU DOBRO, TRIPLO E RAIZ QUADRADA
a = int(input('Digite um valor: '))
d = a * 2
t = a * 3
#rq = a ** (1/2)
#OU
rq = pow(a, (1/2)) #POW BASE EXPONENCIAL **
print('O seu dobro de {} é: {} \n O seu triplo de {} é: {} \n A raiz Quadrada de {} é: {:.4}'.format(a, d, a, t, a, rq)) | [
"[email protected]"
]
| |
21e359d18b7e8e9f78fb251a5fb993843cc4ce54 | 762bd83dd4c96a6a5890e21b14252104fcfdc51f | /hw_2/code/conftest.py | 24d17a5bcb26cb484469f373c9bd8b63c1ad7deb | []
| no_license | Batroff/2021-1-MAILRU-SDET-Python-S-Savranskii | 1c1207bd9b22e9f2bd99af40767d6507e63c7380 | 133efd2960ddacc51ec7cba29bd7fce5e29223d9 | refs/heads/main | 2023-06-02T18:06:05.752307 | 2021-06-16T15:23:03 | 2021-06-16T15:23:03 | 349,384,217 | 0 | 0 | null | 2021-06-11T13:14:27 | 2021-03-19T10:28:47 | Python | UTF-8 | Python | false | false | 1,976 | py | import logging
import os
import shutil
import allure
from ui.fixtures import *
def pytest_addoption(parser):
parser.addoption('--url', default='http://www.target.my.com')
parser.addoption('--browser', default='chrome')
parser.addoption('--debug_log', action='store_true')
def pytest_configure(config):
base_test_dir = os.path.join('tmp', 'tests')
if not hasattr(config, 'workerinput'):
if os.path.exists(base_test_dir):
shutil.rmtree(base_test_dir)
os.makedirs(base_test_dir)
config.base_test_dir = base_test_dir
@pytest.fixture(scope='session')
def config(request):
url = request.config.getoption('--url')
browser = request.config.getoption('--browser')
debug_log = request.config.getoption('--debug_log')
return {'url': url, 'browser': browser, 'debug_log': debug_log}
@pytest.fixture(scope='session')
def repo_root():
return os.path.abspath(os.path.join(__file__, os.pardir))
@pytest.fixture(scope='function')
def test_dir(request):
# filename.py-classname-test_name
test_dir = os.path.join(request.config.base_test_dir, request._pyfuncitem.nodeid.replace('::', '-'))
os.makedirs(test_dir)
return test_dir
@pytest.fixture(scope='function', autouse=True)
def logger(test_dir, config):
log_formatter = logging.Formatter('%(asctime)s - %(filename)-15s - %(levelname)-6s - %(message)s')
log_file = os.path.join(test_dir, 'test.log')
log_level = logging.DEBUG if config['debug_log'] else logging.INFO
file_handler = logging.FileHandler(log_file, 'w')
file_handler.setFormatter(log_formatter)
file_handler.setLevel(log_level)
log = logging.getLogger('test')
log.propagate = False
log.setLevel(log_level)
log.addHandler(file_handler)
yield log
for handler in log.handlers:
handler.close()
with open(log_file, 'r') as f:
allure.attach(f.read(), 'test.log', attachment_type=allure.attachment_type.TEXT)
| [
"[email protected]"
]
| |
49e700cbe29de94dcbe30c3986931889084d727b | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/built-in/nlp/Bert_Chinese_ID3433_for_PyTorch/transformers/tests/xlm/test_tokenization_xlm.py | bd056b69d430916d4db4caa60fd158aa3492ff77 | [
"Apache-2.0",
"GPL-1.0-or-later",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 3,292 | py | # coding=utf-8
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import unittest
from transformers.models.xlm.tokenization_xlm import VOCAB_FILES_NAMES, XLMTokenizer
from transformers.testing_utils import slow
from ..test_tokenization_common import TokenizerTesterMixin
class XLMTokenizationTest(TokenizerTesterMixin, unittest.TestCase):
tokenizer_class = XLMTokenizer
test_rust_tokenizer = False
def setUp(self):
super().setUp()
# Adapted from Sennrich et al. 2015 and https://github.com/rsennrich/subword-nmt
vocab = [
"l",
"o",
"w",
"e",
"r",
"s",
"t",
"i",
"d",
"n",
"w</w>",
"r</w>",
"t</w>",
"lo",
"low",
"er</w>",
"low</w>",
"lowest</w>",
"newer</w>",
"wider</w>",
"<unk>",
]
vocab_tokens = dict(zip(vocab, range(len(vocab))))
merges = ["l o 123", "lo w 1456", "e r</w> 1789", ""]
self.vocab_file = os.path.join(self.tmpdirname, VOCAB_FILES_NAMES["vocab_file"])
self.merges_file = os.path.join(self.tmpdirname, VOCAB_FILES_NAMES["merges_file"])
with open(self.vocab_file, "w") as fp:
fp.write(json.dumps(vocab_tokens))
with open(self.merges_file, "w") as fp:
fp.write("\n".join(merges))
def get_input_output_texts(self, tokenizer):
input_text = "lower newer"
output_text = "lower newer"
return input_text, output_text
def test_full_tokenizer(self):
"""Adapted from Sennrich et al. 2015 and https://github.com/rsennrich/subword-nmt"""
tokenizer = XLMTokenizer(self.vocab_file, self.merges_file)
text = "lower"
bpe_tokens = ["low", "er</w>"]
tokens = tokenizer.tokenize(text)
self.assertListEqual(tokens, bpe_tokens)
input_tokens = tokens + ["<unk>"]
input_bpe_tokens = [14, 15, 20]
self.assertListEqual(tokenizer.convert_tokens_to_ids(input_tokens), input_bpe_tokens)
@slow
def test_sequence_builders(self):
tokenizer = XLMTokenizer.from_pretrained("xlm-mlm-en-2048")
text = tokenizer.encode("sequence builders", add_special_tokens=False)
text_2 = tokenizer.encode("multi-sequence build", add_special_tokens=False)
encoded_sentence = tokenizer.build_inputs_with_special_tokens(text)
encoded_pair = tokenizer.build_inputs_with_special_tokens(text, text_2)
assert encoded_sentence == [0] + text + [1]
assert encoded_pair == [0] + text + [1] + text_2 + [1]
| [
"[email protected]"
]
| |
6bfede719838e1bfce9c066b0dcf798a99fe4c9f | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_23405.py | 3acd2d2d0d8cd4d8d381f05872a1405cf1eb80bc | []
| no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | # Interactive matplotlib/ipython figures on Windows
git pull && python setup.py install
| [
"[email protected]"
]
| |
e95654a88cccfe3a701fa50cef273760697ac47e | 1b5d39f9dd5126b6f21e83efe58b7e86ef8d94f2 | /CodeForces/round633/A.py | 75c2d2dad491cacb5df76cb375b625d7a1b16a9c | []
| no_license | jai-dewani/Competitive-Programming | dfad61106a648b80cc97c85cc5c8bc5d1cd335d9 | a2006e53b671ba56d4b0a20dd81fd0e21d0b0806 | refs/heads/master | 2021-07-03T16:08:02.466423 | 2020-09-24T16:22:28 | 2020-09-24T16:22:28 | 178,812,685 | 1 | 2 | null | 2019-10-18T14:43:19 | 2019-04-01T07:51:47 | Python | UTF-8 | Python | false | false | 65 | py | for _ in range(int(input())):
n = int(input())
print(n) | [
"[email protected]"
]
| |
6d72fc5e1fd1df9222164bb3be5e6ec1ec3b123e | 81c344b8df43ed550cb9496c664a8de2687eda3e | /venv/lib/python3.8/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_switch_controller_system.py | 3a8271b6b43f3bd147ba71128a63cce53822870d | []
| no_license | anhdoan-ntt/cisco-aci | dc0e52b6d19ee0bafb2b24e0febe955952bf39ef | 185be6d6f13eabd65fb0ff328ea54f6507ccf0d4 | refs/heads/main | 2022-12-20T00:07:27.465096 | 2020-10-05T08:15:29 | 2020-10-05T08:15:29 | 300,500,699 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,062 | py | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019-2020 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_switch_controller_system
short_description: Configure system-wide switch controller settings in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify switch_controller feature and system category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.0
version_added: "2.9"
author:
- Link Zheng (@chillancezen)
- Hongbin Lu (@fgtdev-hblu)
- Frank Shen (@frankshen01)
- Jie Xue (@JieX19)
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Legacy fortiosapi has been deprecated, httpapi is the preferred way to run playbooks
requirements:
- ansible>=2.9.0
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
switch_controller_system:
description:
- Configure system-wide switch controller settings.
default: null
type: dict
suboptions:
parallel_process:
description:
- Maximum number of parallel processes (1 - 300).
type: int
parallel_process_override:
description:
- Enable/disable parallel process override.
type: str
choices:
- disable
- enable
'''
EXAMPLES = '''
- hosts: fortigates
collections:
- fortinet.fortios
connection: httpapi
vars:
vdom: "root"
ansible_httpapi_use_ssl: yes
ansible_httpapi_validate_certs: no
ansible_httpapi_port: 443
tasks:
- name: Configure system-wide switch controller settings.
fortios_switch_controller_system:
vdom: "{{ vdom }}"
switch_controller_system:
parallel_process: "3"
parallel_process_override: "disable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import FortiOSHandler
from ansible_collections.fortinet.fortios.plugins.module_utils.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_switch_controller_system_data(json):
option_list = ['parallel_process', 'parallel_process_override']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
for i, elem in enumerate(data):
data[i] = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
def switch_controller_system(data, fos):
vdom = data['vdom']
switch_controller_system_data = data['switch_controller_system']
filtered_data = underscore_to_hyphen(filter_switch_controller_system_data(switch_controller_system_data))
return fos.set('switch-controller',
'system',
data=filtered_data,
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_switch_controller(data, fos):
if data['switch_controller_system']:
resp = switch_controller_system(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success" and \
(resp['revision_changed'] if 'revision_changed' in resp else True), \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"switch_controller_system": {
"required": False, "type": "dict", "default": None,
"options": {
"parallel_process": {"required": False, "type": "int"},
"parallel_process_override": {"required": False, "type": "str",
"choices": ["disable",
"enable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
versions_check_result = None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_switch_controller(module.params, fos)
versions_check_result = connection.get_system_version()
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_switch_controller(module.params, fos)
fos.logout()
if versions_check_result and versions_check_result['matched'] is False:
module.warn("Ansible has detected version mismatch between FortOS system and galaxy, see more details by specifying option -vvv")
if not is_error:
if versions_check_result and versions_check_result['matched'] is False:
module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result)
else:
module.exit_json(changed=has_changed, meta=result)
else:
if versions_check_result and versions_check_result['matched'] is False:
module.fail_json(msg="Error in repo", version_check_warning=versions_check_result, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
50e593e5345382e1c4b44ea0718549de7998cc89 | d3eec7a516ffca4620206f23d399fa085660fb3c | /demo/settings.py | 48a2c62a6757f3a8fb08ef27522fb303d0295091 | []
| no_license | anykate/extend_user_demo | 90c5bff2e973d22d0976e495d2ca4e686be86cf1 | bffce102cefffad30975032c378ce3efbbfb0b5d | refs/heads/master | 2020-11-28T09:52:20.453293 | 2019-12-23T15:21:19 | 2019-12-23T15:21:19 | 229,775,100 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,139 | py | """
Django settings for demo project.
Generated by 'django-admin startproject' using Django 3.0.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^wya3o)mghoj7fa$@c(3ra*y7n%ie+6#r0vu2db87h13ce)noi'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# My Apps
'myapps.app.apps.AppConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'demo.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'demo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kolkata'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
]
| |
13f3dd0325d622f22a952ada5c8b888c11b78111 | d9a4cecfdcccdb60f55b5e3a5670d178f4cf2211 | /copyjunctions_simple.py | d6190e6d10a0be188019de1fac8a00b6d9bfe112 | [
"Apache-2.0"
]
| permissive | geng-lee/BAM-to-Junction-BED | e75426f919bbe1781abe8dc6be2de3f83157585b | a878710ee395b2bf7f2eb7e4df84f8383d39d662 | refs/heads/master | 2023-03-17T01:08:20.040686 | 2017-02-28T20:39:23 | 2017-02-28T20:39:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,594 | py | ### hierarchical_clustering.py
#Copyright 2005-2012 J. David Gladstone Institutes, San Francisco California
#Author Nathan Salomonis - [email protected]
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is furnished
#to do so, subject to the following conditions:
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
#INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
#PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
#HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
#OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
#SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#################
### Imports an tab-delimited expression matrix and produces and hierarchically clustered heatmap
#################
# bugs, import matplotlib.colors as mc, -row_method
# new features fastcluster
import export
import string
import time
import sys, os
import shutil
import unique
import getopt
################# General data import methods #################
def filepath(filename):
fn = unique.filepath(filename)
return fn
def cleanUpLine(line):
data = string.replace(line,'\n','')
data = string.replace(data,'\c','')
data = string.replace(data,'\r','')
data = string.replace(data,'"','')
return data
def getFolders(sub_dir):
dir_list = unique.read_directory(sub_dir); dir_list2 = []
###Only get folder names
for entry in dir_list:
if '.' not in entry: dir_list2.append(entry)
return dir_list2
def getFiles(sub_dir):
dir_list = unique.read_directory(sub_dir); dir_list2 = []
###Only get folder names
for entry in dir_list:
if '.' in entry: dir_list2.append(entry)
return dir_list2
def copyJunctionFiles(directory):
root_dir = getFolders(directory)
#print root_dir
for top_level in root_dir: ### e.g.,
try:
files = getFiles(directory+'/'+top_level)
for file in files:
if 'junctions.bed' in file and 'junctionBEDfiles' not in top_level:
source_file = directory+'/'+top_level+'/'+file
source_file = filepath(source_file)
destination_file = directory+'/'+'junctionBEDfiles/'+top_level+'__junctions.bed'
destination_file = filepath(destination_file)
export.copyFile(source_file,destination_file)
print 'copying to:',destination_file
except Exception:
print 'failed to copy', source_file
if __name__ == '__main__':
if len(sys.argv[1:])<=1: ### Indicates that there are insufficient number of command-line arguments
print "Warning! Please designate a BAM file as input in the command-line"
print "Example: python BAMtoJunctionBED.py --i /Users/me/sample1.bam --g /Users/me/human.gtf"
sys.exit()
else:
options, remainder = getopt.getopt(sys.argv[1:],'', ['i=','g=','r='])
for opt, arg in options:
if opt == '--i': directory=arg
try: os.mkdir(directory+'/junctionBEDfiles')
except Exception: pass
copyJunctionFiles(directory)
| [
"[email protected]"
]
| |
fca91a107b5e307a442b5bceeab6a80e12a5c2d9 | 6df76f8a6fcdf444c3863e3788a2f4b2c539c22c | /django code/p25/enroll/forms.py | 81abe59143f26d866e689893095a339e98c74dbb | []
| no_license | basantbhandari/DjangoProjectsAsDocs | 068e4a704fade4a97e6c40353edb0a4299bd9678 | 594dbb560391eaf94bb6db6dc07702d127010b88 | refs/heads/master | 2022-12-18T22:33:23.902228 | 2020-09-22T13:11:01 | 2020-09-22T13:11:01 | 297,651,728 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 161 | py | from django import forms
class StudentRegistration(forms.Form):
name = forms.CharField()
email = forms.EmailField()
first_name = forms.CharField()
| [
"[email protected]"
]
| |
de9c252d46aa0d2b94e2e420716e08d4b131d555 | fc195da9608a52dc298f2fea74d38260425ba829 | /examples/translations/japanese_test_1.py | c45ba7a97b67466613e62ddb3d4f77b38e8dd496 | [
"MIT"
]
| permissive | kawarada-san/SeleniumBase | 9f824992911d0eade63df6ab877ae9fd9d6f5b5e | 8e601717fdef0814aae01c6411ea0e1fb114a269 | refs/heads/master | 2021-04-08T21:26:08.663911 | 2020-03-20T04:37:28 | 2020-03-20T04:37:28 | 248,811,137 | 0 | 0 | MIT | 2020-03-20T17:11:45 | 2020-03-20T17:11:45 | null | UTF-8 | Python | false | false | 845 | py | # Japanese Language Test - Python 3 Only!
from seleniumbase.translate.japanese import セレンテストケース # noqa
class テストクラス(セレンテストケース): # noqa
def test_例1(self):
self.URLを開く("https://ja.wikipedia.org/wiki/")
self.テキストを確認する("ウィキペディア")
self.要素を確認する('[title="メインページに移動する"]')
self.テキストを更新("#searchInput", "アニメ")
self.クリックして("#searchButton")
self.テキストを確認する("アニメ", "#firstHeading")
self.テキストを更新("#searchInput", "寿司")
self.クリックして("#searchButton")
self.テキストを確認する("寿司", "#firstHeading")
self.要素を確認する('img[alt="握り寿司"]')
| [
"[email protected]"
]
| |
19ae610a39764ccc48ee3fdcc09812438be053ca | facb8b9155a569b09ba66aefc22564a5bf9cd319 | /wp2/era5_scripts/02_preprocessing/concat82/612-tideGauge.py | 1815baf705abdb18d06e84f33cb97d92e2e83887 | []
| no_license | moinabyssinia/modeling-global-storm-surges | 13e69faa8f45a1244a964c5de4e2a5a6c95b2128 | 6e385b2a5f0867df8ceabd155e17ba876779c1bd | refs/heads/master | 2023-06-09T00:40:39.319465 | 2021-06-25T21:00:44 | 2021-06-25T21:00:44 | 229,080,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,482 | py | # -*- coding: utf-8 -*-
"""
Created on Tue May 13 10:02:00 2020
---------------------------------------------------------
This script concatenates yearly predictor files
Browses the predictor folders for the chosen TG
Concatenates the yearly csvs for the chosen predictor
Saves the concatenated csv in a separate directory
---------------------------------------------------------
@author: Michael Tadesse
"""
#%% import packages
import os
import pandas as pd
#%% define directories
home = '/lustre/fs0/home/mtadesse/erafive_localized'  # per-tide-gauge yearly predictor csvs
out_path = '/lustre/fs0/home/mtadesse/eraFiveConcat'  # destination for concatenated csvs
#cd to the home dir to get TG information
os.chdir(home)
tg_list = os.listdir()
# Only process entry 612 of the directory listing (range is half-open, so a
# single station); sibling scripts cover other indices.
# NOTE(review): os.listdir order is not sorted/stable -- confirm the index ->
# station mapping is reproducible across runs.
x = 612
y = 613
#looping through TGs
for t in range(x, y):
    tg = tg_list[t]
    print(tg)
    #concatenate folder paths
    os.chdir(os.path.join(home, tg))
    #defining the folders for predictors
    #choose only u, v, and slp
    where = os.getcwd()
    csv_path = {'slp' : os.path.join(where, 'slp'),\
                "wnd_u": os.path.join(where, 'wnd_u'),\
                'wnd_v' : os.path.join(where, 'wnd_v')}
    #%%looping through predictors
    for pred in csv_path.keys():
        os.chdir(os.path.join(home, tg))
        # print(tg, ' ', pred, '\n')
        #cd to the chosen predictor
        os.chdir(pred)
        #%%looping through the yearly csv files
        # count==1 marks the first yearly file, which seeds `dat`; later
        # files are appended row-wise.  Row order follows os.listdir order.
        count = 1
        for yr in os.listdir():
            print(pred, ' ', yr)
            if count == 1:
                dat = pd.read_csv(yr)
                # print('original size is: {}'.format(dat.shape))
            else:
                #remove the header of the subsequent csvs before merging
                # dat_yr = pd.read_csv(yr, header=None).iloc[1:,:]
                dat_yr = pd.read_csv(yr)
                dat_yr.shape  # no-op left over from debugging
                dat = pd.concat([dat, dat_yr], axis = 0)
                # print('concatenated size is: {}'.format(dat.shape))
            count+=1
        print(dat.shape)
        #saving concatenated predictor
        #cd to the saving location
        os.chdir(out_path)
        #create/cd to the tg folder
        try:
            os.makedirs(tg)
            os.chdir(tg) #cd to it after creating it
        except FileExistsError:
            #directory already exists
            os.chdir(tg)
        #save as csv
        pred_name = '.'.join([pred, 'csv'])
        dat.to_csv(pred_name)
| [
"[email protected]"
]
| |
70fac4fbeee5e3465e7135ca016d3e4c35c96452 | 89b4fb1cbaa472e1b19c2e5b024c8ba788e27a3d | /tests/test_pyppeteer.py | e74a7f06d263451334d7f6b6ec968d79c6aafa89 | [
"MIT",
"Apache-2.0"
]
| permissive | moe-m/pyppeteer | 330dbfc2774fb25e3f9b57641a4d09a08bb52ed4 | caf4d3a9e50d1ccb93d4219eed169471b1783bc0 | refs/heads/dev | 2021-04-24T21:55:59.722124 | 2018-01-09T23:58:00 | 2018-01-09T23:58:00 | 116,884,402 | 0 | 0 | null | 2018-01-09T23:58:01 | 2018-01-09T23:52:55 | Python | UTF-8 | Python | false | false | 13,314 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pyppeteer
----------------------------------
Tests for `pyppeteer` module.
"""
import asyncio
from pathlib import Path
import time
import unittest
from syncer import sync
from pyppeteer.launcher import launch
from pyppeteer.util import install_asyncio, get_free_port
from server import get_application, BASE_HTML
def setUpModule():
    # unittest module-level hook, run once before any test class.
    # install_asyncio comes from pyppeteer.util -- presumably wires asyncio
    # into the tornado IO loop; confirm in pyppeteer.util.
    install_asyncio()
class TestPyppeteer(unittest.TestCase):
    """End-to-end checks of core pyppeteer page operations (navigation,
    selectors, clicks/taps, waiting, cookies, redirects) against the local
    tornado test app.

    One browser and one page are shared by the whole class; ``setUp``
    re-navigates to the app root before each test.
    """
    @classmethod
    def setUpClass(cls):
        # Start the test HTTP app on a free port, then launch one shared
        # browser/page for every test in this class.
        cls.port = get_free_port()
        time.sleep(0.1)  # brief pause so the freshly-probed port settles
        cls.app = get_application()
        cls.server = cls.app.listen(cls.port)
        cls.browser = launch()
        cls.page = sync(cls.browser.newPage())
    @classmethod
    def tearDownModule(cls):
        # NOTE(review): defined as a classmethod named ``tearDownModule``;
        # unittest only auto-runs a module-level tearDownModule or a
        # classmethod named tearDownClass, so this cleanup is likely never
        # invoked -- confirm intent.
        sync(cls.browser.close())
        cls.server.stop()
    def setUp(self):
        # Every test starts from the app's root page.
        self.url = 'http://localhost:{}/'.format(self.port)
        sync(self.page.goto(self.url))
    @sync
    async def test_get(self):
        # Basic navigation: title, URL, and presence of the #hello heading.
        self.assertEqual(await self.page.title(), 'main')
        self.assertEqual(self.page.url, self.url)
        self.elm = await self.page.querySelector('h1#hello')
        self.assertTrue(self.elm)
        await self.page.goto('about:blank')
        self.assertEqual(self.page.url, 'about:blank')
    @sync
    async def test_get_https(self):
        await self.page.goto('https://example.com/')
        self.assertEqual(self.page.url, 'https://example.com/')
    @sync
    async def test_plain_text(self):
        text = await self.page.plainText()
        self.assertEqual(text.split(), ['Hello', 'link1', 'link2'])
    @sync
    async def test_content(self):
        # Newlines stripped so formatting differences don't matter.
        html = await self.page.content()
        self.assertEqual(html.replace('\n', ''), BASE_HTML.replace('\n', ''))
    @sync
    async def test_element(self):
        elm = await self.page.querySelector('h1')
        text = await self.page.evaluate(
            '(element) => element.textContent', elm)
        self.assertEqual('Hello', text)
    @sync
    async def test_element_depr(self):
        # The element-level evaluate() API is deprecated: expect both a
        # warning log entry containing [DEPRECATED] and a DeprecationWarning.
        elm = await self.page.querySelector('h1')
        with self.assertLogs('pyppeteer', level='WARN') as cm, self.assertWarns(DeprecationWarning): # noqa
            text = await elm.evaluate('(element) => element.textContent')
        self.assertIn('[DEPRECATED]', cm.output[0])
        self.assertEqual('Hello', text)
    @sync
    async def test_elements(self):
        elms = await self.page.querySelectorAll('a')
        self.assertEqual(len(elms), 2)
        elm1 = elms[0]
        elm2 = elms[1]
        # attribute() is deprecated as well -- same warning expectations.
        with self.assertLogs('pyppeteer', level='WARN') as cm, self.assertWarns(DeprecationWarning): # noqa
            self.assertEqual(await elm1.attribute('id'), 'link1')
        self.assertIn('[DEPRECATED]', cm.output[0])
        with self.assertLogs('pyppeteer', level='WARN') as cm, self.assertWarns(DeprecationWarning): # noqa
            self.assertEqual(await elm2.attribute('id'), 'link2')
        self.assertIn('[DEPRECATED]', cm.output[0])
    @sync
    async def test_element_inner_html(self):
        elm = await self.page.querySelector('h1')
        text = await self.page.evaluate('(element) => element.innerHTML', elm)
        self.assertEqual('Hello', text)
    @sync
    async def test_element_outer_html(self):
        elm = await self.page.querySelector('h1')
        text = await self.page.evaluate('(element) => element.outerHTML', elm)
        self.assertEqual('<h1 id="hello">Hello</h1>', text)
    @sync
    async def test_element_attr(self):
        _id = await self.page.querySelectorEval('h1', ('(elm) => elm.id'))
        self.assertEqual('hello', _id)
    @sync
    async def test_click(self):
        await self.page.click('#link1')
        await asyncio.sleep(0.05)  # let the navigation begin
        await self.page.waitForSelector('h1#link1')
        self.assertEqual(await self.page.title(), 'link1')
        elm = await self.page.querySelector('h1#link1')
        self.assertTrue(elm)
    @sync
    async def test_tap(self):
        await self.page.tap('#link1')
        await asyncio.sleep(0.05)
        await self.page.waitForSelector('h1#link1')
        self.assertEqual(self.page.url, self.url + '1')
        self.assertEqual(await self.page.title(), 'link1')
    @sync
    async def test_wait_for_timeout(self):
        await self.page.click('#link1')
        await self.page.waitFor(0.1)
        self.assertEqual(await self.page.title(), 'link1')
    @sync
    async def test_wait_for_function(self):
        # Inject a <section> after 200 ms, then wait on a JS predicate.
        await self.page.evaluate(
            '() => {'
            '    setTimeout(() => {'
            '        document.body.innerHTML = "<section>a</section>"'
            '    }, 200)'
            '}'
        )
        await asyncio.sleep(0.05)
        await self.page.waitForFunction(
            '() => !!document.querySelector("section")'
        )
        self.assertIsNotNone(await self.page.querySelector('section'))
    @sync
    async def test_wait_for_selector(self):
        # Same injection as above, but wait on the selector directly.
        await self.page.evaluate(
            '() => {'
            '    setTimeout(() => {'
            '        document.body.innerHTML = "<section>a</section>"'
            '    }, 200)'
            '}'
        )
        await asyncio.sleep(0.05)
        await self.page.waitForSelector('section')
        self.assertIsNotNone(await self.page.querySelector('section'))
    @sync
    async def test_elm_click(self):
        btn1 = await self.page.querySelector('#link1')
        self.assertTrue(btn1)
        await btn1.click()
        await asyncio.sleep(0.05)
        await self.page.waitForSelector('h1#link1')
        self.assertEqual(await self.page.title(), 'link1')
    @sync
    async def test_elm_tap(self):
        btn1 = await self.page.querySelector('#link1')
        self.assertTrue(btn1)
        await btn1.tap()
        await asyncio.sleep(0.05)
        await self.page.waitForSelector('h1#link1')
        self.assertEqual(await self.page.title(), 'link1')
    @sync
    async def test_back_forward(self):
        # Navigate to link1, then exercise history: back to main, forward
        # to link1 again.
        await self.page.click('#link1')
        await self.page.waitForSelector('h1#link1')
        self.assertEqual(await self.page.title(), 'link1')
        await self.page.goBack()
        await self.page.waitForSelector('h1#hello')
        self.assertEqual(await self.page.title(), 'main')
        elm = await self.page.querySelector('h1#hello')
        self.assertTrue(elm)
        await self.page.goForward()
        await self.page.waitForSelector('h1#link1')
        self.assertEqual(await self.page.title(), 'link1')
        btn2 = await self.page.querySelector('#link1')
        self.assertTrue(btn2)
    @sync
    async def test_cookies(self):
        # Round-trip cookies between document.cookie and the CDP cookie API:
        # set via JS, add via setCookie, delete via deleteCookie.
        cookies = await self.page.cookies()
        self.assertEqual(cookies, [])
        await self.page.evaluate(
            '() => {document.cookie = "username=John Doe"}'
        )
        cookies = await self.page.cookies()
        self.assertEqual(cookies, [{
            'name': 'username',
            'value': 'John Doe',
            'domain': 'localhost',
            'path': '/',
            'expires': 0,
            'size': 16,
            'httpOnly': False,
            'secure': False,
            'session': True,
        }])
        await self.page.setCookie({'name': 'password', 'value': '123456'})
        cookies = await self.page.evaluate(
            '() => document.cookie'
        )
        self.assertEqual(cookies, 'username=John Doe; password=123456')
        cookies = await self.page.cookies()
        self.assertEqual(cookies, [{
            'name': 'password',
            'value': '123456',
            'domain': 'localhost',
            'path': '/',
            'expires': 0,
            'size': 14,
            'httpOnly': False,
            'secure': False,
            'session': True,
        }, {
            'name': 'username',
            'value': 'John Doe',
            'domain': 'localhost',
            'path': '/',
            'expires': 0,
            'size': 16,
            'httpOnly': False,
            'secure': False,
            'session': True,
        }])
        await self.page.deleteCookie({'name': 'username'})
        cookies = await self.page.evaluate(
            '() => document.cookie'
        )
        self.assertEqual(cookies, 'password=123456')
        cookies = await self.page.cookies()
        self.assertEqual(cookies, [{
            'name': 'password',
            'value': '123456',
            'domain': 'localhost',
            'path': '/',
            'expires': 0,
            'size': 14,
            'httpOnly': False,
            'secure': False,
            'session': True,
        }])
    @sync
    async def test_redirect(self):
        # /redirect1 redirects to /redirect2; the final page must render.
        await self.page.goto(self.url + 'redirect1')
        await self.page.waitForSelector('h1#red2')
        self.assertEqual(await self.page.plainText(), 'redirect2')
class TestPage(unittest.TestCase):
    """Page-level feature tests (dialogs, emulation, tracing, injection,
    headers) against the local tornado app.

    Unlike TestPyppeteer this class opens a fresh page per test.
    """
    @classmethod
    def setUpClass(cls):
        cls.port = get_free_port()
        cls.url = 'http://localhost:{}/'.format(cls.port)
        cls.app = get_application()
        time.sleep(0.1)  # brief pause so the freshly-probed port settles
        cls.server = cls.app.listen(cls.port)
        cls.browser = launch(headless=True)
    def setUp(self):
        # Fresh page per test, starting at the app root.
        self.page = sync(self.browser.newPage())
        sync(self.page.goto(self.url))
    def tearDown(self):
        sync(self.page.goto('about:blank'))
    @classmethod
    def tearDownModule(cls):
        # NOTE(review): same naming issue as in TestPyppeteer -- a classmethod
        # called tearDownModule is never invoked by unittest; likely meant
        # tearDownClass.  Confirm.
        sync(cls.browser.close())
        cls.server.stop()
    @sync
    async def test_close_page(self):
        await self.page.close()
        # Replace the page so tearDown's goto still has one to work with.
        self.page = await self.browser.newPage()
    @sync
    async def test_alert(self):
        # Dialog assertions run inside the event callback; accept() must be
        # scheduled (not awaited) because the callback is synchronous.
        def dialog_test(dialog):
            self.assertEqual(dialog.type, 'alert')
            self.assertEqual(dialog.defaultValue(), '')
            self.assertEqual(dialog.message(), 'yo')
            asyncio.ensure_future(dialog.accept())
        self.page.on('dialog', dialog_test)
        await self.page.evaluate('() => alert("yo")')
    @sync
    async def test_prompt(self):
        def dialog_test(dialog):
            self.assertEqual(dialog.type, 'prompt')
            self.assertEqual(dialog.defaultValue(), 'yes.')
            self.assertEqual(dialog.message(), 'question?')
            asyncio.ensure_future(dialog.accept('answer!'))
        self.page.on('dialog', dialog_test)
        answer = await self.page.evaluate('() => prompt("question?", "yes.")')
        self.assertEqual(answer, 'answer!')
    @sync
    async def test_prompt_dismiss(self):
        # A dismissed prompt resolves to null on the JS side -> None here.
        def dismiss_test(dialog, *args):
            asyncio.ensure_future(dialog.dismiss())
        self.page.on('dialog', dismiss_test)
        result = await self.page.evaluate('() => prompt("question?", "yes.")')
        self.assertIsNone(result)
    @sync
    async def test_user_agent(self):
        self.assertIn('Mozilla', await self.page.evaluate(
            '() => navigator.userAgent'))
        await self.page.setUserAgent('foobar')
        # A navigation is needed before the new UA takes effect.
        await self.page.goto(self.url)
        self.assertEqual('foobar', await self.page.evaluate(
            '() => navigator.userAgent'))
    @sync
    async def test_viewport(self):
        # Smoke test: the call must simply succeed with a full option set.
        await self.page.setViewport(dict(
            width=480,
            height=640,
            deviceScaleFactor=3,
            isMobile=True,
            hasTouch=True,
            isLandscape=True,
        ))
    @sync
    async def test_emulate(self):
        # Smoke test of device emulation (UA + viewport together).
        await self.page.emulate(dict(
            userAgent='test',
            viewport=dict(
                width=480,
                height=640,
                deviceScaleFactor=3,
                isMobile=True,
                hasTouch=True,
                isLandscape=True,
            ),
        ))
    @sync
    async def test_inject_file(self):
        # Write a temporary JS file, inject it, and verify its side effect
        # (a new <section>) appears in the DOM.
        tmp_file = Path('tmp.js')
        with tmp_file.open('w') as f:
            f.write('''
() => document.body.appendChild(document.createElement("section"))
            '''.strip())
        await self.page.injectFile(str(tmp_file))
        await self.page.waitForSelector('section')
        self.assertIsNotNone(await self.page.J('section'))
        tmp_file.unlink()
    @sync
    async def test_tracing(self):
        # Start tracing to a json file, navigate, stop, and check the file
        # was produced next to this test module.
        outfile = Path(__file__).parent / 'trace.json'
        if outfile.is_file():
            outfile.unlink()
        await self.page.tracing.start({
            'path': str(outfile)
        })
        await self.page.goto(self.url)
        await self.page.tracing.stop()
        self.assertTrue(outfile.is_file())
    @unittest.skip('This test fails')
    @sync
    async def test_interception_enable(self):
        await self.page.setRequestInterceptionEnabled(True)
        await self.page.goto(self.url)
    @unittest.skip('This test fails')
    @sync
    async def test_auth(self):
        await self.page.authenticate({'username': 'test', 'password': 'pass'})
        await self.page.goto(self.url + 'auth')
    @sync
    async def test_no_await_check_just_call(self):
        # Smoke tests: each API must be awaitable and not raise.
        await self.page.setExtraHTTPHeaders({'a': 'b'})
        await self.page.addScriptTag('https://code.jquery.com/jquery-3.2.1.slim.min.js') # noqa: E501
        await self.page.setContent('')
        await self.page.reload()
        await self.page.setJavaScriptEnabled(True)
        await self.page.emulateMedia()
        await self.page.evaluateOnNewDocument('() => 1 + 2')
| [
"[email protected]"
]
| |
f2ec83131298fd44f3966e637b944e3667885a12 | 3940b4a507789e1fbbaffeb200149aee215f655a | /lc/review_629.KInversePairsArray.py | 8a4cbd070efefb7b9865f42b2eb8fafcf752eaa7 | []
| no_license | akimi-yano/algorithm-practice | 15f52022ec79542d218c6f901a54396a62080445 | 1abc28919abb55b93d3879860ac9c1297d493d09 | refs/heads/master | 2023-06-11T13:17:56.971791 | 2023-06-10T05:17:56 | 2023-06-10T05:17:56 | 239,395,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,635 | py | # 629. K Inverse Pairs Array
# Hard
# 1131
# 136
# Add to List
# Share
# For an integer array nums, an inverse pair is a pair of integers [i, j] where 0 <= i < j < nums.length and nums[i] > nums[j].
# Given two integers n and k, return the number of different arrays consist of numbers from 1 to n such that there are exactly k inverse pairs. Since the answer can be huge, return it modulo 109 + 7.
# Example 1:
# Input: n = 3, k = 0
# Output: 1
# Explanation: Only the array [1,2,3] which consists of numbers from 1 to 3 has exactly 0 inverse pairs.
# Example 2:
# Input: n = 3, k = 1
# Output: 2
# Explanation: The array [1,3,2] and [2,1,3] have exactly 1 inverse pair.
# Constraints:
# 1 <= n <= 1000
# 0 <= k <= 1000
# This solution works:
class Solution:
    """LeetCode 629 -- count arrays built from 1..n with exactly k inverse pairs."""

    MOD = (10**9) + 7

    def kInversePairs(self, n: int, k: int) -> int:
        """Return the number of permutations of 1..n with exactly k
        inversions, modulo 10**9 + 7.

        dp[i][j] = number of permutations of 1..i with exactly j inversions.
        Inserting the value i into a permutation of 1..i-1 creates between
        0 and i-1 new inversions, so

            dp[i][j] = sum(dp[i-1][j-m] for m in range(min(i-1, j) + 1))

        That window sum telescopes against dp[i][j-1], giving the O(1)
        recurrence below: add the new endpoint dp[i-1][j] and drop
        dp[i-1][j-i] once it slides out of the window.  O(n*k) time.

        Improvement over the original: the modulus is applied inside the
        loop so intermediate values stay bounded instead of growing into
        huge Python ints (Python's % is non-negative, so the subtraction
        is safe).  The result is unchanged.
        """
        MOD = Solution.MOD
        dp = [[0] * (k + 1) for _ in range(n + 1)]
        # Exactly one arrangement (the sorted one) has zero inversions.
        for i in range(n + 1):
            dp[i][0] = 1
        for i in range(1, n + 1):
            for j in range(1, k + 1):
                dp[i][j] = (dp[i][j - 1] + dp[i - 1][j]
                            - (dp[i - 1][j - i] if j >= i else 0)) % MOD
        return dp[n][k]
"[email protected]"
]
| |
954aed68a50f07d377742c0dd09e93768493af8f | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_018/ch118_2020_03_31_17_12_49_349131.py | 5b8e656f9a0736224a9852ea334e20ccbbe25997 | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py | import math
def reflexao_total_interna(n1, n2, teta2graus):
    """Return True when total internal reflection occurs.

    Light travels from the medium of refractive index ``n2`` toward the
    medium of index ``n1``, hitting the interface at ``teta2graus``
    degrees from the normal.  By Snell's law,

        n1 * sin(theta1) = n2 * sin(theta2)

    so the refracted angle would need sin(theta1) = n2*sin(theta2)/n1.
    When that value exceeds 1 no refracted ray exists -> total internal
    reflection.

    Bug fixed: the original stored sin(theta1) in ``teta1rad`` and then
    applied ``math.sin`` to it AGAIN; since sin(x) <= 1 for every real x,
    the test ``math.sin(teta1rad) > 1`` could never hold and the function
    always returned False.
    """
    teta2rad = math.radians(teta2graus)
    # Snell's law gives the sine of the refraction angle directly.
    sin_teta1 = (n2 * math.sin(teta2rad)) / n1
    # sin(theta1) > 1 has no real solution -> the ray is totally reflected.
    return sin_teta1 > 1
"[email protected]"
]
| |
566f6d1dd1cf89383dcf07c36cdd66d48a63be8f | d2c4934325f5ddd567963e7bd2bdc0673f92bc40 | /tests/artificial/transf_RelativeDifference/trend_PolyTrend/cycle_30/ar_12/test_artificial_32_RelativeDifference_PolyTrend_30_12_20.py | 3d4e035ecfec1b335fef8fbde0e7c83cbacb5489 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | jmabry/pyaf | 797acdd585842474ff4ae1d9db5606877252d9b8 | afbc15a851a2445a7824bf255af612dc429265af | refs/heads/master | 2020-03-20T02:14:12.597970 | 2018-12-17T22:08:11 | 2018-12-17T22:08:11 | 137,104,552 | 0 | 0 | BSD-3-Clause | 2018-12-17T22:08:12 | 2018-06-12T17:15:43 | Python | UTF-8 | Python | false | false | 279 | py | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
# Generated benchmark case: a 32-point daily series with a polynomial trend,
# cycle length 30, AR order 12, a RelativeDifference transform, no noise
# (sigma=0.0) and 20 exogenous variables.
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 30, transform = "RelativeDifference", sigma = 0.0, exog_count = 20, ar_order = 12);
"[email protected]"
]
| |
deb2f2b6d8483b552b2f4314ce7f5ba958462d3a | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/sdssj_171642.81+412622.3/sdB_SDSSJ_171642.81+412622.3_coadd.py | 269e76e3327b7a8949217536052e70c5b6397b16 | []
| no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 482 | py | from gPhoton.gMap import gMap
def main():
    # Build a 30 s-cadence NUV count movie plus a coadded count image for
    # this sdB target over a 0.0333 x 0.0333 degree field, overwriting any
    # previous output.  (Paths in cntfile/cntcoaddfile differ in the 'sdBs'
    # vs 'sdB' directory segment -- presumably intentional; confirm.)
    gMap(band="NUV", skypos=[259.178375,41.439528], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_SDSSJ_171642.81+412622.3/sdB_SDSSJ_171642.81+412622.3_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_SDSSJ_171642.81+412622.3/sdB_SDSSJ_171642.81+412622.3_count_coadd.fits", overwrite=True, verbose=3)
if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
8971f4fa9b187ce6b2424fd1736169682efe90b2 | 26f6313772161851b3b28b32a4f8d255499b3974 | /Python/1012_NumbersWithRepeatedDigits.py | 4f0a1b3cd50e2f10a2e76bc5453f8c25bb8a6125 | []
| no_license | here0009/LeetCode | 693e634a3096d929e5c842c5c5b989fa388e0fcd | f96a2273c6831a8035e1adacfa452f73c599ae16 | refs/heads/master | 2023-06-30T19:07:23.645941 | 2021-07-31T03:38:51 | 2021-07-31T03:38:51 | 266,287,834 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,762 | py | """
Given a positive integer N, return the number of positive integers less than or equal to N that have at least 1 repeated digit.
Example 1:
Input: 20
Output: 1
Explanation: The only positive number (<= 20) with at least 1 repeated digit is 11.
Example 2:
Input: 100
Output: 10
Explanation: The positive numbers (<= 100) with atleast 1 repeated digit are 11, 22, 33, 44, 55, 66, 77, 88, 99, and 100.
Example 3:
Input: 1000
Output: 262
Note:
1 <= N <= 10^9
"""
class Solution:
    """LeetCode 1012: count positive integers <= N with a repeated digit."""

    def numDupDigitsAtMostN(self, N: int) -> int:
        """Count the numbers in [1, N] that contain at least one repeated
        digit.

        Complement counting: tally the numbers whose digits are all
        distinct and subtract from N.  Working with M = N + 1 lets us
        count distinct-digit numbers strictly below M.
        """
        def falling(base, length):
            # base * (base - 1) * ... over `length` factors: the number of
            # ordered digit choices with no repeats.
            total = 1
            for step in range(length):
                total *= base - step
            return total

        digits = [int(ch) for ch in str(N + 1)]
        width = len(digits)

        # Distinct-digit numbers with fewer digits than M: 9 choices for
        # the leading digit (non-zero), then falling choices for the rest.
        distinct = sum(9 * falling(9, length - 1) for length in range(1, width))

        # Distinct-digit numbers with the same width but smaller than M:
        # fix a proper prefix of M's digits, pick a smaller unused digit at
        # the next position, and fill the remainder freely without repeats.
        used = set()
        for pos, digit in enumerate(digits):
            lo = 1 if pos == 0 else 0  # a leading zero is not allowed
            for candidate in range(lo, digit):
                if candidate not in used:
                    distinct += falling(9 - pos, width - pos - 1)
            if digit in used:
                # M itself repeats a digit here; no longer prefix of M can
                # start a distinct-digit number.
                break
            used.add(digit)

        return N - distinct
S = Solution()
# Smoke-check against the problem statement's examples
# (expected output: 1, 10, 262).
print(S.numDupDigitsAtMostN(20))
print(S.numDupDigitsAtMostN(100))
print(S.numDupDigitsAtMostN(1000))
"[email protected]"
]
| |
2d7c27442952fe478807ed74d7c022de2f10b28c | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_208/111.py | fb1d05f6613af1be13458ff9e2ba26f08ea8805e | []
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,654 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Apr 22 11:51:27 2017
@author: pellowes
"""
import numpy as np
import sys
# Input selection: the second assignment overrides the first (both kept from
# the contest session); switch inputs by editing these lines.
fileIn = '/Users/pellowes/test.in'
fileIn = '/Users/pellowes/Downloads/C-small-attempt1.in'
#fileIn = '/Users/pellowes/Downloads/A-large(3).in'
# Output path: everything after the FIRST dot replaced with '.out' (fine for
# these paths, but would truncate early if the path contained another dot).
fileOut = fileIn.split('.')[0]+'.out'
f = open(fileIn,'r')
fo = open(fileOut,'w')
class Town:
    """Value object for one town: its index, the distance row for roads
    leaving it, and the range/speed of the horse stabled there.

    ``timesTo`` starts empty and could cache best travel times to other
    towns; the visible solver below never instantiates this class.
    """

    def __init__(self, num, distances, horseDistance, horseSpeed):
        self.num = num
        self.distances = distances
        self.horseDistance = horseDistance
        self.horseSpeed = horseSpeed
        self.timesTo = {}
def solveSimple(n, q, horses, grid, stops):
    """Minimum time for the message to ride from town 0 to town n-1.

    Each town i owns one horse that can cover ``horses[i][0]`` distance
    units at ``horses[i][1]`` units per time.  ``grid[i][i+1]`` holds the
    road length between consecutive towns (towns lie on a single line).
    ``q`` and ``stops`` are accepted for interface compatibility but are
    unused by this simple solver.  Returns the best total time as a string.
    """
    max_range = [int(h[0]) for h in horses]
    speed = [int(h[1]) for h in horses]

    # suffix[i] = distance from town i to the final town, so the distance
    # from town j to town i (j < i) is suffix[j] - suffix[i].
    hop = [int(grid[i][i + 1]) for i in range(len(grid) - 1)]
    suffix = [0] * len(grid)
    running = 0
    for idx in range(len(hop) - 1, -1, -1):
        running += hop[idx]
        suffix[idx] = running

    INF = 1e99
    best = [INF] * n
    best[-1] = 0
    # Roads only lead forward, so sweeping destinations from the last town
    # backward finalizes best[dst] before any earlier town reads it.
    for dst in range(n - 1, -1, -1):
        for src in range(dst):
            leg = suffix[src] - suffix[dst]
            if max_range[src] >= leg:
                candidate = best[dst] + leg / speed[src]
                if candidate < best[src]:
                    best[src] = candidate
    if best[0] > 1e98:
        # Should not happen on valid input; dump the state for debugging.
        print(horses)
        print(max_range)
        print(speed)
        print(grid)
        print(suffix)
    return str(best[0])
# Standard Code Jam driver: first line is the number of cases; each case
# supplies "n q", then n horse rows, n grid rows, and q query lines.
numcases = int(f.readline())
for casenum in range(1,numcases+1):
    problem = f.readline().strip().split(' ')
    n = int(problem[0])
    q = int(problem[1])
    horses = []
    grid = []
    stops = []
    for row in range(0,n):
        horses.append(f.readline().strip().split(' '))
    for row in range(0,n):
        grid.append(f.readline().strip().split(' '))
    for row in range(0,q):
        stops.append(f.readline().strip())
    #print('---')
    # One "Case #i: answer" line per case, matching the judge's format.
    fo.write('Case #' + repr(casenum) + ': ' + solveSimple(n,q,horses,grid,stops)+'\n')
f.close()
fo.close()
"[email protected]"
]
| |
9da04c099883870e8ad931318365f288003451d8 | 9ed4d46aedd4d4acadb48d610e940594b5b7b3fd | /project_euler/problem_046/sol1.py | 07dd9bbf84c8046f193eae040861fc196b7ae147 | [
"MIT",
"CC-BY-NC-4.0",
"CC-BY-NC-SA-4.0"
]
| permissive | TheAlgorithms/Python | 7596a0e236ed12a61f9db19a7ea68309779cc85b | 421ace81edb0d9af3a173f4ca7e66cc900078c1d | refs/heads/master | 2023-09-01T17:32:20.190949 | 2023-08-29T13:18:10 | 2023-08-29T13:18:10 | 63,476,337 | 184,217 | 48,615 | MIT | 2023-09-14T02:05:29 | 2016-07-16T09:44:01 | Python | UTF-8 | Python | false | false | 2,808 | py | """
Problem 46: https://projecteuler.net/problem=46
It was proposed by Christian Goldbach that every odd composite number can be
written as the sum of a prime and twice a square.
9 = 7 + 2 × 12
15 = 7 + 2 × 22
21 = 3 + 2 × 32
25 = 7 + 2 × 32
27 = 19 + 2 × 22
33 = 31 + 2 × 12
It turns out that the conjecture was false.
What is the smallest odd composite that cannot be written as the sum of a
prime and twice a square?
"""
from __future__ import annotations
import math
def is_prime(number: int) -> bool:
    """Checks to see if a number is a prime in O(sqrt(n)).

    A number is prime if it has exactly two factors: 1 and itself.

    >>> is_prime(0)
    False
    >>> is_prime(1)
    False
    >>> is_prime(2)
    True
    >>> is_prime(3)
    True
    >>> is_prime(27)
    False
    >>> is_prime(87)
    False
    >>> is_prime(563)
    True
    >>> is_prime(2999)
    True
    >>> is_prime(67483)
    False
    """
    if 1 < number < 4:
        # 2 and 3 are primes
        return True
    elif number < 2 or number % 2 == 0 or number % 3 == 0:
        # Negatives, 0, 1, all even numbers, all multiples of 3 are not primes
        return False
    # All remaining primes are of the form 6k +/- 1, so only those
    # candidates need trial division.  math.isqrt gives an exact integer
    # square root, avoiding the float rounding of math.sqrt on very large
    # inputs (the original used int(math.sqrt(number) + 1)).
    for i in range(5, math.isqrt(number) + 1, 6):
        if number % i == 0 or number % (i + 2) == 0:
            return False
    return True
# All odd composite numbers below 100001 -- the candidates to test against
# Goldbach's "other" conjecture.
odd_composites = [num for num in range(3, 100001, 2) if not is_prime(num)]
def compute_nums(n: int) -> list[int]:
    """Return the first ``n`` odd composites that are NOT expressible as
    a prime plus twice a square (counterexamples to the conjecture).

    Raises ValueError when ``n`` is not an int or is not positive.

    >>> compute_nums(1)
    [5777]
    >>> compute_nums(2)
    [5777, 5993]
    >>> compute_nums(0)
    Traceback (most recent call last):
        ...
    ValueError: n must be >= 0
    >>> compute_nums("a")
    Traceback (most recent call last):
        ...
    ValueError: n must be an integer
    >>> compute_nums(1.1)
    Traceback (most recent call last):
        ...
    ValueError: n must be an integer
    """
    if not isinstance(n, int):
        raise ValueError("n must be an integer")
    if n <= 0:
        raise ValueError("n must be >= 0")
    found: list[int] = []
    for candidate in odd_composites:
        # Try every decomposition candidate = prime + 2*r*r.
        is_counterexample = True
        root = 0
        while 2 * root * root <= candidate:
            if is_prime(candidate - 2 * root * root):
                is_counterexample = False
                break
            root += 1
        if is_counterexample:
            found.append(candidate)
            if len(found) == n:
                return found
    # Fewer than n counterexamples exist below the search bound.
    return []
def solution() -> int:
    """Return the smallest odd composite that cannot be written as the
    sum of a prime and twice a square (Project Euler problem 46)."""
    first_counterexamples = compute_nums(1)
    return first_counterexamples[0]
if __name__ == "__main__":
    # f-string '=' debug specifier: prints e.g. ``solution() = 5777``.
    print(f"{solution() = }")
| [
"[email protected]"
]
| |
c95316ed88c4eb8b6507da52be9580434e57786d | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_divination.py | 6d43fd05084eaeeb1cd6871ad852cdf779c77057 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 360 | py |
#calss header
class _DIVINATION():
def __init__(self,):
self.name = "DIVINATION"
self.definitions = [u'the skill or act of saying or discovering what will happen in the future']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
]
| |
d254ebfbaeafdb969c9d4440d84ce4d00f4001b8 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/AlipayOfflineProviderIndirectisvActivityEffectModel.py | cac8ed6d796184997fe5f3aa98dbb056ba0bbc8a | [
"Apache-2.0"
]
| permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 2,002 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AlipayOfflineProviderIndirectisvActivityEffectModel(object):
    """Payload model for the indirect-ISV activity-effect API.

    Plain data holder with three optional fields, (de)serializing to the
    dict shape the Alipay gateway expects.  Falsy fields are omitted from
    the serialized dict; nested models that expose ``to_alipay_dict`` are
    serialized recursively.
    """

    _FIELDS = ('effective_time', 'ext_info', 'merchant_id')

    def __init__(self):
        self._effective_time = None
        self._ext_info = None
        self._merchant_id = None

    @property
    def effective_time(self):
        return self._effective_time

    @effective_time.setter
    def effective_time(self, value):
        self._effective_time = value

    @property
    def ext_info(self):
        return self._ext_info

    @ext_info.setter
    def ext_info(self, value):
        self._ext_info = value

    @property
    def merchant_id(self):
        return self._merchant_id

    @merchant_id.setter
    def merchant_id(self, value):
        self._merchant_id = value

    def to_alipay_dict(self):
        """Serialize to a plain dict, skipping unset (falsy) fields."""
        params = dict()
        for field in self._FIELDS:
            value = getattr(self, field)
            if not value:
                continue
            if hasattr(value, 'to_alipay_dict'):
                params[field] = value.to_alipay_dict()
            else:
                params[field] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a response dict; returns None for empty input."""
        if not d:
            return None
        o = AlipayOfflineProviderIndirectisvActivityEffectModel()
        for field in AlipayOfflineProviderIndirectisvActivityEffectModel._FIELDS:
            if field in d:
                setattr(o, field, d[field])
        return o
| [
"[email protected]"
]
| |
6c409fcaa252b26079a41244cd21a989f55251bc | 4d2cc76dbceff9eded071ba542ab2a1dd8c19f7b | /bhp056/apps/mpepu_lab/migrations/0002_auto__add_field_infantrequisition_sample_type__add_field_infantrequisi.py | 61675964e87a72884b15d72cd9cd71918dec3590 | []
| no_license | botswana-harvard/mpepu | 5d436638b760150ed76ec223121f5ac7aeee1020 | 6aa29c91f4fab50782b27e5f55aa33b30aee1dd0 | refs/heads/master | 2021-01-16T23:15:44.335940 | 2016-08-12T14:30:17 | 2016-08-12T14:30:17 | 65,557,693 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 39,317 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: add a non-null ``sample_type`` FK (-> AliquotType) to the
        infant/maternal requisition tables and their audit shadow tables.

        NOTE(review): ``default=0`` backfills existing rows -- confirm an
        AliquotType row with pk 0 exists in target databases (or that the
        column is re-populated after migrating).
        """
        # Adding field 'InfantRequisition.sample_type'
        db.add_column('mpepu_lab_infantrequisition', 'sample_type', self.gf('django.db.models.fields.related.ForeignKey')(default=0, to=orm['lab_aliquot_list.AliquotType']), keep_default=False)
        # Adding field 'InfantRequisitionAudit.sample_type'
        db.add_column('mpepu_lab_infantrequisition_audit', 'sample_type', self.gf('django.db.models.fields.related.ForeignKey')(default=0, related_name='_audit_infantrequisition', to=orm['lab_aliquot_list.AliquotType']), keep_default=False)
        # Adding field 'MaternalRequisition.sample_type'
        db.add_column('mpepu_lab_maternalrequisition', 'sample_type', self.gf('django.db.models.fields.related.ForeignKey')(default=0, to=orm['lab_aliquot_list.AliquotType']), keep_default=False)
        # Adding field 'MaternalRequisitionAudit.sample_type'
        db.add_column('mpepu_lab_maternalrequisition_audit', 'sample_type', self.gf('django.db.models.fields.related.ForeignKey')(default=0, related_name='_audit_maternalrequisition', to=orm['lab_aliquot_list.AliquotType']), keep_default=False)
    def backwards(self, orm):
        """Unapply: drop the ``sample_type_id`` columns created by forwards()."""
        # Deleting field 'InfantRequisition.sample_type'
        db.delete_column('mpepu_lab_infantrequisition', 'sample_type_id')
        # Deleting field 'InfantRequisitionAudit.sample_type'
        db.delete_column('mpepu_lab_infantrequisition_audit', 'sample_type_id')
        # Deleting field 'MaternalRequisition.sample_type'
        db.delete_column('mpepu_lab_maternalrequisition', 'sample_type_id')
        # Deleting field 'MaternalRequisitionAudit.sample_type'
        db.delete_column('mpepu_lab_maternalrequisition_audit', 'sample_type_id')
models = {
'bhp_appointment.appointment': {
'Meta': {'ordering': "['registered_subject', 'appt_datetime']", 'unique_together': "[('registered_subject', 'visit_definition', 'visit_instance')]", 'object_name': 'Appointment', 'db_table': "'bhp_form_appointment'"},
'appt_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'appt_reason': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'}),
'appt_status': ('django.db.models.fields.CharField', [], {'default': "'NEW'", 'max_length': '25'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'contact_tel': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'registered_subject': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['bhp_registration.RegisteredSubject']"}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'visit_definition': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['bhp_visit.VisitDefinition']"}),
'visit_instance': ('django.db.models.fields.CharField', [], {'default': "'0'", 'max_length': '1', 'null': 'True', 'blank': 'True'})
},
'bhp_content_type_map.contenttypemap': {
'Meta': {'unique_together': "(['app_label', 'model'],)", 'object_name': 'ContentTypeMap', 'db_table': "'bhp_common_contenttypemap'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'bhp_registration.registeredsubject': {
'Meta': {'object_name': 'RegisteredSubject'},
'comment': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'dob': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'identity': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'initials': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'is_dob_estimated': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'may_store_samples': ('django.db.models.fields.CharField', [], {'default': "'?'", 'max_length': '3'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'randomization_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'registration_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'registration_identifier': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'registration_status': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'relative_identifier': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'screening_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'sid': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'subject_consent_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'subject_identifier': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'}),
'subject_type': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'bhp_variables.studysite': {
'Meta': {'ordering': "['site_code']", 'unique_together': "[('site_code', 'site_name')]", 'object_name': 'StudySite'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'site_code': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'site_name': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'bhp_visit.membershipform': {
'Meta': {'object_name': 'MembershipForm', 'db_table': "'bhp_form_membershipform'"},
'category': ('django.db.models.fields.CharField', [], {'default': "'subject'", 'max_length': '25', 'null': 'True'}),
'content_type_map': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['bhp_content_type_map.ContentTypeMap']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hide_from_dashboard': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'bhp_visit.schedulegroup': {
'Meta': {'ordering': "['group_name']", 'object_name': 'ScheduleGroup', 'db_table': "'bhp_form_schedulegroup'"},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'group_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'}),
'grouping_key': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'membership_form': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_visit.MembershipForm']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'bhp_visit.visitdefinition': {
'Meta': {'ordering': "['code', 'time_point']", 'object_name': 'VisitDefinition', 'db_table': "'bhp_form_visitdefinition'"},
'base_interval': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'base_interval_unit': ('django.db.models.fields.CharField', [], {'default': "'D'", 'max_length': '10'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'grouping': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'instruction': ('django.db.models.fields.TextField', [], {'max_length': '255', 'blank': 'True'}),
'lower_window': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'lower_window_unit': ('django.db.models.fields.CharField', [], {'default': "'D'", 'max_length': '10'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'schedule_group': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['bhp_visit.ScheduleGroup']", 'symmetrical': 'False'}),
'time_point': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'upper_window': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'upper_window_unit': ('django.db.models.fields.CharField', [], {'default': "'D'", 'max_length': '10'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lab_aliquot_list.aliquottype': {
'Meta': {'ordering': "['name']", 'object_name': 'AliquotType', 'db_table': "'bhp_lab_core_aliquottype'"},
'alpha_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '15'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'dmis_reference': ('django.db.models.fields.IntegerField', [], {}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'numeric_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '2'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'lab_panel.panel': {
'Meta': {'object_name': 'Panel', 'db_table': "'bhp_lab_core_panel'"},
'aliquot_type': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['lab_aliquot_list.AliquotType']", 'symmetrical': 'False'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '250', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'dmis_panel_identifier': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'panel_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lab_panel.PanelGroup']"}),
'test_code': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['lab_test_code.TestCode']", 'symmetrical': 'False'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'lab_panel.panelgroup': {
'Meta': {'object_name': 'PanelGroup', 'db_table': "'bhp_lab_core_panelgroup'"},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'lab_test_code.testcode': {
'Meta': {'ordering': "['name']", 'object_name': 'TestCode', 'db_table': "'bhp_lab_test_code_testcode'"},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '15'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'display_decimal_places': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'formula': ('django.db.models.fields.CharField', [], {'max_length': "'50'", 'null': 'True', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_absolute': ('django.db.models.fields.CharField', [], {'default': "'absolute'", 'max_length': "'15'"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'test_code_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lab_test_code.TestCodeGroup']"}),
'units': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'lab_test_code.testcodegroup': {
'Meta': {'ordering': "['code']", 'object_name': 'TestCodeGroup', 'db_table': "'bhp_lab_test_code_testcodegroup'"},
'code': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'mpepu_infant.infantvisit': {
'Meta': {'object_name': 'InfantVisit'},
'appointment': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['bhp_appointment.Appointment']", 'unique': 'True'}),
'comments': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'info_source': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'info_source_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'information_provider': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'information_provider_other': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'reason_missed': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'report_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'study_status': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'mpepu_lab.infantrequisition': {
'Meta': {'object_name': 'InfantRequisition'},
'clinician_initials': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'comments': ('django.db.models.fields.TextField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'drawn_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'estimated_volume': ('django.db.models.fields.DecimalField', [], {'default': '5.0', 'max_digits': '7', 'decimal_places': '1'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'infant_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_infant.InfantVisit']"}),
'is_drawn': ('django.db.models.fields.CharField', [], {'default': "'Yes'", 'max_length': '3'}),
'item_count': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'item_type': ('django.db.models.fields.CharField', [], {'default': "'tube'", 'max_length': '25'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'panel': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lab_panel.Panel']"}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'normal'", 'max_length': '25'}),
'protocol': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'reason_not_drawn': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'requisition_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'requisition_identifier': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'sample_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lab_aliquot_list.AliquotType']"}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_variables.StudySite']"}),
'test_code': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lab_test_code.TestCode']", 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'mpepu_lab.infantrequisitionaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'InfantRequisitionAudit', 'db_table': "'mpepu_lab_infantrequisition_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'clinician_initials': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'comments': ('django.db.models.fields.TextField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'drawn_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'estimated_volume': ('django.db.models.fields.DecimalField', [], {'default': '5.0', 'max_digits': '7', 'decimal_places': '1'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'infant_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_infantrequisition'", 'to': "orm['mpepu_infant.InfantVisit']"}),
'is_drawn': ('django.db.models.fields.CharField', [], {'default': "'Yes'", 'max_length': '3'}),
'item_count': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'item_type': ('django.db.models.fields.CharField', [], {'default': "'tube'", 'max_length': '25'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'panel': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_infantrequisition'", 'to': "orm['lab_panel.Panel']"}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'normal'", 'max_length': '25'}),
'protocol': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'reason_not_drawn': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'requisition_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'requisition_identifier': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'sample_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_infantrequisition'", 'to': "orm['lab_aliquot_list.AliquotType']"}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_infantrequisition'", 'to': "orm['bhp_variables.StudySite']"}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'mpepu_lab.maternalrequisition': {
'Meta': {'object_name': 'MaternalRequisition'},
'clinician_initials': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'comments': ('django.db.models.fields.TextField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'drawn_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'estimated_volume': ('django.db.models.fields.DecimalField', [], {'default': '5.0', 'max_digits': '7', 'decimal_places': '1'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'is_drawn': ('django.db.models.fields.CharField', [], {'default': "'Yes'", 'max_length': '3'}),
'item_count': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'item_type': ('django.db.models.fields.CharField', [], {'default': "'tube'", 'max_length': '25'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'panel': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lab_panel.Panel']"}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'normal'", 'max_length': '25'}),
'protocol': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'reason_not_drawn': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'requisition_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'requisition_identifier': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'sample_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lab_aliquot_list.AliquotType']"}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['bhp_variables.StudySite']"}),
'test_code': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lab_test_code.TestCode']", 'null': 'True', 'blank': 'True'}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'mpepu_lab.maternalrequisitionaudit': {
'Meta': {'ordering': "['-_audit_timestamp']", 'object_name': 'MaternalRequisitionAudit', 'db_table': "'mpepu_lab_maternalrequisition_audit'"},
'_audit_change_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'_audit_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'_audit_subject_identifier': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'_audit_timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'clinician_initials': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'comments': ('django.db.models.fields.TextField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'drawn_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'estimated_volume': ('django.db.models.fields.DecimalField', [], {'default': '5.0', 'max_digits': '7', 'decimal_places': '1'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'blank': 'True'}),
'is_drawn': ('django.db.models.fields.CharField', [], {'default': "'Yes'", 'max_length': '3'}),
'item_count': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'item_type': ('django.db.models.fields.CharField', [], {'default': "'tube'", 'max_length': '25'}),
'maternal_visit': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalrequisition'", 'to': "orm['mpepu_maternal.MaternalVisit']"}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'panel': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalrequisition'", 'to': "orm['lab_panel.Panel']"}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'normal'", 'max_length': '25'}),
'protocol': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'reason_not_drawn': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'requisition_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'requisition_identifier': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'sample_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalrequisition'", 'to': "orm['lab_aliquot_list.AliquotType']"}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'_audit_maternalrequisition'", 'to': "orm['bhp_variables.StudySite']"}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
},
'mpepu_maternal.maternalvisit': {
'Meta': {'object_name': 'MaternalVisit'},
'appointment': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['bhp_appointment.Appointment']", 'unique': 'True'}),
'comments': ('django.db.models.fields.TextField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'hostname_created': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'hostname_modified': ('django.db.models.fields.CharField', [], {'default': "'home'", 'max_length': '50', 'blank': 'True'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '36', 'primary_key': 'True'}),
'info_source': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'info_source_other': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'reason_missed': ('django.db.models.fields.CharField', [], {'max_length': '35', 'null': 'True', 'blank': 'True'}),
'report_datetime': ('django.db.models.fields.DateTimeField', [], {}),
'user_created': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'}),
'user_modified': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250'})
}
}
complete_apps = ['mpepu_lab']
| [
"[email protected]"
]
| |
a9595644a256d97d832d311804226c979060f5e6 | 351fa4edb6e904ff1ac83c6a790deaa7676be452 | /misc/maxAreaOfCake/Solution.py | 3facaa570babac01ef0de3db2aefe3661017d523 | [
"MIT"
]
| permissive | shahbagdadi/py-algo-n-ds | 42981a61631e1a9af7d5ac73bdc894ac0c2a1586 | f3026631cd9f3c543250ef1e2cfdf2726e0526b8 | refs/heads/master | 2022-11-27T19:13:47.348893 | 2022-11-14T21:58:51 | 2022-11-14T21:58:51 | 246,944,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | py | from typing import List
class Solution:
    def maxArea(self, h: int, w: int, horizontalCuts: List[int], verticalCuts: List[int]) -> int:
        """Return the area of the largest cake piece after all cuts.

        The widest slice along each axis is the largest gap between
        consecutive cut positions, treating the cake edges (0 and h/w)
        as implicit boundaries; the answer is the product of the two
        maximal gaps.

        NOTE(review): LeetCode 1465 asks for this value modulo 10**9+7;
        the original returned the raw product, which is preserved here —
        confirm which contract callers expect.

        Fixes vs. original: the two copy-pasted gap loops shadowed the
        ``h`` and ``w`` parameters with their loop variables; the logic
        now lives in one helper.
        """
        return self._max_gap(horizontalCuts, h) * self._max_gap(verticalCuts, w)

    @staticmethod
    def _max_gap(cuts: List[int], edge: int) -> int:
        """Largest distance between consecutive cut positions on [0, edge]."""
        best = 0
        prev = 0
        for pos in sorted(cuts) + [edge]:
            best = max(best, pos - prev)
            prev = pos
        return best
# Ad-hoc smoke test that runs on import (LeetCode 1465 example 1);
# the fixture below should yield 4 when printed.
s = Solution()
hc = [1,2,4]
vc = [1,3]
# Alternate fixture (example 2; expected result 6):
# hc = [3,1]
# vc = [1]
ans = s.maxArea(5,4,hc,vc)
print(ans) | [
"[email protected]"
]
| |
ef80cd1299365e03fba1fc377ac924f891081870 | c36679186f669c6e3bd1c106c96d4a17be1f5ab1 | /Hindi/8.py | dea37b09eac4d20e8cdc9749fa9f9f0062527b90 | []
| no_license | touhiduzzaman-tuhin/python-code-university-life | 60a3d671b200a6f5222c6d176c13c5f20f013509 | 6d2e3d90d430faa5c83fe79e7fb1ebe516994762 | refs/heads/master | 2023-03-22T15:18:10.636203 | 2021-03-06T18:52:04 | 2021-03-06T18:52:04 | 332,467,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33 | py | # This is Comment
print("Tuhin") | [
"[email protected]"
]
| |
35289460658e0ee7b729775ed7615e52d9c16be0 | 1525eeb085814724cd2678d1d81871c5c4b6eaee | /urls.py | 9570812ad1ee668bdf30070e089f4461d26c9d29 | []
| no_license | djangorobert/datauploader | b6156b4de7490ae9669ce724f6f727d7b5a68a5c | 381edfc76b293038ccabea134face10f15ec1310 | refs/heads/master | 2020-03-28T07:17:45.135728 | 2018-09-13T15:41:21 | 2018-09-13T15:41:21 | 147,892,502 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 422 | py | from django.conf.urls import url
from datauploader.views import UploadFileView, SubmissionListView
from datauploader import views
urlpatterns = [
url(r'^upload/$', UploadFileView.as_view(), name='upload'),
url(r'^list/$', SubmissionListView.as_view(), name='list'),
url(r'^(?P<pk>\d+)\$', views.SubmissionDetailView.as_view(), name='detail'),
url(r'^submitted/$', views.submitted, name='submitted'),
] | [
"[email protected]"
]
| |
1350572da4d56758a9a53a26cec9c22952a3686a | cecd66e056674fe0e8f83eb24f0f8f076304ef02 | /meidoo/meidoo/apps/payment/models.py | d05bd14d345e128c9c918f07188544b66e2394ce | [
"MIT"
]
| permissive | amourbrus/meiduo_mall | 30b2aac92685df5ef119b57cb653ff5f7eabcb3a | 965b3d4685d1a8fe18a3177cc864f27eeb516081 | refs/heads/master | 2020-03-23T17:42:54.552801 | 2018-07-22T09:23:37 | 2018-07-22T09:23:37 | 141,871,772 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 537 | py | from django.db import models
# Create your models here.
from orders.models import OrderInfo
from meidoo.utils.models import BaseModel # check
class Payment(BaseModel):
    """
    Payment record for an order (original docstring: 支付信息).

    NOTE(review): BaseModel presumably contributes shared audit columns
    (e.g. created/modified timestamps) — confirm in meidoo.utils.models.
    """
    # Each payment row belongs to one order; payments are deleted together
    # with their order (CASCADE).
    order = models.ForeignKey(OrderInfo, on_delete=models.CASCADE, verbose_name='订单')
    # Gateway-assigned transaction number; unique when present, but left
    # NULL/blank until the payment provider reports back.
    trade_id = models.CharField(max_length=100, unique=True, null=True, blank=True, verbose_name="支付编号")
    class Meta:
        # Explicit table name plus admin display labels (the labels are
        # user-facing runtime strings, so they stay in Chinese).
        db_table = 'tb_payment'
        verbose_name = '支付信息'
verbose_name_plural = verbose_name | [
"[email protected]"
]
| |
1a8f4c33617b29b06b3d18836ddac3326c605b11 | 09e5cfe06e437989a2ccf2aeecb9c73eb998a36c | /modules/cctbx_project/libtbx/command_line/create_unzipsfx.py | 726675faa9cac376b1054e69d2966926e398a772 | [
"BSD-3-Clause-LBNL",
"BSD-3-Clause"
]
| permissive | jorgediazjr/dials-dev20191018 | b81b19653624cee39207b7cefb8dfcb2e99b79eb | 77d66c719b5746f37af51ad593e2941ed6fbba17 | refs/heads/master | 2020-08-21T02:48:54.719532 | 2020-01-25T01:41:37 | 2020-01-25T01:41:37 | 216,089,955 | 0 | 1 | BSD-3-Clause | 2020-01-25T01:41:39 | 2019-10-18T19:03:17 | Python | UTF-8 | Python | false | false | 1,342 | py | from __future__ import absolute_import, division, print_function
import libtbx.path
import sys
buf_size = 1000000

def copy(src, dest):
  """Stream the readable *src* to the writable *dest* in buf_size chunks.

  Bug fix: the original loop tested ``buf == ""`` for end-of-file, which
  is never true for the binary (bytes) streams this script opens under
  Python 3 — ``b"" == ""`` is False — so the loop would spin forever.
  ``if not buf`` terminates correctly for both str and bytes streams.
  """
  while True:
    buf = src.read(buf_size)
    if not buf: break
    dest.write(buf)
def find_unzipsfx():
  """Return the path of the first unzipsfx stub found, or None.

  Candidates are tried in preference order (autorun-enabled variants
  first); the current directory is searched before the regular command
  path, matching the original lookup.
  """
  candidates = ("unzipsfx_autorun_yes.exe",
                "unzipsfx_autorun.exe",
                "unzipsfx.exe")
  hits = (libtbx.path.full_command_path(command=name, search_first=["."])
          for name in candidates)
  return next((hit for hit in hits if hit is not None), None)
def create(zip_file_name, path_unzipsfx_exe=None):
  """Build a self-extracting <name>.exe next to <name>.zip.

  Concatenates the unzipsfx stub executable with the zip archive, which
  is the documented Info-ZIP way of producing a self-extractor.

  :param zip_file_name: archive to wrap; must end in ".zip".
  :param path_unzipsfx_exe: stub executable to prepend; located via
    find_unzipsfx() when None.
  :raises RuntimeError: if no unzipsfx stub can be found.

  Fix vs. original: the two source files were opened inline and never
  closed, and exe_file leaked if copy() raised; "with" blocks now
  guarantee all three handles are closed.
  """
  if (path_unzipsfx_exe is None):
    path_unzipsfx_exe = find_unzipsfx()
  if (path_unzipsfx_exe is None):
    raise RuntimeError("Fatal: unzipsfx executable not found.")
  assert zip_file_name.endswith(".zip")
  exe_file_name = zip_file_name[:-4] + ".exe"
  with open(exe_file_name, "wb") as exe_file:
    with open(path_unzipsfx_exe, "rb") as stub:
      copy(stub, exe_file)
    with open(zip_file_name, "rb") as archive:
      copy(archive, exe_file)
def run(args):
  "usage: libtbx.create_unzipsfx [path_unzipsfx_exe] zip_file_name"
  # Show usage for an explicit help request or a wrong argument count
  # (the docstring above doubles as the usage message, so it is printed
  # verbatim at runtime and must stay unchanged).
  wants_help = "-h" in args or "--help" in args
  if len(args) not in (1, 2) or wants_help:
    print(run.__doc__)
    return
  if len(args) == 2:
    # Explicit stub path given first, then the archive.
    create(zip_file_name=args[1], path_unzipsfx_exe=args[0])
  else:
    create(zip_file_name=args[0])
if __name__ == "__main__":
  # Script entry point: forward the CLI arguments (minus the program name).
  run(sys.argv[1:])
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.