Dataset schema (one line per column; ranges are min – max; "nullable" marks columns that contain nulls):

| column | dtype | range / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 – 616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0 – 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 – 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M, nullable |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50, nullable |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19, nullable |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 value |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | length 3 – 10.2M |
| authors | list | length 1 |
| author_id | string | length 1 – 132 |
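A minimal sketch of how records with this schema could be streamed and filtered, assuming the dump comes from a Hugging Face `datasets`-compatible source (the dataset path below is a placeholder, not the real source):

from datasets import load_dataset

# Placeholder dataset path -- substitute the actual source of this dump.
ds = load_dataset("org/code-dataset", split="train", streaming=True)

# Keep small, hand-written Python files that carry a detected license.
for row in ds:
    if (row["detected_licenses"]
            and not row["is_vendor"]
            and not row["is_generated"]
            and row["length_bytes"] < 1000):
        print(row["repo_name"], row["path"], row["length_bytes"])
        break

The sample rows below show such records, with the content column rendered inline.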
e2141bfbe1940d48e60d545306ad35b1aa55f3e8 | 60f3c767c9f1a700c9e67dac606b8ee3bc46450d | /example.py | bb8e0450c336caa9837456280eb09470e3379615 | [] | no_license | codesharedot/Quadratic-Line-Chart-Sandra | 57b999e12d7ae20b3f907697b2f739c64a45db11 | 9e4eae6d10fc4001464a80de7c7cf5c4e2d6b115 | refs/heads/master | 2020-07-26T12:24:34.892400 | 2019-09-15T19:04:04 | 2019-09-15T19:04:04 | 208,642,944 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 147 | py | import matplotlib.pyplot as plt
import numpy as np
x = np.linspace(-1, 1, 50)
y = 9*x*x
plt.plot(x, y, 'c-', linewidth=10)
plt.savefig('chart.png') | [
"[email protected]"
] | |
6796233cc8e0d68532199b60208872e887b79dbe | 8af6f0195e94908482ca7236bcd2eae382605fa7 | /python3code/chapter03/fibs.py | 82488642ecd1ea7d7ff1edce7bf88be46820530f | [] | no_license | MeatStack/StarterLearningPython | 4a1e0fc94c4615022ba9ff41455c4e67bd16a5bd | 98f0a9028f40db189cf2636a5e0c3abbcd86f71d | refs/heads/master | 2020-03-23T16:21:02.884442 | 2018-07-21T11:24:11 | 2018-07-21T11:24:11 | 141,805,470 | 1 | 0 | null | 2018-07-21T11:15:42 | 2018-07-21T11:15:42 | null | UTF-8 | Python | false | false | 191 | py | # coding=utf-8
'''
filename: fibs.py
'''
def fibs(n):
result = [0,1]
for i in range(n-2):
result.append(result[-2] + result[-1])
return result
lst = fibs(10)
print(lst)
| [
"[email protected]"
] | |
641eb5e4ce8f4443864024b99da2a1c4b80e0d83 | 167face5e34f69ba36b8a8d93306387dcaa50d24 | /15formatando_strings.py | 1061eb1748036704fe55492e86c058ee0f7e4ae9 | [] | no_license | william-cirico/python-study | 4fbe20936c46af6115f0d88ad861c71e6273db71 | 5923268fea4c78707fe82f1f609535a69859d0df | refs/heads/main | 2023-04-19T03:49:23.237829 | 2021-05-03T01:24:56 | 2021-05-03T01:24:56 | 309,492,617 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | # It is possible to format strings in the following ways:
nome = "William Círico"
idade = 20
peso = 70.31287418293472
print("Nome: ", nome, "Idade: ", idade, "Peso: ", peso)
print("Nome: {0} Idade: {1} Peso: {2}".format(nome, idade, peso))
print("Nome: {n} Idade: {i} Peso: {p}".format(n=nome, i=idade, p=peso))
print(f"Nome: {nome} Idade: {idade} Peso: {peso:.2f}") | [
"[email protected]"
] | |
17ebc93a0e4a5b9f3bdb7c23942b97a73909d91d | 0bc4391986b15c706a77e5df314ec83e84375c54 | /articles/migrations/0002_article_image_thumbnail.py | dd12130bb4ff92b2ae300134423a7f1d034fcd9b | [] | no_license | ssshhh0402/django-crud | a6d1a0872942c6215b1130a44ae335182c42937d | da292c07c9f77526bee8cbbec07d37ea8464d6af | refs/heads/master | 2022-05-02T12:07:26.518798 | 2019-09-23T06:26:43 | 2019-09-23T06:26:43 | 203,089,241 | 0 | 0 | null | 2022-04-22T22:11:46 | 2019-08-19T03:07:54 | HTML | UTF-8 | Python | false | false | 443 | py | # Generated by Django 2.2.4 on 2019-09-23 06:07
from django.db import migrations
import imagekit.models.fields
class Migration(migrations.Migration):
dependencies = [
('articles', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='article',
name='image_thumbnail',
field=imagekit.models.fields.ProcessedImageField(blank=True, upload_to=''),
),
]
| [
"[email protected]"
] | |
df4e2b89e5e838494485cf479d6d0589536e3838 | fa76cf45d7bf4ed533e5a776ecd52cea15da8c90 | /robotframework-ls/src/robotframework_debug_adapter/vendored/force_pydevd.py | 93bcca4fb794844f5a72a146f94071d71202e7a7 | [
"Apache-2.0"
] | permissive | martinRenou/robotframework-lsp | 8a5d63b7cc7d320c9fed2372a79c8c6772d6481e | 5f23b7374139e83d0aa1ebd30675e762d7a0db86 | refs/heads/master | 2023-08-18T22:26:01.386975 | 2021-10-25T13:46:11 | 2021-10-25T13:46:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,358 | py | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, division, print_function, unicode_literals
import contextlib
from importlib import import_module
import os
import sys
VENDORED_ROOT = os.path.dirname(os.path.abspath(__file__))
def project_root(project):
"""Return the path to the root dir of the vendored project.
If "project" is an empty string then the path prefix for vendored
projects (e.g. "robotframework_debug_adapter/_vendored/") will be returned.
"""
if not project:
project = ""
return os.path.join(VENDORED_ROOT, project)
@contextlib.contextmanager
def vendored(project, root=None):
"""A context manager under which the vendored project will be imported."""
if root is None:
root = project_root(project)
# Add the vendored project directory, so that it gets tried first.
sys.path.insert(0, root)
try:
yield root
finally:
sys.path.remove(root)
def preimport(project, modules, **kwargs):
"""Import each of the named modules out of the vendored project."""
with vendored(project, **kwargs):
for name in modules:
import_module(name)
try:
import pydevd # noqa
except ImportError:
pydevd_available = False
else:
pydevd_available = True
if not pydevd_available:
# Constants must be set before importing any other pydevd module
# # due to heavy use of "from" in them.
with vendored("vendored_pydevd"):
try:
pydevd_constants = import_module("_pydevd_bundle.pydevd_constants")
except ImportError as e:
contents = os.listdir(VENDORED_ROOT)
for c in contents[:]:
if os.path.isdir(c):
contents.append(f"{c}/{os.listdir(c)}")
else:
contents.append(c)
s = "\n".join(contents)
msg = f"Vendored root: {VENDORED_ROOT} -- contents:\n{s}"
raise ImportError(msg) from e
# Now make sure all the top-level modules and packages in pydevd are
# loaded. Any pydevd modules that aren't loaded at this point, will
# be loaded using their parent package's __path__ (i.e. one of the
# following).
preimport(
"vendored_pydevd",
[
"_pydev_bundle",
"_pydev_imps",
"_pydev_runfiles",
"_pydevd_bundle",
"_pydevd_frame_eval",
"pydev_ipython",
"pydevd_concurrency_analyser",
"pydevd_plugins",
"pydevd",
],
)
import pydevd # noqa
# Ensure that pydevd uses JSON protocol by default.
from _pydevd_bundle import pydevd_constants
from _pydevd_bundle import pydevd_defaults
pydevd_defaults.PydevdCustomization.DEFAULT_PROTOCOL = (
pydevd_constants.HTTP_JSON_PROTOCOL
)
from robocorp_ls_core.debug_adapter_core.dap.dap_base_schema import (
BaseSchema as RobotSchema,
)
from _pydevd_bundle._debug_adapter.pydevd_base_schema import BaseSchema as PyDevdSchema
PyDevdSchema._obj_id_to_dap_id = RobotSchema._obj_id_to_dap_id
PyDevdSchema._dap_id_to_obj_id = RobotSchema._dap_id_to_obj_id
PyDevdSchema._next_dap_id = RobotSchema._next_dap_id
| [
"[email protected]"
] | |
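A short usage sketch for the vendored() helper defined in the file above. This is illustrative only: it assumes the package is importable under the path shown in the row, and note that importing force_pydevd itself already triggers the vendoring side effects.

# Hypothetical usage: resolve imports against the bundled pydevd copy
# without permanently editing sys.path.
from robotframework_debug_adapter.vendored.force_pydevd import vendored

with vendored("vendored_pydevd"):
    import pydevd  # looked up in the vendored directory first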
b2fa1c2267c4363c4044bbd0a1256ecebf629f01 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/DRAFT-MSDP-MIB.py | af1baa3acc14379cc42129394496b65eb61a6067 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 31,396 | py | #
# PySNMP MIB module DRAFT-MSDP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DRAFT-MSDP-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:54:19 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
MibIdentifier, TimeTicks, Counter32, Bits, ModuleIdentity, Counter64, NotificationType, Gauge32, iso, experimental, MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, ObjectIdentity, IpAddress, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "TimeTicks", "Counter32", "Bits", "ModuleIdentity", "Counter64", "NotificationType", "Gauge32", "iso", "experimental", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "ObjectIdentity", "IpAddress", "Unsigned32")
TextualConvention, RowStatus, TruthValue, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "RowStatus", "TruthValue", "DisplayString")
msdpMIB = ModuleIdentity((1, 3, 6, 1, 3, 92))
if mibBuilder.loadTexts: msdpMIB.setLastUpdated('9912160000Z')
if mibBuilder.loadTexts: msdpMIB.setOrganization('IETF MSDP Working Group')
if mibBuilder.loadTexts: msdpMIB.setContactInfo(' Bill Fenner 75 Willow Road Menlo Park, CA 94025 Phone: +1 650 867 6073 E-mail: [email protected] Dave Thaler One Microsoft Way Redmond, WA 98052 Phone: +1 425 703 8835 Email: [email protected]')
if mibBuilder.loadTexts: msdpMIB.setDescription('An experimental MIB module for MSDP Management.')
msdpMIBobjects = MibIdentifier((1, 3, 6, 1, 3, 92, 1))
msdp = MibIdentifier((1, 3, 6, 1, 3, 92, 1, 1))
msdpEnabled = MibScalar((1, 3, 6, 1, 3, 92, 1, 1, 1), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: msdpEnabled.setStatus('current')
if mibBuilder.loadTexts: msdpEnabled.setDescription('The state of MSDP on this MSDP speaker - globally enabled or disabled.')
msdpCacheLifetime = MibScalar((1, 3, 6, 1, 3, 92, 1, 1, 2), TimeTicks()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: msdpCacheLifetime.setStatus('current')
if mibBuilder.loadTexts: msdpCacheLifetime.setDescription('The lifetime given to SA cache entries when created or refreshed. A value of 0 means no SA caching is done by this MSDP speaker.')
msdpNumSACacheEntries = MibScalar((1, 3, 6, 1, 3, 92, 1, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpNumSACacheEntries.setStatus('current')
if mibBuilder.loadTexts: msdpNumSACacheEntries.setDescription('The total number of entries in the SA Cache table.')
msdpSAHoldDownPeriod = MibScalar((1, 3, 6, 1, 3, 92, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(90)).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpSAHoldDownPeriod.setStatus('current')
if mibBuilder.loadTexts: msdpSAHoldDownPeriod.setDescription('The number of seconds in the MSDP SA Hold-down period')
msdpRequestsTable = MibTable((1, 3, 6, 1, 3, 92, 1, 1, 4), )
if mibBuilder.loadTexts: msdpRequestsTable.setStatus('current')
if mibBuilder.loadTexts: msdpRequestsTable.setDescription('The (conceptual) table listing group ranges and MSDP peers used when deciding where to send an SA Request message when required. If SA Caching is enabled, this table may be empty.')
msdpRequestsEntry = MibTableRow((1, 3, 6, 1, 3, 92, 1, 1, 4, 1), ).setIndexNames((0, "DRAFT-MSDP-MIB", "msdpRequestsGroupAddress"), (0, "DRAFT-MSDP-MIB", "msdpRequestsGroupMask"))
if mibBuilder.loadTexts: msdpRequestsEntry.setStatus('current')
if mibBuilder.loadTexts: msdpRequestsEntry.setDescription('An entry (conceptual row) representing a group range used when deciding where to send an SA Request message.')
msdpRequestsGroupAddress = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 4, 1, 1), IpAddress())
if mibBuilder.loadTexts: msdpRequestsGroupAddress.setStatus('current')
if mibBuilder.loadTexts: msdpRequestsGroupAddress.setDescription('The group address that, when combined with the mask in this entry, represents the group range for which this peer will service MSDP SA Requests.')
msdpRequestsGroupMask = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 4, 1, 2), IpAddress())
if mibBuilder.loadTexts: msdpRequestsGroupMask.setStatus('current')
if mibBuilder.loadTexts: msdpRequestsGroupMask.setDescription('The mask that, when combined with the group address in this entry, represents the group range for which this peer will service MSDP SA Requests.')
msdpRequestsPeer = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 4, 1, 3), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: msdpRequestsPeer.setStatus('current')
if mibBuilder.loadTexts: msdpRequestsPeer.setDescription("The peer to which MSDP SA Requests for groups matching this entry's group range will be sent. Must match the INDEX of a row in the msdpPeerTable.")
msdpRequestsStatus = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 4, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: msdpRequestsStatus.setStatus('current')
if mibBuilder.loadTexts: msdpRequestsStatus.setDescription('The status of this row, by which new rows may be added to the table.')
msdpPeerTable = MibTable((1, 3, 6, 1, 3, 92, 1, 1, 5), )
if mibBuilder.loadTexts: msdpPeerTable.setStatus('current')
if mibBuilder.loadTexts: msdpPeerTable.setDescription("The (conceptual) table listing the MSDP speaker's peers.")
msdpPeerEntry = MibTableRow((1, 3, 6, 1, 3, 92, 1, 1, 5, 1), ).setIndexNames((0, "DRAFT-MSDP-MIB", "msdpPeerRemoteAddress"))
if mibBuilder.loadTexts: msdpPeerEntry.setStatus('current')
if mibBuilder.loadTexts: msdpPeerEntry.setDescription('An entry (conceptual row) representing an MSDP peer.')
msdpPeerRemoteAddress = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 1), IpAddress())
if mibBuilder.loadTexts: msdpPeerRemoteAddress.setStatus('current')
if mibBuilder.loadTexts: msdpPeerRemoteAddress.setDescription('The address of the remote MSDP peer.')
msdpPeerState = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("inactive", 1), ("listen", 2), ("connecting", 3), ("established", 4), ("disabled", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerState.setStatus('current')
if mibBuilder.loadTexts: msdpPeerState.setDescription('The state of the MSDP TCP connection with this peer.')
msdpPeerRPFFailures = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerRPFFailures.setStatus('current')
if mibBuilder.loadTexts: msdpPeerRPFFailures.setDescription('The number of RPF failures on SA messages received from this peer.')
msdpPeerInSAs = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerInSAs.setStatus('current')
if mibBuilder.loadTexts: msdpPeerInSAs.setDescription('The number of MSDP SA messages received on this connection. This object should be initialized to zero when the connection is established.')
msdpPeerOutSAs = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerOutSAs.setStatus('current')
if mibBuilder.loadTexts: msdpPeerOutSAs.setDescription('The number of MSDP SA messages transmitted on this connection. This object should be initialized to zero when the connection is established.')
msdpPeerInSARequests = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerInSARequests.setStatus('current')
if mibBuilder.loadTexts: msdpPeerInSARequests.setDescription('The number of MSDP SA-Request messages received on this connection. This object should be initialized to zero when the connection is established.')
msdpPeerOutSARequests = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerOutSARequests.setStatus('current')
if mibBuilder.loadTexts: msdpPeerOutSARequests.setDescription('The number of MSDP SA-Request messages transmitted on this connection. This object should be initialized to zero when the connection is established.')
msdpPeerInSAResponses = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerInSAResponses.setStatus('current')
if mibBuilder.loadTexts: msdpPeerInSAResponses.setDescription('The number of MSDP SA-Response messages received on this connection. This object should be initialized to zero when the connection is established.')
msdpPeerOutSAResponses = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerOutSAResponses.setStatus('current')
if mibBuilder.loadTexts: msdpPeerOutSAResponses.setDescription('The number of MSDP SA Response messages transmitted on this TCP connection. This object should be initialized to zero when the connection is established.')
msdpPeerInControlMessages = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerInControlMessages.setStatus('current')
if mibBuilder.loadTexts: msdpPeerInControlMessages.setDescription('The total number of MSDP messages received on this TCP connection. This object should be initialized to zero when the connection is established.')
msdpPeerOutControlMessages = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerOutControlMessages.setStatus('current')
if mibBuilder.loadTexts: msdpPeerOutControlMessages.setDescription('The total number of MSDP messages transmitted on this TCP connection. This object should be initialized to zero when the connection is established.')
msdpPeerInDataPackets = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerInDataPackets.setStatus('current')
if mibBuilder.loadTexts: msdpPeerInDataPackets.setDescription('The total number of encapsulated data packets received from this peer. This object should be initialized to zero when the connection is established.')
msdpPeerOutDataPackets = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerOutDataPackets.setStatus('current')
if mibBuilder.loadTexts: msdpPeerOutDataPackets.setDescription('The total number of encapsulated data packets sent to this peer. This object should be initialized to zero when the connection is established.')
msdpPeerFsmEstablishedTransitions = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerFsmEstablishedTransitions.setStatus('current')
if mibBuilder.loadTexts: msdpPeerFsmEstablishedTransitions.setDescription('The total number of times the MSDP FSM transitioned into the established state.')
msdpPeerFsmEstablishedTime = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 16), Gauge32()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerFsmEstablishedTime.setStatus('current')
if mibBuilder.loadTexts: msdpPeerFsmEstablishedTime.setDescription('This timer indicates how long (in seconds) this peer has been in the Established state or how long since this peer was last in the Established state. It is set to zero when a new peer is configured or the MSDP speaker is booted.')
msdpPeerInMessageElapsedTime = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 17), Gauge32()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerInMessageElapsedTime.setStatus('current')
if mibBuilder.loadTexts: msdpPeerInMessageElapsedTime.setDescription('Elapsed time in seconds since the last MSDP message was received from the peer. Each time msdpPeerInControlMessages is incremented, the value of this object is set to zero (0).')
msdpPeerLocalAddress = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 18), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: msdpPeerLocalAddress.setStatus('current')
if mibBuilder.loadTexts: msdpPeerLocalAddress.setDescription("The local IP address of this entry's MSDP connection.")
msdpPeerSAAdvPeriod = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)).clone(60)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: msdpPeerSAAdvPeriod.setStatus('current')
if mibBuilder.loadTexts: msdpPeerSAAdvPeriod.setDescription('Time interval in seconds for the MinSAAdvertisementInterval MSDP timer.')
msdpPeerConnectRetryInterval = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(120)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: msdpPeerConnectRetryInterval.setStatus('current')
if mibBuilder.loadTexts: msdpPeerConnectRetryInterval.setDescription('Time interval in seconds for the ConnectRetry timer.')
msdpPeerHoldTimeConfigured = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(3, 65535), )).clone(90)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: msdpPeerHoldTimeConfigured.setStatus('current')
if mibBuilder.loadTexts: msdpPeerHoldTimeConfigured.setDescription('Time interval in seconds for the Hold Timer configured for this MSDP speaker with this peer.')
msdpPeerKeepAliveConfigured = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(1, 21845), )).clone(30)).setUnits('seconds').setMaxAccess("readcreate")
if mibBuilder.loadTexts: msdpPeerKeepAliveConfigured.setStatus('current')
if mibBuilder.loadTexts: msdpPeerKeepAliveConfigured.setDescription('Time interval in seconds for the KeepAlive timer configured for this MSDP speaker with this peer. A reasonable maximum value for this timer would be configured to be one third of that of msdpPeerHoldTimeConfigured. If the value of this object is zero (0), no periodic KEEPALIVE messages are sent to the peer after the MSDP connection has been established.')
msdpPeerDataTtl = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: msdpPeerDataTtl.setStatus('current')
if mibBuilder.loadTexts: msdpPeerDataTtl.setDescription('The minimum TTL a packet is required to have before it may be forwarded using SA encapsulation to this peer.')
msdpPeerProcessRequestsFrom = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 24), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: msdpPeerProcessRequestsFrom.setStatus('current')
if mibBuilder.loadTexts: msdpPeerProcessRequestsFrom.setDescription('This object indicates whether or not to process MSDP SA Request messages from this peer. If True(1), MSDP SA Request messages from this peer are processed and replied to (if appropriate) with SA Response messages. If False(2), MSDP SA Request messages from this peer are silently ignored. It defaults to False when msdpCacheLifetime is 0 and True when msdpCacheLifetime is non-0.')
msdpPeerStatus = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 25), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: msdpPeerStatus.setStatus('current')
if mibBuilder.loadTexts: msdpPeerStatus.setDescription("The RowStatus object by which peers can be added and deleted. A transition to 'active' will cause the MSDP Start Event to be generated. A transition out of the 'active' state will cause the MSDP Stop Event to be generated. Care should be used in providing write access to this object without adequate authentication.")
msdpPeerRemotePort = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 26), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerRemotePort.setStatus('current')
if mibBuilder.loadTexts: msdpPeerRemotePort.setDescription('The remote port for the TCP connection between the MSDP peers.')
msdpPeerLocalPort = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 27), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerLocalPort.setStatus('current')
if mibBuilder.loadTexts: msdpPeerLocalPort.setDescription('The local port for the TCP connection between the MSDP peers.')
msdpPeerEncapsulationState = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("default", 1), ("received", 2), ("advertising", 3), ("sent", 4), ("agreed", 5), ("failed", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerEncapsulationState.setStatus('current')
if mibBuilder.loadTexts: msdpPeerEncapsulationState.setDescription('The status of the encapsulation negotiation state machine.')
msdpPeerEncapsulationType = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("tcp", 1), ("udp", 2), ("gre", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerEncapsulationType.setStatus('current')
if mibBuilder.loadTexts: msdpPeerEncapsulationType.setDescription('The encapsulation in use when encapsulating data in SA messages to this peer.')
msdpPeerConnectionAttempts = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerConnectionAttempts.setStatus('current')
if mibBuilder.loadTexts: msdpPeerConnectionAttempts.setDescription('The number of times the state machine has transitioned from inactive to connecting.')
msdpPeerInNotifications = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerInNotifications.setStatus('current')
if mibBuilder.loadTexts: msdpPeerInNotifications.setDescription('The number of MSDP Notification messages received on this connection. This object should be initialized to zero when the connection is established.')
msdpPeerOutNotifications = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerOutNotifications.setStatus('current')
if mibBuilder.loadTexts: msdpPeerOutNotifications.setDescription('The number of MSDP Notification messages transmitted on this connection. This object should be initialized to zero when the connection is established.')
msdpPeerLastError = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 5, 1, 33), OctetString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2).clone(hexValue="0000")).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpPeerLastError.setStatus('current')
if mibBuilder.loadTexts: msdpPeerLastError.setDescription('The last error code and subcode seen by this peer on this connection. If no error has occurred, this field is zero. Otherwise, the first byte of this two byte OCTET STRING contains the error code, and the second byte contains the subcode.')
msdpSACacheTable = MibTable((1, 3, 6, 1, 3, 92, 1, 1, 6), )
if mibBuilder.loadTexts: msdpSACacheTable.setStatus('current')
if mibBuilder.loadTexts: msdpSACacheTable.setDescription("The (conceptual) table listing the MSDP SA advertisements currently in the MSDP speaker's cache.")
msdpSACacheEntry = MibTableRow((1, 3, 6, 1, 3, 92, 1, 1, 6, 1), ).setIndexNames((0, "DRAFT-MSDP-MIB", "msdpSACacheGroupAddr"), (0, "DRAFT-MSDP-MIB", "msdpSACacheSourceAddr"), (0, "DRAFT-MSDP-MIB", "msdpSACacheOriginRP"))
if mibBuilder.loadTexts: msdpSACacheEntry.setStatus('current')
if mibBuilder.loadTexts: msdpSACacheEntry.setDescription('An entry (conceptual row) representing an MSDP SA advert.')
msdpSACacheGroupAddr = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 6, 1, 1), IpAddress())
if mibBuilder.loadTexts: msdpSACacheGroupAddr.setStatus('current')
if mibBuilder.loadTexts: msdpSACacheGroupAddr.setDescription('The group address of the SA Cache entry.')
msdpSACacheSourceAddr = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 6, 1, 2), IpAddress())
if mibBuilder.loadTexts: msdpSACacheSourceAddr.setStatus('current')
if mibBuilder.loadTexts: msdpSACacheSourceAddr.setDescription('The source address of the SA Cache entry.')
msdpSACacheOriginRP = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 6, 1, 3), IpAddress())
if mibBuilder.loadTexts: msdpSACacheOriginRP.setStatus('current')
if mibBuilder.loadTexts: msdpSACacheOriginRP.setDescription('The address of the RP which originated the last SA message accepted for this entry.')
msdpSACachePeerLearnedFrom = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 6, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpSACachePeerLearnedFrom.setStatus('current')
if mibBuilder.loadTexts: msdpSACachePeerLearnedFrom.setDescription('The peer from which this SA Cache entry was last accepted. This address must correspond to the msdpPeerRemoteAddress value for a row in the MSDP Peer Table.')
msdpSACacheRPFPeer = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 6, 1, 5), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpSACacheRPFPeer.setStatus('current')
if mibBuilder.loadTexts: msdpSACacheRPFPeer.setDescription('The peer from which an SA message corresponding to this cache entry would be accepted (i.e. the RPF peer for msdpSACacheOriginRP). This may be different than msdpSACachePeerLearnedFrom if this entry was created by an MSDP SA-Response. This address must correspond to the msdpPeerRemoteAddress value for a row in the MSDP Peer Table.')
msdpSACacheInSAs = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 6, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpSACacheInSAs.setStatus('current')
if mibBuilder.loadTexts: msdpSACacheInSAs.setDescription('The number of MSDP SA messages received relevant to this cache entry. This object must be initialized to zero when creating a cache entry.')
msdpSACacheInDataPackets = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 6, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpSACacheInDataPackets.setStatus('current')
if mibBuilder.loadTexts: msdpSACacheInDataPackets.setDescription('The number of MSDP encapsulated data packets received relevant to this cache entry. This object must be initialized to zero when creating a cache entry.')
msdpSACacheUpTime = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 6, 1, 8), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpSACacheUpTime.setStatus('current')
if mibBuilder.loadTexts: msdpSACacheUpTime.setDescription('The time since this entry was placed in the SA cache.')
msdpSACacheExpiryTime = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 6, 1, 9), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: msdpSACacheExpiryTime.setStatus('current')
if mibBuilder.loadTexts: msdpSACacheExpiryTime.setDescription('The time remaining before this entry will expire from the SA cache.')
msdpSACacheStatus = MibTableColumn((1, 3, 6, 1, 3, 92, 1, 1, 6, 1, 10), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: msdpSACacheStatus.setStatus('current')
if mibBuilder.loadTexts: msdpSACacheStatus.setDescription("The status of this row in the table. The only allowable actions are to retrieve the status, which will be `active', or to set the status to `destroy' in order to remove this entry from the cache.")
msdpTraps = MibIdentifier((1, 3, 6, 1, 3, 92, 1, 1, 7))
msdpEstablished = NotificationType((1, 3, 6, 1, 3, 92, 1, 1, 7, 1)).setObjects(("DRAFT-MSDP-MIB", "msdpPeerFsmEstablishedTransitions"))
if mibBuilder.loadTexts: msdpEstablished.setStatus('current')
if mibBuilder.loadTexts: msdpEstablished.setDescription('The MSDP Established event is generated when the MSDP FSM enters the ESTABLISHED state.')
msdpBackwardTransition = NotificationType((1, 3, 6, 1, 3, 92, 1, 1, 7, 2)).setObjects(("DRAFT-MSDP-MIB", "msdpPeerState"))
if mibBuilder.loadTexts: msdpBackwardTransition.setStatus('current')
if mibBuilder.loadTexts: msdpBackwardTransition.setDescription('The MSDPBackwardTransition Event is generated when the MSDP FSM moves from a higher numbered state to a lower numbered state.')
msdpMIBConformance = MibIdentifier((1, 3, 6, 1, 3, 92, 1, 1, 8))
msdpMIBCompliances = MibIdentifier((1, 3, 6, 1, 3, 92, 1, 1, 8, 1))
msdpMIBGroups = MibIdentifier((1, 3, 6, 1, 3, 92, 1, 1, 8, 2))
msdpMIBCompliance = ModuleCompliance((1, 3, 6, 1, 3, 92, 1, 1, 8, 1, 1)).setObjects(("DRAFT-MSDP-MIB", "msdpMIBGlobalsGroup"), ("DRAFT-MSDP-MIB", "msdpSACacheGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
msdpMIBCompliance = msdpMIBCompliance.setStatus('current')
if mibBuilder.loadTexts: msdpMIBCompliance.setDescription('The compliance statement for entities which implement the MSDP MIB.')
msdpMIBGlobalsGroup = ObjectGroup((1, 3, 6, 1, 3, 92, 1, 1, 8, 2, 1)).setObjects(("DRAFT-MSDP-MIB", "msdpEnabled"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
msdpMIBGlobalsGroup = msdpMIBGlobalsGroup.setStatus('current')
if mibBuilder.loadTexts: msdpMIBGlobalsGroup.setDescription('A collection of objects providing information on global MSDP state.')
msdpMIBPeerGroup = ObjectGroup((1, 3, 6, 1, 3, 92, 1, 1, 8, 2, 2)).setObjects(("DRAFT-MSDP-MIB", "msdpPeerRPFFailures"), ("DRAFT-MSDP-MIB", "msdpPeerState"), ("DRAFT-MSDP-MIB", "msdpPeerInSAs"), ("DRAFT-MSDP-MIB", "msdpPeerOutSAs"), ("DRAFT-MSDP-MIB", "msdpPeerInSARequests"), ("DRAFT-MSDP-MIB", "msdpPeerOutSARequests"), ("DRAFT-MSDP-MIB", "msdpPeerInSAResponses"), ("DRAFT-MSDP-MIB", "msdpPeerOutSAResponses"), ("DRAFT-MSDP-MIB", "msdpPeerInNotifications"), ("DRAFT-MSDP-MIB", "msdpPeerOutNotifications"), ("DRAFT-MSDP-MIB", "msdpPeerInControlMessages"), ("DRAFT-MSDP-MIB", "msdpPeerOutControlMessages"), ("DRAFT-MSDP-MIB", "msdpPeerInDataPackets"), ("DRAFT-MSDP-MIB", "msdpPeerOutDataPackets"), ("DRAFT-MSDP-MIB", "msdpPeerFsmEstablishedTransitions"), ("DRAFT-MSDP-MIB", "msdpPeerFsmEstablishedTime"), ("DRAFT-MSDP-MIB", "msdpPeerLocalAddress"), ("DRAFT-MSDP-MIB", "msdpPeerRemotePort"), ("DRAFT-MSDP-MIB", "msdpPeerLocalPort"), ("DRAFT-MSDP-MIB", "msdpPeerSAAdvPeriod"), ("DRAFT-MSDP-MIB", "msdpPeerConnectRetryInterval"), ("DRAFT-MSDP-MIB", "msdpPeerHoldTimeConfigured"), ("DRAFT-MSDP-MIB", "msdpPeerKeepAliveConfigured"), ("DRAFT-MSDP-MIB", "msdpPeerInMessageElapsedTime"), ("DRAFT-MSDP-MIB", "msdpPeerDataTtl"), ("DRAFT-MSDP-MIB", "msdpPeerProcessRequestsFrom"), ("DRAFT-MSDP-MIB", "msdpPeerEncapsulationState"), ("DRAFT-MSDP-MIB", "msdpPeerEncapsulationType"), ("DRAFT-MSDP-MIB", "msdpPeerConnectionAttempts"), ("DRAFT-MSDP-MIB", "msdpPeerLastError"), ("DRAFT-MSDP-MIB", "msdpPeerStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
msdpMIBPeerGroup = msdpMIBPeerGroup.setStatus('current')
if mibBuilder.loadTexts: msdpMIBPeerGroup.setDescription('A collection of objects for managing MSDP peers.')
msdpSACacheGroup = ObjectGroup((1, 3, 6, 1, 3, 92, 1, 1, 8, 2, 3)).setObjects(("DRAFT-MSDP-MIB", "msdpCacheLifetime"), ("DRAFT-MSDP-MIB", "msdpNumSACacheEntries"), ("DRAFT-MSDP-MIB", "msdpSAHoldDownPeriod"), ("DRAFT-MSDP-MIB", "msdpSACachePeerLearnedFrom"), ("DRAFT-MSDP-MIB", "msdpSACacheRPFPeer"), ("DRAFT-MSDP-MIB", "msdpSACacheInSAs"), ("DRAFT-MSDP-MIB", "msdpSACacheInDataPackets"), ("DRAFT-MSDP-MIB", "msdpSACacheUpTime"), ("DRAFT-MSDP-MIB", "msdpSACacheExpiryTime"), ("DRAFT-MSDP-MIB", "msdpSACacheStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
msdpSACacheGroup = msdpSACacheGroup.setStatus('current')
if mibBuilder.loadTexts: msdpSACacheGroup.setDescription('A collection of objects for managing MSDP SA cache entries.')
mibBuilder.exportSymbols("DRAFT-MSDP-MIB", msdpPeerRPFFailures=msdpPeerRPFFailures, msdpRequestsGroupAddress=msdpRequestsGroupAddress, msdpPeerInSAs=msdpPeerInSAs, msdpMIB=msdpMIB, msdpPeerOutDataPackets=msdpPeerOutDataPackets, msdpSACacheRPFPeer=msdpSACacheRPFPeer, msdpSACacheInDataPackets=msdpSACacheInDataPackets, msdpMIBCompliances=msdpMIBCompliances, msdpNumSACacheEntries=msdpNumSACacheEntries, msdpPeerDataTtl=msdpPeerDataTtl, msdpPeerEntry=msdpPeerEntry, msdpMIBPeerGroup=msdpMIBPeerGroup, msdpSAHoldDownPeriod=msdpSAHoldDownPeriod, msdpRequestsTable=msdpRequestsTable, msdpPeerStatus=msdpPeerStatus, msdpPeerInMessageElapsedTime=msdpPeerInMessageElapsedTime, msdpPeerTable=msdpPeerTable, msdpPeerFsmEstablishedTime=msdpPeerFsmEstablishedTime, msdpPeerKeepAliveConfigured=msdpPeerKeepAliveConfigured, msdpSACacheInSAs=msdpSACacheInSAs, msdpMIBGlobalsGroup=msdpMIBGlobalsGroup, msdpPeerOutControlMessages=msdpPeerOutControlMessages, msdpSACacheUpTime=msdpSACacheUpTime, msdpSACacheGroup=msdpSACacheGroup, msdpPeerInSARequests=msdpPeerInSARequests, msdpPeerSAAdvPeriod=msdpPeerSAAdvPeriod, msdpPeerLocalPort=msdpPeerLocalPort, msdpBackwardTransition=msdpBackwardTransition, msdpPeerOutNotifications=msdpPeerOutNotifications, msdpPeerEncapsulationState=msdpPeerEncapsulationState, msdpMIBCompliance=msdpMIBCompliance, msdpPeerProcessRequestsFrom=msdpPeerProcessRequestsFrom, msdpSACacheStatus=msdpSACacheStatus, msdpPeerRemoteAddress=msdpPeerRemoteAddress, msdpSACacheGroupAddr=msdpSACacheGroupAddr, msdpMIBConformance=msdpMIBConformance, msdp=msdp, msdpSACacheEntry=msdpSACacheEntry, msdpPeerEncapsulationType=msdpPeerEncapsulationType, msdpPeerOutSAs=msdpPeerOutSAs, msdpPeerConnectRetryInterval=msdpPeerConnectRetryInterval, msdpSACacheSourceAddr=msdpSACacheSourceAddr, msdpSACacheOriginRP=msdpSACacheOriginRP, msdpSACacheExpiryTime=msdpSACacheExpiryTime, msdpRequestsGroupMask=msdpRequestsGroupMask, msdpPeerOutSAResponses=msdpPeerOutSAResponses, msdpPeerRemotePort=msdpPeerRemotePort, msdpRequestsPeer=msdpRequestsPeer, msdpSACachePeerLearnedFrom=msdpSACachePeerLearnedFrom, msdpPeerState=msdpPeerState, msdpPeerOutSARequests=msdpPeerOutSARequests, msdpPeerInNotifications=msdpPeerInNotifications, PYSNMP_MODULE_ID=msdpMIB, msdpPeerInSAResponses=msdpPeerInSAResponses, msdpTraps=msdpTraps, msdpMIBobjects=msdpMIBobjects, msdpPeerHoldTimeConfigured=msdpPeerHoldTimeConfigured, msdpRequestsStatus=msdpRequestsStatus, msdpRequestsEntry=msdpRequestsEntry, msdpPeerConnectionAttempts=msdpPeerConnectionAttempts, msdpPeerInControlMessages=msdpPeerInControlMessages, msdpMIBGroups=msdpMIBGroups, msdpPeerLastError=msdpPeerLastError, msdpCacheLifetime=msdpCacheLifetime, msdpPeerLocalAddress=msdpPeerLocalAddress, msdpEnabled=msdpEnabled, msdpPeerInDataPackets=msdpPeerInDataPackets, msdpEstablished=msdpEstablished, msdpPeerFsmEstablishedTransitions=msdpPeerFsmEstablishedTransitions, msdpSACacheTable=msdpSACacheTable)
| [
"[email protected]"
] | |
de560c64ba52aaecaeac7ec15a5ce04eb115991c | afc8d5a9b1c2dd476ea59a7211b455732806fdfd | /Configurations/VBSjjlnu/Full2018v7/conf_test_fatjetscale_DY/configuration.py | 586bc0ae5cf8cc622910ab866255e792b1b7f1ac | [] | no_license | latinos/PlotsConfigurations | 6d88a5ad828dde4a7f45c68765081ed182fcda21 | 02417839021e2112e740607b0fb78e09b58c930f | refs/heads/master | 2023-08-18T20:39:31.954943 | 2023-08-18T09:23:34 | 2023-08-18T09:23:34 | 39,819,875 | 10 | 63 | null | 2023-08-10T14:08:04 | 2015-07-28T07:36:50 | Python | UTF-8 | Python | false | false | 950 | py | # Configuration file to produce initial root files -- has both merged and binned ggH samples
treeName = 'Events'
tag = 'DY2018_v7'
# used by mkShape to define output directory for root files
outputDir = 'rootFile'+tag
# file with TTree aliases
aliasesFile = 'aliases.py'
# file with list of variables
variablesFile = 'variables.py'
# file with list of cuts
cutsFile = 'cuts.py'
#cutsFile = 'cuts_topCR.py'
# file with list of samples
samplesFile = 'samples.py'
# file with list of samples
plotFile = 'plot.py'
# luminosity to normalize to (in 1/fb)
lumi = 59.74
# used by mkPlot to define output directory for plots
# different from "outputDir" to do things more tidy
outputDirPlots = 'plots'+tag
# used by mkDatacards to define output directory for datacards
outputDirDatacard = 'datacards'
# structure file for datacard
structureFile = 'structure.py'
# nuisances file for mkDatacards and for mkShape
nuisancesFile = 'nuisances.py'
| [
"[email protected]"
] | |
6300090e5a1167be972d853d145e04125121895d | ccbcaca6df1c3984a19f039351e29cfa81e73314 | /timetable/schedule.py | a3265c9ffcaa2c76a8c6866709dc7413cf0e18ea | [
"BSD-3-Clause"
] | permissive | pgromano/timetable | b96c6eb2da8ede8abfa211f6d54748a4a5a9c9c7 | 8fa83fa82bb2afc56f6da1b7f8e3836f2b127164 | refs/heads/master | 2021-01-21T00:22:17.376372 | 2016-08-17T14:57:25 | 2016-08-17T14:57:25 | 61,254,584 | 0 | 0 | null | 2016-06-16T02:07:07 | 2016-06-16T02:07:07 | null | UTF-8 | Python | false | false | 182 | py |
class Schedule(object):
    """Student schedule object."""

    def __init__(self):
        self._courses = []  # no courses scheduled initially

    def add(self, course):
        """Add course to schedule"""
        self._courses.append(course)

    def courses(self):
        """Return the courses on this schedule."""
        return list(self._courses)
| [
"[email protected]"
] | |
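An illustrative use of the completed Schedule class above (the course names are made up):

s = Schedule()
s.add("MATH 101")
s.add("PHYS 201")
print(s.courses())  # ['MATH 101', 'PHYS 201']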
8d8c9788a9836bac94cd547c3889d9deb500b5f6 | da437d59c9caf5d10e8c7be0e640a6c08507d2f4 | /data/CNN.py | 734b06a9792eb9f55ea0e8eb9f87d55e8548a7e5 | [] | no_license | SoliareofAstora/vision_pipeline | 9982ea7b3d2fe009102d0e712535be9bba362a1c | f7a2d76a155c8b3d863b10e7f9e1a148f98c3780 | refs/heads/main | 2023-05-08T23:36:19.403137 | 2021-06-01T14:12:31 | 2021-06-01T14:12:31 | 372,847,735 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,179 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
class CNN(nn.Module):
def __init__(self, args):
super(CNN, self).__init__()
self.criterion = torch.nn.CrossEntropyLoss()
self.args = args
self.L = self.args.L
self.D = self.args.D
self.K = self.args.K
# first_conv = 5 if args.out_loc else 3
if self.args.loc_info:
self.add = 2
else:
self.add = 0
if self.args.dataset_name == 'breast':
input_dim = 6 * 6 * 48
elif self.args.dataset_name == 'bacteria':
input_dim = 512
elif self.args.dataset_name == 'fungus':
input_dim = self.args.input_dim
else:
input_dim = 5 * 5 * 48
self.conv1x1 = nn.Conv1d(input_dim, input_dim // 2, 1)
input_dim = input_dim // 2
if self.args.self_att:
self.self_att = SelfAttention(input_dim, self.args)
if self.args['operator'] == 'att':
self.attention = nn.Sequential( # first layer
nn.Linear(input_dim, self.D),
nn.Tanh(),
# second layer
nn.Linear(self.D, self.K)
# outputs A: NxK
)
torch.nn.init.xavier_uniform_(self.attention[0].weight)
self.attention[0].bias.data.zero_()
torch.nn.init.xavier_uniform_(self.attention[2].weight)
self.attention[2].bias.data.zero_()
self.classifier = nn.Sequential(
nn.Linear(input_dim * self.K, self.args.output_dim),
)
elif self.args['operator'] in ['mean', 'max']:
self.classifier = nn.Sequential(
nn.Linear(input_dim, self.args.output_dim),
)
torch.nn.init.xavier_uniform_(self.classifier[0].weight)
self.classifier[0].bias.data.zero_()
def forward(self, x):
        # Prepare the input: bacteria data gets an explicit channel dimension,
        # and location channels are split off from the image channels below.
if self.args['dataset_name'] == 'bacteria':
x = x.unsqueeze(1)
if not self.args.out_loc:
loc = x[:, 3:]
x = x[:, :3]
# Extract features
# H = self.feature_extractor(x)
# H = self.fc(H)
# H = H.view(-1, H.shape[0])
# if self.args.loc_info:
# pos_x = loc[:, 0, 0, 0].view(-1, 1)
# pos_y = loc[:, 1, 0, 0].view(-1, 1)
# H = torch.cat((H, pos_x, pos_y), dim=1)
# H = self.conv1x1(x.view((x.shape[0], x.shape[1], 1)))
x = x.permute((0, 2, 1))
H = self.conv1x1(x)
H = H.mean(2)
if self.args['dataset_name'] == 'fungus':
H = H.squeeze(0)
H = H.view(-1, H.shape[1])
# print('before', H.shape)
gamma, gamma_kernel = (0, 0)
if self.args.self_att:
H, self_attention, gamma, gamma_kernel = self.self_att(H)
# attention
if self.args['operator'] == 'mean':
M = H.mean(0)
elif self.args['operator'] == 'max':
M, _ = torch.max(H, 0)
elif self.args['operator'] == 'att':
A = self.attention(H) # NxK
A = torch.transpose(A, 1, 0) # KxN
            z = F.softmax(A, dim=1)  # softmax over N (the instance axis)
M = torch.mm(z, H) # KxL
M = M.view(1, -1) # (K*L)x1
# classification
y_prob = self.classifier(M)
if self.args['operator'] in ['mean', 'max']:
y_prob = y_prob.unsqueeze(0)
_, y_hat = torch.max(y_prob, 1)
if self.args['operator'] in ['mean', 'max']:
return y_prob, y_hat, [], [], gamma, gamma_kernel
elif self.args.self_att:
return y_prob, y_hat, z, (A, self_attention), gamma, gamma_kernel
else:
return y_prob, y_hat, z, A, gamma, gamma_kernel
# AUXILIARY METHODS
def calculate_classification_error(self, X, Y):
# Y = Y.float()
y_prob, y_hat, _, _, gamma, gamma_kernel = self.forward(X)
error = 1. - y_hat.eq(Y).cpu().float().mean()
return error, gamma, gamma_kernel
def calculate_objective(self, X, Y):
# Y = Y.float()
y_prob, _, _, _, gamma, gamma_kernel = self.forward(X)
loss = self.criterion(y_prob, Y.view(1))
return loss, gamma, gamma_kernel
class SelfAttention(nn.Module):
def __init__(self, in_dim, args):
super(SelfAttention, self).__init__()
self.args = args
self.query_conv = nn.Conv1d(in_channels=in_dim, out_channels=in_dim // 8, kernel_size=1)
self.key_conv = nn.Conv1d(in_channels=in_dim, out_channels=in_dim // 8, kernel_size=1)
self.value_conv = nn.Conv1d(in_channels=in_dim, out_channels=in_dim, kernel_size=1)
self.gamma = nn.Parameter((torch.ones(1)).cuda())
self.gamma_in = nn.Parameter((torch.ones(1)).cuda())
self.softmax = nn.Softmax(dim=-1)
self.alfa = nn.Parameter((torch.ones(1)).cuda())
self.gamma_att = nn.Parameter((torch.ones(1)).cuda())
def forward(self, x):
if self.args.loc_info:
loc = x[:, -2:]
x = x[:, :-2]
x = x.view(1, x.shape[0], x.shape[1]).permute((0, 2, 1))
# x = x.view(1, x.shape[0], x.shape[1])
bs, C, length = x.shape
proj_query = self.query_conv(x).view(bs, -1, length).permute(0, 2, 1) # B X CX(N)
proj_key = self.key_conv(x).view(bs, -1, length) # B X C x (*W*H)
if self.args.att_gauss_spatial:
proj = torch.zeros((length, length))
if self.args.cuda:
proj = proj.cuda()
proj_query = proj_query.permute(0, 2, 1)
for i in range(length):
gauss = torch.pow(proj_query - proj_key[:, :, i].t(), 2).sum(dim=1)
proj[:, i] = torch.exp(-F.relu(self.gamma_att) * gauss)
energy = proj.view((1, length, length))
elif self.args.att_inv_q_spatial:
proj = torch.zeros((length, length))
if self.args.cuda:
proj = proj.cuda()
proj_query = proj_query.permute(0, 2, 1)
for i in range(length):
gauss = torch.pow(proj_query - proj_key[:, :, i].t(), 2).sum(dim=1)
proj[:, i] = 1 / (F.relu(self.gamma_att) * gauss + torch.ones(1).cuda())
energy = proj.view((1, length, length))
elif self.args.att_module:
proj = torch.zeros((length, length))
if self.args.cuda:
proj = proj.cuda()
proj_query = proj_query.permute(0, 2, 1)
for i in range(length):
proj[:, i] = (torch.abs(proj_query - proj_key[:, :, i].t()) -
torch.abs(proj_query) -
torch.abs(proj_key[:, :, i].t())).sum(dim=1)
energy = proj.view((1, length, length))
elif self.args.laplace_att:
proj = torch.zeros((length, length))
if self.args.cuda:
proj = proj.cuda()
proj_query = proj_query.permute(0, 2, 1)
for i in range(length):
proj[:, i] = (-torch.abs(proj_query - proj_key[:, :, i].t())).sum(dim=1)
energy = proj.view((1, length, length))
elif self.args.att_gauss_abnormal:
proj = torch.zeros((length, length))
if self.args.cuda:
proj = proj.cuda()
proj_query = proj_query.permute(0, 2, 1)
for i in range(int(C // 8)):
gauss = proj_query[0, i, :] - proj_key[0, i, :].view(-1, 1)
proj += torch.exp(-F.relu(self.gamma_att) * torch.abs(torch.pow(gauss, 2)))
energy = proj.view((1, length, length))
elif self.args.att_inv_q_abnormal:
proj = torch.zeros((length, length)).cuda()
proj_query = proj_query.permute(0, 2, 1)
for i in range(int(C // 8)):
gauss = proj_query[0, i, :] - proj_key[0, i, :].view(-1, 1)
proj += torch.exp(F.relu(1 / (torch.pow(gauss, 2) + torch.tensor(1).cuda())))
energy = proj.view((1, length, length))
else:
energy = torch.bmm(proj_query, proj_key) # transpose check
if self.args.loc_info:
if self.args.loc_gauss:
loc_energy_x = torch.exp(
-F.relu(self.gamma_in) * torch.abs(torch.pow(loc[:, 0] - loc[:, 0].view(-1, 1), 2)))
loc_energy_y = torch.exp(
-F.relu(self.gamma_in) * torch.abs(torch.pow(loc[:, 1] - loc[:, 1].view(-1, 1), 2)))
energy_pos = self.alfa * (loc_energy_x + loc_energy_y)
energy = energy + energy_pos
elif self.args.loc_inv_q:
loc_energy_x = torch.exp(
1 / (torch.abs(torch.pow(loc[:, 0] - loc[:, 0].view(-1, 1), 2) + torch.tensor(1).cuda())))
loc_energy_y = torch.exp(
1 / (torch.abs(torch.pow(loc[:, 1] - loc[:, 1].view(-1, 1), 2) + torch.tensor(1).cuda())))
energy_pos = self.alfa * loc_energy_x + loc_energy_y
energy = energy + energy_pos
elif self.args.loc_att:
loc_proj = torch.zeros((length, length))
if self.args.cuda:
loc_proj = loc_proj.cuda()
# proj_query = proj_query.permute(0, 2, 1)
rel_loc_x = loc[:, 0] - loc[:, 0].view(-1, 1)
rel_loc_y = loc[:, 1] - loc[:, 1].view(-1, 1)
for i in range(length):
rel_loc_at = torch.sum(proj_query[0] * rel_loc_x[:, i].view(-1) * rel_loc_y[i, :].view(-1), dim=0)
loc_proj[:, i] = rel_loc_at
energy += loc_proj.view((1, length, length))
attention = self.softmax(energy) # BX (N) X (N)
proj_value = self.value_conv(x).view(bs, -1, length) # B X C X N
out = torch.bmm(proj_value, attention.permute(0, 2, 1))
out = out.view(bs, C, length)
out = self.gamma * out + x
return out[0].permute(1, 0), attention, self.gamma, self.gamma_att
# return out[0], attention, self.gamma, self.gamma_att
| [
"[email protected]"
] | |
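For reference, a self-contained sketch of the plain dot-product branch of the SelfAttention block above; the tensors are random and the dimensions are invented purely to check shapes:

import torch
import torch.nn.functional as F

bs, C, length = 1, 64, 10                             # batch, channels, bag length
q = torch.randn(bs, C // 8, length).permute(0, 2, 1)  # B x N x C'
k = torch.randn(bs, C // 8, length)                   # B x C' x N
v = torch.randn(bs, C, length)                        # B x C  x N

energy = torch.bmm(q, k)                              # B x N x N similarity scores
attention = F.softmax(energy, dim=-1)                 # row-normalised weights
out = torch.bmm(v, attention.permute(0, 2, 1))        # B x C x N
print(out.shape)  # torch.Size([1, 64, 10])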
507318a00b41ce38db963c43532b962a36ca4c43 | f3bd271bf00325881fb5b2533b9ef7f7448a75ec | /classes/_print32.py | fed133646d96b60d6083b2f83a8360c33eb35250 | [] | no_license | obaica/xcp2k | 7f99fc9d494859e16b9b0ea8e217b0493f4b2f59 | 6e15c2c95658f545102595dc1783f5e03a9e6916 | refs/heads/master | 2020-07-15T17:27:43.378835 | 2019-02-11T16:32:24 | 2019-02-11T16:32:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 659 | py | from xcp2k.inputsection import InputSection
from _program_run_info23 import _program_run_info23
from _restart10 import _restart10
from _restart_history4 import _restart_history4
from _current1 import _current1
class _print32(InputSection):
def __init__(self):
InputSection.__init__(self)
self.PROGRAM_RUN_INFO = _program_run_info23()
self.RESTART = _restart10()
self.RESTART_HISTORY = _restart_history4()
self.CURRENT = _current1()
self._name = "PRINT"
self._subsections = {'CURRENT': 'CURRENT', 'RESTART_HISTORY': 'RESTART_HISTORY', 'PROGRAM_RUN_INFO': 'PROGRAM_RUN_INFO', 'RESTART': 'RESTART'}
| [
"[email protected]"
] | |
31fa6cf28dee74da3917221dcc286b6239f35fdc | d5ba475a6a782b0eed5d134b66eb8c601c41421c | /terrascript/data/template.py | a964634d94047ba5352fbbb1a6371b1e8858546a | [
"BSD-2-Clause",
"Python-2.0"
] | permissive | amlodzianowski/python-terrascript | ab42a06a5167e53ad8093b656a9bf14a03cb031d | 142b1a4d1164d1012ac8865d12fdcc72f1e7ae75 | refs/heads/master | 2021-05-19T11:59:47.584554 | 2020-03-26T07:13:47 | 2020-03-26T07:13:47 | 251,688,045 | 0 | 0 | BSD-2-Clause | 2020-03-31T18:00:22 | 2020-03-31T18:00:22 | null | UTF-8 | Python | false | false | 233 | py | # terrascript/data/template.py
import terrascript
class template_file(terrascript.Data):
pass
class template_cloudinit_config(terrascript.Data):
pass
__all__ = [
"template_file",
"template_cloudinit_config",
]
| [
"[email protected]"
] | |
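A rough sketch of registering these data sources with python-terrascript. The API differs across versions, so treat the construction pattern below as an assumption rather than a verified example:

import terrascript
from terrascript.data.template import template_file

ts = terrascript.Terrascript()
ts += template_file("motd", template="Hello, ${name}!", vars={"name": "world"})
print(str(ts))  # emits the Terraform JSON configuration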
3e14d69378a30d8887db254aeede0f54138ce747 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/matrix/4d38ab06972046a988250a3005464d09.py | 03b161fe26511da6e0ce058e59c662bf8f099254 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 488 | py | class Matrix(object):
def __init__(self, init):
        # Use list comprehensions so the results are concrete lists that can
        # be indexed and len()-ed (map() is lazy on Python 3).
        split_at_newline = lambda m: [s.split() for s in m.split('\n')]
        convert_to_int = lambda m: [int(s) for s in m]
        column_range = lambda m: range(len(m))
        column_member = lambda x, m: [s[x] for s in m]
self.rows = [convert_to_int(row) for row in split_at_newline(init)]
self.columns = [column_member(col, self.rows) for col in column_range(self.rows[0])]
| [
"[email protected]"
] | |
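A quick usage check for the Matrix class above (after the Python 3 fixes to its helpers):

m = Matrix("1 2\n3 4")
print(m.rows)     # [[1, 2], [3, 4]]
print(m.columns)  # [[1, 3], [2, 4]]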
91357c211e5073d5b50569facfbbda0b406a9886 | 167c6226bc77c5daaedab007dfdad4377f588ef4 | /python/ql/test/library-tests/variables/scopes/test.py | 940576d44dfe9eff4e4399fd52b40809619cecb7 | [
"MIT",
"LicenseRef-scancode-python-cwi",
"LicenseRef-scancode-other-copyleft",
"GPL-1.0-or-later",
"LicenseRef-scancode-free-unknown",
"Python-2.0"
] | permissive | github/codeql | 1eebb449a34f774db9e881b52cb8f7a1b1a53612 | d109637e2d7ab3b819812eb960c05cb31d9d2168 | refs/heads/main | 2023-08-20T11:32:39.162059 | 2023-08-18T14:33:32 | 2023-08-18T14:33:32 | 143,040,428 | 5,987 | 1,363 | MIT | 2023-09-14T19:36:50 | 2018-07-31T16:35:51 | CodeQL | UTF-8 | Python | false | false | 987 | py |
global0 = 0
global1 = 1
def func0(param0, param1):
return param0 + param1
def func1():
global global0, global_local
local0 = 0
local1 = 1
global_local
global0 = local0 + local1 + global1
def func2():
local2 = 2
def inner1(param2):
local3 = local2
return local3
return inner1
def func3(param4, param5):
local4 = 4
def inner_outer():
def inner2(param3):
return local5 + local4 + param3 + param4
local5 = 3
return inner2(local4 + param4 + param5)
class C(base):
class_local = 7
def meth(self):
mlocal = self
return mlocal
def func4(param6):
class Local:
def meth_inner(self):
return param6
return Local()
def func5(seq):
return [x for x in seq]
def func6(y, z):
return [y+z for y in seq]
#FP observed in sembuild
def use_in_loop(seq):
[v for v in range(3)]
for v in seq:
v #x redefined -- fine in 2 and 3.
| [
"[email protected]"
] | |
e5afe11339814efc010060f141562f2c0f6a8e6c | 7d667b70c8ae1c8f214b85d613d3a98462af9d0c | /froide/account/forms.py | db1cd0962ee519c64b00d35ca627351437a19328 | [
"MIT"
] | permissive | handlingar/froide | c57653a87a05fb402c1fe61f0df1ff480391f911 | 5ed80cf6550fb4cbc757029b2c860b53e784eb93 | refs/heads/master | 2021-05-28T18:13:17.573095 | 2015-06-18T13:00:16 | 2015-06-18T13:00:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,675 | py | import floppyforms as forms
from django.utils.six import text_type as str
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse
from django.contrib import auth
from django.contrib.auth import get_user_model
from django.conf import settings
from froide.helper.widgets import AgreeCheckboxInput
from .widgets import ConfirmationWidget
from .models import AccountManager
USER_CAN_HIDE_WEB = settings.FROIDE_CONFIG.get("user_can_hide_web", True)
HAVE_ORGANIZATION = settings.FROIDE_CONFIG.get("user_has_organization", True)
ALLOW_PSEUDONYM = settings.FROIDE_CONFIG.get("allow_pseudonym", False)
HAVE_NEWSLETTER = settings.FROIDE_CONFIG.get("have_newsletter", False)
class NewUserBaseForm(forms.Form):
first_name = forms.CharField(max_length=30,
label=_('First name'),
widget=forms.TextInput(attrs={'placeholder': _('First Name'),
'class': 'form-control'}))
last_name = forms.CharField(max_length=30,
label=_('Last name'),
widget=forms.TextInput(attrs={'placeholder': _('Last Name'),
'class': 'form-control'}))
address = forms.CharField(max_length=300,
required=False,
label=_('Mailing Address'),
help_text=_('Optional. Your address will not be displayed publicly and is only needed in case a public body needs to send you paper.'),
widget=forms.Textarea(attrs={
'rows': '3',
'class': 'form-control',
'placeholder': _('Street, Post Code, City'),
}))
user_email = forms.EmailField(label=_('Email address'),
max_length=75,
help_text=_('Not public. The given address will '
'need to be confirmed.'),
widget=forms.EmailInput(attrs={
'placeholder': _('[email protected]'),
'class': 'form-control'
}))
if HAVE_ORGANIZATION:
organization = forms.CharField(required=False,
label=_("Organization"),
help_text=_('Optional. Affiliation will be shown next to your name'),
widget=forms.TextInput(attrs={
'placeholder': _('Organization'),
'class': 'form-control'})
)
if USER_CAN_HIDE_WEB:
private = forms.BooleanField(required=False,
label=_("Hide my name on the web"),
help_text=mark_safe(_("If you check this, your name will still appear in requests to public bodies, but we will do our best to not display it publicly. However, we cannot guarantee your anonymity")))
def __init__(self, *args, **kwargs):
super(NewUserBaseForm, self).__init__(*args, **kwargs)
if ALLOW_PSEUDONYM:
self.fields["last_name"].help_text = mark_safe(
_('<a target="_blank" href="{url}">You may use a pseudonym if you don\'t need to receive postal messages</a>.')
.format(url=reverse("help-privacy") + '#pseudonym'))
def clean_first_name(self):
return self.cleaned_data['first_name'].strip()
def clean_last_name(self):
return self.cleaned_data['last_name'].strip()
def clean_user_email(self):
email = self.cleaned_data['user_email']
user_model = get_user_model()
try:
user = user_model.objects.get(email=email)
except user_model.DoesNotExist:
pass
else:
if user.is_active:
raise forms.ValidationError(mark_safe(
_('This email address already has an account. <a href="%(url)s?simple&email=%(email)s" class="btn btn-warning target-small">Click here to login using that email address.</a>') % {
'url': reverse("account-login"),
'email': email
}))
else:
raise forms.ValidationError(
                    _('This email address is already registered, but not yet confirmed! Please click on the confirmation link in the mail we sent you.'))
return email
class TermsForm(forms.Form):
terms = forms.BooleanField(
label=mark_safe(_("Terms and Conditions and Privacy Statement")),
error_messages={'required':
            _('You need to accept our Terms and Conditions and Privacy Statement.')},
widget=AgreeCheckboxInput(
agree_to=_(u'You agree to our <a href="%(url_terms)s" class="target-new">Terms and Conditions</a> and <a href="%(url_privacy)s" class="target-new">Privacy Statement</a>'),
url_names={"url_terms": "help-terms", "url_privacy": "help-privacy"}))
if HAVE_NEWSLETTER:
newsletter = forms.BooleanField(required=False,
label=_("Check if you want to receive our newsletter."))
def save(self, user):
user.terms = True
if HAVE_NEWSLETTER:
user.newsletter = self.cleaned_data['newsletter']
user.save()
class NewUserForm(NewUserBaseForm, TermsForm):
pass
class NewUserWithPasswordForm(NewUserForm):
password = forms.CharField(widget=forms.PasswordInput,
label=_('Password'))
password2 = forms.CharField(widget=forms.PasswordInput,
label=_('Password (repeat)'))
def clean(self):
cleaned = super(NewUserWithPasswordForm, self).clean()
if cleaned['password'] != cleaned['password2']:
raise forms.ValidationError(_("Passwords do not match!"))
return cleaned
class UserLoginForm(forms.Form):
email = forms.EmailField(widget=forms.EmailInput(
attrs={
'placeholder': _('[email protected]'),
'class': 'form-control'
}),
label=_('Email address'))
password = forms.CharField(widget=forms.PasswordInput(
attrs={
'class': 'form-control'
}),
label=_('Password'))
class PasswordResetForm(auth.forms.PasswordResetForm):
email = forms.EmailField(widget=forms.EmailInput(
attrs={
'placeholder': _('[email protected]'),
'class': 'form-control'
}),
label=_('Email address'))
class UserChangeAddressForm(forms.Form):
address = forms.CharField(max_length=300,
label=_('Mailing Address'),
help_text=_('Your address will never be displayed publicly.'),
widget=forms.Textarea(attrs={'placeholder': _('Street, Post Code, City'),
'class': 'inline smalltext'}))
def __init__(self, profile, *args, **kwargs):
super(UserChangeAddressForm, self).__init__(*args, **kwargs)
self.profile = profile
self.fields['address'].initial = self.profile.address
def save(self):
self.profile.address = self.cleaned_data['address']
self.profile.save()
class UserChangeEmailForm(forms.Form):
email = forms.EmailField(widget=forms.EmailInput(
attrs={'placeholder': _('[email protected]')}),
label=_('New email address'))
def clean_email(self):
email = self.cleaned_data['email'].lower()
if get_user_model().objects.filter(email=email).exists():
raise forms.ValidationError(
_('A user with that email address already exists!')
)
return email
class UserEmailConfirmationForm(forms.Form):
email = forms.EmailField()
secret = forms.CharField(min_length=32, max_length=32)
user_id = forms.IntegerField()
def __init__(self, user, *args, **kwargs):
self.user = user
super(UserEmailConfirmationForm, self).__init__(*args, **kwargs)
def clean_user_id(self):
user_id = self.cleaned_data['user_id']
if user_id != self.user.pk:
raise forms.ValidationError(
_('Logged in user does not match this link!')
)
return user_id
def clean(self):
check = AccountManager(self.user).check_confirmation_secret(
self.cleaned_data['secret'],
self.cleaned_data['email'],
)
if not check:
raise forms.ValidationError(
_('Link is invalid or has expired!')
)
return self.cleaned_data
def save(self):
self.user.email = self.cleaned_data['email']
self.user.save()
class UserDeleteForm(forms.Form):
CONFIRMATION_PHRASE = str(_('Freedom of Information Act'))
password = forms.CharField(
widget=forms.PasswordInput,
label=_('Password'),
help_text=_('Please type your password to confirm.')
)
confirmation = forms.CharField(
widget=ConfirmationWidget(
{'placeholder': CONFIRMATION_PHRASE}
),
label=_('Confirmation Phrase'),
help_text=_('Type the phrase above exactly as displayed.'))
def __init__(self, user, *args, **kwargs):
self.user = user
super(UserDeleteForm, self).__init__(*args, **kwargs)
def clean_password(self):
password = self.cleaned_data['password']
user = auth.authenticate(
username=self.user.email,
password=password
)
if not user:
raise forms.ValidationError(
_('You provided the wrong password!')
)
return ''
def clean_confirmation(self):
confirmation = self.cleaned_data['confirmation']
if confirmation != self.CONFIRMATION_PHRASE:
raise forms.ValidationError(
_('You did not type the confirmation phrase exactly right!')
)
return ''
| [
"[email protected]"
] | |
35b5605675d38e47f6e9113f00cec7ad47b2cd14 | 39d26bedd4049d58265fcd6c480cc7a5b73c7ece | /Tutorial_SimpleTeacherAPI/python-sample-code/tests/conftest.py | 5698513372a065217fe1f856dfe1735a2f254317 | [] | no_license | sramirezh/Developing | 7adc6dbb5c8436db6a3ab125018186ea7bdd1b40 | a07ed07899911b9860830f9498c08144c4eca3d4 | refs/heads/master | 2022-11-08T01:43:05.755215 | 2021-08-23T03:57:39 | 2021-08-23T03:57:39 | 249,786,342 | 0 | 1 | null | 2022-10-24T21:02:22 | 2020-03-24T18:26:46 | Python | UTF-8 | Python | false | false | 281 | py | import pytest
@pytest.fixture
def basic_tree():
return [
(23, "23"),
(4, "4"),
(30, "30"),
(11, "11"),
(7, "7"),
(34, "34"),
(20, "20"),
(24, "24"),
(22, "22"),
(15, "15"),
(1, "1")
]
| [
"[email protected]"
] | |
5ada850496b766d56da6dc90b7d634e1aa9f19c4 | 1cf3a339c0f94bce94cf142fde9a9f6ab38369a8 | /yt_arch/core/api_client.py | 226b998e4c8765b26d726b26d53496c6d0694b0e | [
"MIT"
] | permissive | hilbertqqc/youtube-playlist-archiver | 959f9afc541c293ff05b37b99833f640d39f4c2a | 69727075e0151d03259c373647278312b11f0299 | refs/heads/master | 2023-03-24T03:34:36.507215 | 2021-03-17T20:57:48 | 2021-03-17T20:57:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 235 | py | import httpapiclient
from httpapiclient.mixins import JsonResponseMixin, HelperMethodsMixin
class ApiClient(JsonResponseMixin, HelperMethodsMixin, httpapiclient.BaseApiClient):
base_url = 'https://www.googleapis.com/youtube/v3/'
| [
"[email protected]"
] | |
243ef68fe11d18e22369979cd2bf46125b0e0df8 | c97fc7658c39feb51c0ed42c04783797c8675b8a | /2018/pcy1/day12_mysql/orm8_fk3_update.py | 8e4124a903201d0e359e84c71a75f1bf66cd9c77 | [] | no_license | githubvit/study | 8bff13b18bea4954e8ed1b4619a091b134b8ff97 | 845e19d1225f1aa51c828b15effac30be42fdc1b | refs/heads/master | 2023-02-20T15:59:19.635611 | 2021-12-15T08:30:54 | 2021-12-15T08:30:54 | 241,928,274 | 1 | 1 | null | 2023-02-02T06:18:48 | 2020-02-20T16:08:06 | Python | UTF-8 | Python | false | false | 2,731 | py | #_*_coding:utf-8_*_
'''
8. Foreign keys (foreign key)
8.3 Updating data
The stu_id column of the study_record attendance table was NULL when the rows were inserted; update it here.
'''
from sqlalchemy import create_engine,ForeignKey,bindparam
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String,DATE,Enum
from sqlalchemy.orm import sessionmaker
# 1. Connect to the database
engine = create_engine("mysql+pymysql://root:another333@localhost/test_db",
encoding='utf-8',echo=True)
# mysql+pymysql means pymysql is used to execute the raw SQL;
# echo=True prints what happens, so the generated raw SQL can be inspected.
# 2. Map tables to classes
Base = declarative_base() # generate the ORM base class
class Student(Base):# student table
__tablename__='student'
id=Column(Integer,primary_key=True)
name=Column(String(32),nullable=False)
register_date=Column(DATE,nullable=False)
class StudyRecody(Base):# attendance table
__tablename__='study_record'
id=Column(Integer,primary_key=True)
day=Column(Integer,nullable=False)
status=Column(String(32),nullable=False)
    # 3. Foreign key, referencing the id column of the student table
stu_id=Column(Integer,ForeignKey('student.id'))
# 4. Create a session with the database, analogous to a file handle for file operations.
Session_class = sessionmaker(bind=engine)# create the session class bound to the database; note this returns a class, not an instance
session = Session_class() # instantiate a session
# 5. Update the attendance table: update(dict)
# This bulk-update approach was worked out from many references: update() takes a where clause, and the where clause needs bound parameters, hence the bindparam import above.
session.execute(
StudyRecody.__table__.update().where(StudyRecody.id==bindparam('b_id')),
[ {'b_id':1,'day':1,'status':'yes','stu_id':1},
{'b_id':2,'day':1,'status':'yes','stu_id':2},
{'b_id':3,'day':1,'status':'no','stu_id':3},
{'b_id':4,'day':2,'status':'no','stu_id':1},
]
)
'''UPDATE study_record SET day=%(day)s, status=%(status)s WHERE study_record.id = %(b_id)s
This is the bulk-update approach: execute() can insert or update many rows at once,
but every row must have the same shape. Something like:
[{'b_id':1,'day':1,'status':'yes'},
 {'b_id':3,'status':'no'}]
cannot be executed, because the second row would need different raw SQL than the first.'''
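# A hedged aside (not part of the original script): if the rows really do have
# different shapes, one workaround is a per-row ORM update, where each call
# compiles its own UPDATE statement. The helper below is an illustrative
# sketch only -- it is defined here but never called:
def update_heterogeneous_rows(db_session, rows):
    """Update study_record rows one by one; each dict may carry a different column subset."""
    for row in rows:
        record_id = row.pop('b_id')
        db_session.query(StudyRecody).filter(StudyRecody.id == record_id).update(row)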
'''
The execution result of the above is as follows:
mysql> select * from study_record;
+----+-----+--------+--------+
| id | day | status | stu_id |
+----+-----+--------+--------+
| 1 | 1 | yes | 1 |
| 2 | 1 | yes | 2 |
| 3 | 1 | no | 3 |
| 4 | 2 | no | 1 |
+----+-----+--------+--------+
4 rows in set (0.00 sec)
mysql>
'''
session.commit() | [
"[email protected]"
] | |
e0a8e099387d137239284405b8a10b388eca81c7 | e82b761f53d6a3ae023ee65a219eea38e66946a0 | /All_In_One/addons/uv_align_distribute/pack_islands.py | a9fd3e306ce026b741f500faf275f51c1a81050d | [] | no_license | 2434325680/Learnbgame | f3a050c28df588cbb3b14e1067a58221252e2e40 | 7b796d30dfd22b7706a93e4419ed913d18d29a44 | refs/heads/master | 2023-08-22T23:59:55.711050 | 2021-10-17T07:26:07 | 2021-10-17T07:26:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,153 | py | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
"""
PackIslands module (attention: still a work in progress).
Contains the operators used by Blender to perform island packing.
"""
from collections import defaultdict
import bpy.ops
from bpy.props import FloatProperty, BoolProperty, IntProperty
import mathutils
from . import make_islands, templates, utils, operator_manager, global_def
class _Rect:
"""Class rappresenting a rectangle."""
def __init__(self, x, y, width, height):
"""Initialize the class with origin(x, y), width and height."""
self.x = x
self.y = y
self.width = width
self.height = height
def __repr__(self):
"""String representation of Rect."""
return "Rect: x: {0}, y: {1}, width: {2}, height: {3}"\
.format(self.x, self.y, self.width, self.height)
def fit(self, other):
"""Test if other can be contained."""
if other.width <= self.width and other.height <= self.height:
return True
else:
return False
class _Node:
def __init__(self, rect):
self.used = False
self.left = None
self.right = None
self.rect = rect
def __repr__(self):
return "Node {0}: \n\tUsed: {1}, rect: {2}"\
.format(hex(id(self)), self.used, self.rect)
class _BinTree:
def __init__(self, rect):
self._root = _Node(rect)
def insert(self, rect):
width = rect.width
height = rect.height
node = self.__findNode(self._root, width, height)
if node:
node = self.__splitNode(node, width, height)
return node
else:
return self.__growNode(width, height)
def __findNode(self, node, width, height):
if node.used:
return self.__findNode(node.left, width, height) or \
self.__findNode(node.right, width, height)
elif round(width, 5) <= round(node.rect.width, 5) and \
round(height, 5) <= round(node.rect.height, 5):
return node
else:
return None
def __splitNode(self, node, width, height):
node.used = True
lRect = _Rect(node.rect.x, node.rect.y + height,
width, node.rect.height - height)
print("Left: ", lRect)
node.left = _Node(lRect)
rRect = _Rect(node.rect.x + width, node.rect.y,
node.rect.width - width, node.rect.height)
print("Right: ", rRect)
node.right = _Node(rRect)
return node
def __growNode(self, width, height):
canGrowLeft = (width <= self._root.rect.width)
canGrowRight = (width <= self._root.rect.height)
shouldGrowRight = canGrowRight and \
(self._root.rect.height >= (self._root.rect.width + width))
shouldGrowLeft = canGrowLeft and \
(self._root.rect.width >= (self._root.rect.height + height))
if shouldGrowRight:
return self.__growRight(width, height)
elif shouldGrowLeft:
return self.__growLeft(width, height)
elif canGrowRight:
return self.__growRight(width, height)
elif canGrowLeft:
return self.__growLeft(width, height)
else:
return None
def __growRight(self, width, height):
print("growing right")
self._root.used = True
self._root.rect.width += width
# self._root.left = self._root
self._root.right = _Node(_Rect(self._root.rect.width - width, 0,
width, self._root.rect.height))
node = self.__findNode(self._root, width, height)
if node:
return self.__splitNode(node, width, height)
else:
return None
def __growLeft(self, width, height):
print("growing Left")
self._root.used = True
self._root.rect.height += height
# self._root.right = None
self._root.left = _Node(_Rect(0, self._root.rect.height - height,
self._root.rect.width, height))
node = self.__findNode(self._root, width, height)
if node:
return self.__splitNode(node, width, height)
else:
return None
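# A hedged usage sketch of the _BinTree packer above (not part of the original
# add-on); the rectangle sizes are made-up values. insert() returns the node
# whose rect.x / rect.y give the packed bottom-left corner, growing the root
# bin when nothing fits. The helper is defined for illustration, never called:
def _bintree_demo():
    tree = _BinTree(_Rect(0, 0, 2.0, 1.0))
    placed = []
    for width, height in [(2.0, 1.0), (1.0, 1.0), (0.5, 0.5)]:
        node = tree.insert(_Rect(0, 0, width, height))
        if node:
            placed.append((node.rect.x, node.rect.y, width, height))
    return placed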
class PackIslands_not_working(templates.UvOperatorTemplate):
"""Pack UV Islands in the uv space."""
bl_idname = "uv.pack_pile_islands"
bl_label = "Pack Pile Islands"
bl_options = {'REGISTER', 'UNDO'}
selectedOnly = BoolProperty(
name="Selection Only",
description="Pack only selected islands",
default=False
)
islandMargin = FloatProperty(
name="Margin",
description="Margin between islands",
default=0,
min=0,
max=1,
soft_min=0,
soft_max=1,
step=1,
precision=4)
pile = BoolProperty(
name="Pile",
description="Pile similar island to save uv space",
default=False
)
numOfPiles = IntProperty(
name="Number of piles",
description="number of piles to create",
default=1,
min=1,
max=2**31-1,
soft_min=1,
soft_max=10,
step=1
)
def execute(self, context):
"""Execute the script."""
def getMax(island):
bbox = island.BBox()
width = bbox.right() - bbox.left()
height = bbox.top() - bbox.bottom()
val = max(width, height)
return val
makeIslands = make_islands.MakeIslands()
islands = makeIslands.getIslands()
selectedIslands = makeIslands.selectedIslands()
activeIsland = makeIslands.activeIsland()
hiddenIslands = makeIslands.hiddenIslands()
# choose which island should be used
usableIslands = islands
if self.selectedOnly:
usableIslands = selectedIslands
# sort island with maxside:
usableIslands.sort(key=lambda island: getMax(island), reverse=True)
# bin pack the island
islandBBox = usableIslands[0].BBox()
width = islandBBox.right() - islandBBox.left()
height = islandBBox.top() - islandBBox.bottom()
rect = _Rect(0, 0, width, height)
btree = _BinTree(rect)
for island in usableIslands:
islandBBox = island.BBox()
width = islandBBox.right() - islandBBox.left()
height = islandBBox.top() - islandBBox.bottom()
rect = _Rect(0, 0, width, height)
node = btree.insert(rect)
if node:
vector = mathutils.Vector((node.rect.x, node.rect.y)) - island.BBox().bottomLeft()
island.move(vector)
# scale the islands to fit uv space
# get the whole BBox:
bbox = utils.GBBox(usableIslands)
width = bbox.right() - bbox.left()
height = bbox.top() - bbox.bottom()
scale = 1 / max(width, height)
for island in usableIslands:
for face_id in island:
face = global_def.bm.faces[face_id]
for loop in face.loops:
x = loop[global_def.bm.loops.layers.uv.active].uv.x
y = loop[global_def.bm.loops.layers.uv.active].uv.y
loop[global_def.bm.loops.layers.uv.active].uv.x = x * scale
loop[global_def.bm.loops.layers.uv.active].uv.y = y * scale
utils.update()
return{'FINISHED'}
def draw(self, context):
"""Draw the operator props."""
layout = self.layout
layout.prop(self, "selectedOnly")
layout.prop(self, "islandMargin")
layout.prop(self, "pile")
if self.pile:
layout.prop(self, "numOfPiles")
class PackIslands(templates.UvOperatorTemplate):
"""Pack UV Islands in the uv space."""
bl_idname = "uv.pack_pile_islands"
bl_label = "Pack Pile Islands"
bl_options = {'REGISTER', 'UNDO'}
selectedOnly = BoolProperty(
name="Selection Only",
description="Pack only selected islands",
default=False
)
rotate = BoolProperty(
name="Rotate",
description="Rotate island",
default=False
)
islandMargin = FloatProperty(
name="Margin",
description="Margin between islands",
default=0,
min=0,
max=1,
soft_min=0,
soft_max=1,
step=1,
precision=4)
pile = BoolProperty(
name="Pile",
description="Pile similar island to save uv space",
default=False
)
numOfPiles = IntProperty(
name="Number of piles",
description="Number of piles to create for each similar islands",
default=1,
min=1,
max=2**31-1,
soft_min=1,
soft_max=10,
step=1
)
def execute(self, context):
"""Execute the script."""
def getMax(island):
bbox = island.BBox()
width = bbox.right() - bbox.left()
height = bbox.top() - bbox.bottom()
val = max(width, height)
return val
def makePiles(self, data):
newDict = defaultdict(list)
for islandIndex in data:
mList = data[islandIndex].copy()
mList.insert(0, islandIndex)
numOfIsoIsland = len(mList)
chunk = numOfIsoIsland // self.numOfPiles
remainder = numOfIsoIsland % self.numOfPiles
pad = 0
for i in range(0, numOfIsoIsland):
bit = 0
if remainder:
bit = 1
for j in range(1, chunk + bit):
if len(mList) > pad + j:
newDict[mList[pad]].append(mList[pad+j])
pad += chunk+bit
if remainder:
remainder -= 1
return newDict
makeIslands = make_islands.MakeIslands()
islands = makeIslands.getIslands()
selectedIslands = makeIslands.selectedIslands()
activeIsland = makeIslands.activeIsland()
hiddenIslands = makeIslands.hiddenIslands()
# search for isomorphic island
isoIslandVisited = []
isoIsland = defaultdict(list)
if self.pile:
for island in selectedIslands:
for other in selectedIslands:
if island in isoIslandVisited or island == other:
continue
isoVerts = island.isIsomorphic(other)
if isoVerts:
isoIsland[selectedIslands.index(island)].append(selectedIslands.index(other))
isoIslandVisited.append(other)
isoIsland = makePiles(self, isoIsland)
# remove isomorphic island from selection
for island in isoIsland.values():
for other in island:
for face_id in selectedIslands[other]:
face = global_def.bm.faces[face_id]
face.select = False
print(isoIsland)
utils.update()
bpy.ops.uv.pack_islands(rotate=self.rotate, margin=self.islandMargin)
if self.pile and len(islands) != 0:
# map each uv vert to corresponding vert for selectedIslands
uv_to_vert = dict((i, list()) for i in range(len(global_def.bm.verts)))
perIslandVerts = dict((i, set()) for i in range(len(selectedIslands)))
# activeIslandUVData = dict((i, list()) for i in range(numOfVertex))
for island in selectedIslands:
for face_id in island:
face = global_def.bm.faces[face_id]
for loop in face.loops:
index = loop.vert.index
uv_to_vert[index].append(loop[global_def.uvlayer])
perIslandVerts[selectedIslands.index(island)].add(index)
for islandIndex in isoIsland:
for isoIndex in isoIsland[islandIndex]:
islandVerts = perIslandVerts[islandIndex]
isoVerts = perIslandVerts[isoIndex]
vertmap = selectedIslands[islandIndex].isIsomorphic(selectedIslands[isoIndex])
for v in islandVerts:
mappedVert = vertmap[v]
for uv_loop in uv_to_vert[v]:
for iso_uv_loop in uv_to_vert[mappedVert]:
iso_uv_loop.uv = uv_loop.uv
# reselct faces
for island in isoIsland.values():
for other in island:
for face_id in selectedIslands[other]:
face = global_def.bm.faces[face_id]
face.select = True
utils.update()
return{'FINISHED'}
def draw(self, context):
"""Draw the operator props."""
layout = self.layout
layout.prop(self, "selectedOnly")
layout.prop(self, "rotate")
layout.prop(self, "islandMargin")
layout.prop(self, "pile")
if self.pile:
layout.prop(self, "numOfPiles")
#################################
# REGISTRATION
#################################
_om = operator_manager.om
_om.addOperator(PackIslands)
| [
"[email protected]"
] | |
f940bcf1ea682999bed19fc60ca0f4af0c8a6610 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5695413893988352_1/Python/sempav/b.py | 8ad8a3ac5ec44bbc0fd72c27fcc7cc57a5f07a7f | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 3,416 | py | POS = 1
NEG = 2
BOTH = 3
NONE = 0
ans_c = ''
ans_j = ''
ans_diff = 10**20
def extract_num(score_str):
pow10 = 1
res = 0
for ch in reversed(score_str):
if ch != '?':
res += (ord(ch) - ord('0')) * pow10
pow10 *= 10
return res
def check(diff, ans, positions, score_c, score_j):
global ans_c
global ans_j
global ans_diff
if abs(diff) > abs(ans_diff):
return
c_str = ''
j_str = ''
for d, p, c, j in zip(ans, positions, score_c, score_j):
if p == NONE:
c_str += c
j_str += j
elif p == BOTH:
if d >= 0:
c_str += str(d)
j_str += '0'
else:
c_str += '0'
j_str += str(-d)
elif p == POS:
c_str += str(d)
j_str += j
else: # p == NEG:
c_str += c
j_str += str(-d)
if abs(diff) < abs(ans_diff):
ans_diff = diff
ans_c = c_str
ans_j = j_str
elif abs(diff) == abs(ans_diff):
c_int = int(c_str)
j_int = int(j_str)
ans_c_int = int(ans_c)
ans_j_int = int(ans_j)
if c_int < ans_c_int:
ans_c = c_str
ans_j = j_str
elif c_int == ans_c_int and j_int < ans_j_int:
ans_c = c_str
ans_j = j_str
def solve(i, ans, diff, positions, score_c, score_j):
if i == len(positions):
check(diff, ans, positions, score_c, score_j)
return
pow10 = 10 ** (len(positions) - i - 1)
if positions[i] == NONE:
ans[i] = 0
solve(i + 1, ans, diff, positions, score_c, score_j)
return
if positions[i] == POS:
cur_range = range(0, 10)
elif positions[i] == NEG:
cur_range = range(-9, 1)
elif positions[i] == BOTH:
cur_range = range(-9, 10)
#print(positions[i], diff, list(cur_range))
just_above = cur_range[-1]
for digit in cur_range:
if diff - pow10 * digit == 0:
just_above = digit
break
if diff - pow10 * digit < 0:
just_above = digit - 1
break
if just_above not in cur_range:
just_above = cur_range[0]
just_below = cur_range[0]
for digit in reversed(cur_range):
if diff - pow10 * digit == 0:
just_below = digit
break
if diff - pow10 * digit > 0:
just_below = digit + 1
break
if just_below not in cur_range:
just_below = cur_range[-1]
ans[i] = just_below
solve(i + 1, ans, diff - pow10 * just_below, positions, score_c, score_j)
ans[i] = just_above
solve(i + 1, ans, diff - pow10 * just_above, positions, score_c, score_j)
t = int(input())
for testCase in range(1, t + 1):
score_c, score_j = input().split()
ans_c = ''
ans_j = ''
ans_diff = 10**20
a = extract_num(score_c)
b = extract_num(score_j)
positions = []
for ch_a, ch_b in zip(score_c, score_j):
if ch_a == '?' and ch_b == '?':
positions.append(BOTH)
elif ch_a == '?':
positions.append(POS)
elif ch_b == '?':
positions.append(NEG)
else:
positions.append(NONE)
ans = [0 for tmp in score_c]
solve(0, ans, b - a, tuple(positions), score_c, score_j)
print('Case #{}: {} {}'.format(testCase, ans_c, ans_j))
| [
"[email protected]"
] | |
b9d4b40eb8ee711ce836773b1102fa904d5d7281 | fd3460952febec5d4d4cbe16a91fd3f06577d9a6 | /app/views.py | da055749ba8cca16713ebc16eced687cece05cf4 | [] | no_license | sakkhar/accounts | 9ea52783bc720b0422fef00e80b83947f4d9c566 | 7da8357d548cca20426b39c4f39ff0f5608a906f | refs/heads/master | 2020-03-27T20:18:05.622381 | 2018-09-02T05:21:39 | 2018-09-02T05:21:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,472 | py |
from django.contrib import messages
from django.contrib.auth import login, authenticate, REDIRECT_FIELD_NAME
from django.contrib.auth.tokens import default_token_generator
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.views import (
LogoutView as BaseLogoutView, PasswordChangeView as BasePasswordChangeView,
PasswordResetDoneView as BasePasswordResetDoneView, PasswordResetConfirmView as BasePasswordResetConfirmView,
)
from django.shortcuts import get_object_or_404, redirect
from django.utils.crypto import get_random_string
from django.utils.decorators import method_decorator
from django.utils.http import is_safe_url
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode
from django.utils.translation import gettext_lazy as _
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from django.views.generic import View, FormView
from django.conf import settings
from .utils import (
send_activation_email, send_reset_password_email, send_forgotten_username_email, send_activation_change_email,
)
from .forms import (
SignInViaUsernameForm, SignInViaEmailForm, SignInViaEmailOrUsernameForm, SignUpForm,
RestorePasswordForm, RestorePasswordViaEmailOrUsernameForm, RemindUsernameForm,
ResendActivationCodeForm, ResendActivationCodeViaEmailForm, ChangeProfileForm, ChangeEmailForm,
)
from .models import Activation
class GuestOnlyView(View):
def dispatch(self, request, *args, **kwargs):
# Redirect to the index page if the user already authenticated
if request.user.is_authenticated:
return redirect(settings.LOGIN_REDIRECT_URL)
return super().dispatch(request, *args, **kwargs)
class LogInView(GuestOnlyView, FormView):
template_name = 'accounts/log_in.html'
@staticmethod
def get_form_class(**kwargs):
if settings.DISABLE_USERNAME or settings.LOGIN_VIA_EMAIL:
return SignInViaEmailForm
if settings.LOGIN_VIA_EMAIL_OR_USERNAME:
return SignInViaEmailOrUsernameForm
return SignInViaUsernameForm
@method_decorator(sensitive_post_parameters('password'))
@method_decorator(csrf_protect)
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
# Sets a test cookie to make sure the user has cookies enabled
request.session.set_test_cookie()
return super().dispatch(request, *args, **kwargs)
def form_valid(self, form):
request = self.request
        # If the test cookie worked, go ahead and delete it since it's no longer needed
if request.session.test_cookie_worked():
request.session.delete_test_cookie()
# The default Django's "remember me" lifetime is 2 weeks and can be changed by modifying
# the SESSION_COOKIE_AGE settings' option.
if settings.USE_REMEMBER_ME:
if not form.cleaned_data['remember_me']:
request.session.set_expiry(0)
login(request, form.user_cache)
redirect_to = request.POST.get(REDIRECT_FIELD_NAME, request.GET.get(REDIRECT_FIELD_NAME))
url_is_safe = is_safe_url(redirect_to, allowed_hosts=request.get_host(), require_https=request.is_secure())
if url_is_safe:
return redirect(redirect_to)
return redirect(settings.LOGIN_REDIRECT_URL)
class SignUpView(GuestOnlyView, FormView):
template_name = 'accounts/sign_up.html'
form_class = SignUpForm
def form_valid(self, form):
request = self.request
user = form.save(commit=False)
if settings.DISABLE_USERNAME:
# Set a temporary username
user.username = get_random_string()
else:
user.username = form.cleaned_data['username']
if settings.ENABLE_USER_ACTIVATION:
user.is_active = False
# Create a user record
user.save()
# Change the username to the "user_ID" form
if settings.DISABLE_USERNAME:
user.username = f'user_{user.id}'
user.save()
if settings.ENABLE_USER_ACTIVATION:
code = get_random_string(20)
act = Activation()
act.code = code
act.user = user
act.save()
send_activation_email(request, user.email, code)
messages.success(
request, _('You are signed up. To activate the account, follow the link sent to the mail.'))
else:
raw_password = form.cleaned_data['password1']
user = authenticate(username=user.username, password=raw_password)
login(request, user)
messages.success(request, _('You are successfully signed up!'))
return redirect('index')
class ActivateView(View):
@staticmethod
def get(request, code):
act = get_object_or_404(Activation, code=code)
# Activate profile
user = act.user
user.is_active = True
user.save()
# Remove the activation record
act.delete()
messages.success(request, _('You have successfully activated your account!'))
return redirect('accounts:log_in')
class ResendActivationCodeView(GuestOnlyView, FormView):
template_name = 'accounts/resend_activation_code.html'
@staticmethod
def get_form_class(**kwargs):
if settings.DISABLE_USERNAME:
return ResendActivationCodeViaEmailForm
return ResendActivationCodeForm
def form_valid(self, form):
user = form.user_cache
activation = user.activation_set.first()
activation.delete()
code = get_random_string(20)
act = Activation()
act.code = code
act.user = user
act.save()
send_activation_email(self.request, user.email, code)
messages.success(self.request, _('A new activation code has been sent to your email address.'))
return redirect('accounts:resend_activation_code')
class RestorePasswordView(GuestOnlyView, FormView):
template_name = 'accounts/restore_password.html'
@staticmethod
def get_form_class(**kwargs):
if settings.RESTORE_PASSWORD_VIA_EMAIL_OR_USERNAME:
return RestorePasswordViaEmailOrUsernameForm
return RestorePasswordForm
def form_valid(self, form):
user = form.user_cache
token = default_token_generator.make_token(user)
uid = urlsafe_base64_encode(force_bytes(user.pk)).decode()
send_reset_password_email(self.request, user.email, token, uid)
return redirect('accounts:restore_password_done')
class ChangeProfileView(LoginRequiredMixin, FormView):
template_name = 'accounts/profile/change_profile.html'
form_class = ChangeProfileForm
def get_initial(self):
user = self.request.user
initial = super().get_initial()
initial['first_name'] = user.first_name
initial['last_name'] = user.last_name
return initial
def form_valid(self, form):
user = self.request.user
user.first_name = form.cleaned_data['first_name']
user.last_name = form.cleaned_data['last_name']
user.save()
messages.success(self.request, _('Profile data has been successfully updated.'))
return redirect('accounts:change_profile')
class ChangeEmailView(LoginRequiredMixin, FormView):
template_name = 'accounts/profile/change_email.html'
form_class = ChangeEmailForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def get_initial(self):
initial = super().get_initial()
initial['email'] = self.request.user.email
return initial
def form_valid(self, form):
user = self.request.user
email = form.cleaned_data['email']
if settings.ENABLE_ACTIVATION_AFTER_EMAIL_CHANGE:
code = get_random_string(20)
act = Activation()
act.code = code
act.user = user
act.email = email
act.save()
send_activation_change_email(self.request, email, code)
messages.success(self.request, _('To complete the change of email address, click on the link sent to it.'))
else:
user.email = email
user.save()
messages.success(self.request, _('Email successfully changed.'))
return redirect('accounts:change_email')
class ChangeEmailActivateView(View):
@staticmethod
def get(request, code):
act = get_object_or_404(Activation, code=code)
# Change the email
user = act.user
user.email = act.email
user.save()
# Remove the activation record
act.delete()
messages.success(request, _('You have successfully changed your email!'))
return redirect('accounts:change_email')
class RemindUsernameView(GuestOnlyView, FormView):
template_name = 'accounts/remind_username.html'
form_class = RemindUsernameForm
def form_valid(self, form):
user = form.user_cache
send_forgotten_username_email(user.email, user.username)
messages.success(self.request, _('Your username has been successfully sent to your email.'))
return redirect('accounts:remind_username')
class ChangePasswordView(BasePasswordChangeView):
template_name = 'accounts/profile/change_password.html'
def form_valid(self, form):
# Change the password
user = form.save()
# Re-authentication
login(self.request, user)
messages.success(self.request, _('Your password was changed.'))
return redirect('accounts:change_password')
class RestorePasswordConfirmView(BasePasswordResetConfirmView):
template_name = 'accounts/restore_password_confirm.html'
def form_valid(self, form):
# Change the password
form.save()
messages.success(self.request, _('Your password has been set. You may go ahead and log in now.'))
return redirect('accounts:log_in')
class RestorePasswordDoneView(BasePasswordResetDoneView):
template_name = 'accounts/restore_password_done.html'
class LogOutView(LoginRequiredMixin, BaseLogoutView):
template_name = 'accounts/log_out.html' | [
"[email protected]"
] | |
e815bc00ac8a9f39a473d1ae169a929143560be6 | c93f51492cfee3f98040f07d7f4323ec27ac81a5 | /refinery/units/obfuscation/ps1/concat.py | 40bc8a8c7d142f8dcdff27d0265cce17adba6673 | [
"BSD-3-Clause"
] | permissive | prats84/refinery | cbe9ebfeb570c9c0531e13bbf13ec18801f12aca | 5f961051e9cc1857a06108ce4d36a6799ac9d720 | refs/heads/master | 2023-07-13T02:32:04.998285 | 2021-08-20T09:08:01 | 2021-08-20T09:08:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,398 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
from .. import IterativeDeobfuscator
from . import string_unquote, string_quote, Ps1StringLiterals
class deob_ps1_concat(IterativeDeobfuscator):
_SENTINEL = re.compile(R'''['"]\s*[+&]\s*['"]''')
def deobfuscate(self, data):
def concat(data):
strlit = Ps1StringLiterals(data)
repeat = True
while repeat:
for match in self._SENTINEL.finditer(data):
a, b = match.span()
a = strlit.get_container(a)
if a is None:
continue
b = strlit.get_container(b)
if b is None or b != a + 1:
continue
a = strlit.ranges[a]
b = strlit.ranges[b]
stra = data[slice(*a)]
strb = data[slice(*b)]
parts = list(string_unquote(stra))
it = iter(string_unquote(strb))
parts[~0] += next(it)
parts.extend(it)
yield data[:a[0]] + string_quote(parts)
data = data[b[1]:]
strlit.update(data)
break
else:
repeat = False
yield data
return ''.join(concat(data))
| [
"[email protected]"
] | |
6fc3e353a8326a114fc60b18e3229535220c28c9 | 0a118477c8b6d1ef79b26310a1d3fb06716743e9 | /contributer_demo/demo2/coordination/formation_demo/my_leader.py | 6a3f3c47ca5aa8e260d12d37928babc195285821 | [
"MIT"
] | permissive | nsgcjdsz/XTDrone | 773ea65421044a895e427cfc68d9e3669210c12a | ebefd6cf943b95998e1b47de6be9052a146d667d | refs/heads/master | 2023-08-25T18:12:48.338686 | 2021-10-23T12:03:47 | 2021-10-23T12:03:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,406 | py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import rospy
from geometry_msgs.msg import Twist, Vector3, PoseStamped
from std_msgs.msg import String
from pyquaternion import Quaternion
import time
import math
import numpy
import sys
#if sys.argv[2] == '6': # formation_dict is a module in this folder
# from formation_dict import formation_dict_6 as formation_dict
#elif sys.argv[2] == '9':
# from formation_dict import formation_dict_9 as formation_dict
#elif sys.argv[2] == '18':
# from formation_dict import formation_dict_18 as formation_dict
if sys.argv[2] == '21':
from my_formation_dict import formation_dict_my as formation_dict
elif sys.argv[2] == '34':
from my_formation_dict import formation_dict_my as formation_dict
class Leader:
def __init__(self, uav_type, leader_id, uav_num):
self.hover = True
self.id = leader_id
self.local_pose = PoseStamped()
self.cmd_vel_enu = Twist()
self.follower_num = uav_num - 1
self.followers_info = ["Moving"]*self.follower_num
self.follower_arrived_num = 0
self.follower_all_arrived = True
self.avoid_accel = Vector3(0,0,0)
self.formation_config = 'waiting'
self.target_height_recorded = False
self.cmd = String()
self.f = 200
self.Kz = 0.5
self.local_pose_sub = rospy.Subscriber(uav_type+'_'+str(self.id)+"/mavros/local_position/pose", PoseStamped , self.local_pose_callback)
self.cmd_vel_sub = rospy.Subscriber("/xtdrone/leader/cmd_vel_flu", Twist, self.cmd_vel_callback)
self.avoid_vel_sub = rospy.Subscriber("/xtdrone/"+uav_type+'_'+str(self.id)+"/avoid_accel", Vector3, self.avoid_accel_callback)
self.leader_cmd_sub = rospy.Subscriber("/xtdrone/leader/cmd",String, self.cmd_callback)
        for i in range(self.follower_num):# iterate over all followers
rospy.Subscriber('/xtdrone/'+uav_type+'_'+str(i+1)+'/info',String,self.followers_info_callback,i)
self.local_pose_pub = rospy.Publisher("/xtdrone/leader/pose", PoseStamped , queue_size=10)
self.formation_switch_pub = rospy.Publisher("/xtdrone/formation_switch",String, queue_size=10)
self.vel_enu_pub = rospy.Publisher('/xtdrone/'+uav_type+'_'+str(self.id)+'/cmd_vel_enu', Twist, queue_size=10)
self.cmd_pub = rospy.Publisher('/xtdrone/'+uav_type+'_'+str(self.id)+'/cmd', String, queue_size=10)
def local_pose_callback(self, msg):
self.local_pose = msg
def cmd_vel_callback(self, msg):
self.cmd_vel_enu = msg
if msg.linear.z == 0:
            self.hover = True # hover
else:
self.hover = False
def cmd_callback(self, msg):
if msg.data in formation_dict.keys():
self.formation_config = msg.data
else:
self.cmd = msg.data
def avoid_accel_callback(self, msg):
self.avoid_accel = msg
def followers_info_callback(self, msg, id):
self.followers_info[id] = msg.data
#print("follower"+str(id)+":"+ msg.data)
def loop(self):
rospy.init_node('leader')
rate = rospy.Rate(self.f)
while True:
#self.cmd_vel_enu = Twist()
for follower_info in self.followers_info:
if follower_info == "Arrived": # 一架到达
self.follower_arrived_num += 1
if self.follower_arrived_num > self.follower_num - 1:
                self.follower_all_arrived = True # all followers have arrived
if self.follower_all_arrived:
self.formation_switch_pub.publish(self.formation_config)
if self.formation_config == 'pyramid':
if not self.target_height_recorded:
target_height = self.local_pose.pose.position.z + 2
self.target_height_recorded = True
self.cmd_vel_enu.linear.z = self.Kz * (target_height - self.local_pose.pose.position.z)
self.cmd_vel_enu.linear.x += self.avoid_accel.x
self.cmd_vel_enu.linear.y += self.avoid_accel.y
self.cmd_vel_enu.linear.z += self.avoid_accel.z
self.vel_enu_pub.publish(self.cmd_vel_enu)
self.local_pose_pub.publish(self.local_pose)
self.cmd_pub.publish(self.cmd)
rate.sleep()
if __name__ == '__main__':
leader = Leader(sys.argv[1], 0, int(sys.argv[2]))
leader.loop()
| [
"[email protected]"
] | |
8766db5f17f73ece19a8e050eb0f6c2da93a0634 | 02d8a026d63127f045042e03e23acbe6c9675db8 | /vb2py/test/testcollection.py | 68f0506871cd8ec816e607dfee324b6b6168fe80 | [
"BSD-3-Clause"
] | permissive | VB6Hobbyst7/xl_vb2py | 40e77976b452732575e2726fb1f0675b1ab9f86f | 899fec0301140fd8bd313e8c80b3fa839b3f5ee4 | refs/heads/main | 2023-07-28T20:12:11.933183 | 2021-09-23T18:12:02 | 2021-09-23T18:12:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,618 | py | from vb2py.vbclasses import Collection
import unittest
class TestCollection(unittest.TestCase):
def setUp(self):
"""Set up the test"""
self.c = Collection()
# << Collection tests >> (1 of 9)
def testAddNumeric(self):
"""testAddNumeric: should be able to add with numeric indexes"""
for i in range(10):
self.c.Add(i)
for expect, actual in zip(list(range(10)), self.c):
self.assertEqual(expect, actual)
self.assertEqual(self.c.Count(), 10)
# << Collection tests >> (2 of 9)
def testAddBeforeNumeric(self):
"""testAddBeforeNumeric: should be able to add something before something else"""
# Put 1 ... 9 in with 5 missing
for i in range(1, 10):
if i != 5:
self.c.Add(i)
self.c.Add(5, Before=5) # ie before the index 5
for expect, actual in zip(list(range(1, 10)), self.c):
self.assertEqual(expect, actual)
self.assertEqual(self.c.Count(), 9)
# << Collection tests >> (3 of 9)
def testAddAfterNumeric(self):
"""testAddAfterNumeric: should be able to add something after something else"""
# Put 1 ... 9 in with 5 missing
for i in range(1, 10):
if i != 5:
self.c.Add(i)
self.c.Add(5, After=4)
for expect, actual in zip(list(range(1, 10)), self.c):
self.assertEqual(expect, actual)
self.assertEqual(self.c.Count(), 9)
# << Collection tests >> (4 of 9)
def testAddText(self):
"""testAddText: should be able to add with text indexes"""
for i in range(10):
self.c.Add(i, "txt%d" % i)
for expect, actual in zip(list(range(10)), self.c):
self.assertEqual(expect, actual)
self.assertEqual(self.c.Count(), 10)
# << Collection tests >> (5 of 9)
def testAddTextandNumeric(self):
"""testAddTextandNumeric: should be able to add with text and numeric indexes"""
for i in range(10):
self.c.Add(i, "txt%d" % i)
self.c.Add(i)
for i in range(10):
self.assertEqual(self.c.Item("txt%d" % i), i)
self.assertEqual(self.c.Item(i*2+2), i)
self.assertEqual(self.c.Count(), 20)
# << Collection tests >> (6 of 9)
def testItemNumeric(self):
"""testItemNumeric: should be able to get with numeric indexes"""
for i in range(10):
self.c.Add(i)
for i in range(10):
self.assertEqual(i, self.c.Item(i+1))
# << Collection tests >> (7 of 9)
def testItemText(self):
"""testItemText: should be able to get with text indexes"""
for i in range(10):
self.c.Add(i, "txt%d" % i)
for i in range(10):
self.assertEqual(i, self.c.Item("txt%d" % i))
# << Collection tests >> (8 of 9)
def testRemoveNumeric(self):
"""testRemoveNumeric: should be able to remove with numeric indexes"""
for i in range(10):
self.c.Add(i+1)
self.c.Remove(5)
self.assertEqual(self.c.Count(), 9)
for i in self.c:
self.assertNotEqual(i, 5)
# << Collection tests >> (9 of 9)
def testRemoveText(self):
"""testRemoveText: should be able to remove with text indexes"""
for i in range(10):
self.c.Add(i, "txt%d" % i)
self.c.Remove("txt%d" % 5)
self.assertEqual(self.c.Count(), 9)
for i in self.c:
self.assertNotEqual(i, 5)
# -- end -- << Collection tests >>
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
5d4d2ed476aea05494ec90081e7dd8d67f9f8cb0 | 602ea2edb853c5561a45b6aa2783ac894ef408e4 | /res_mlp_pytorch/res_mlp_pytorch.py | 1ac60d23a7068840b29608111b116cc789825440 | [
"MIT"
] | permissive | BadGuy-wang/res-mlp-pytorch | 427d6f1f2279dcfe59d7cee02befb26a0a4dad79 | 562814a406cc418bdb4710aa3bdc569206ac171b | refs/heads/main | 2023-05-05T13:22:46.575901 | 2021-06-03T22:30:40 | 2021-06-03T22:30:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,989 | py | import torch
from torch import nn, einsum
from einops.layers.torch import Rearrange, Reduce
# helpers
def pair(val):
return (val, val) if not isinstance(val, tuple) else val
# classes
class Affine(nn.Module):
def __init__(self, dim):
super().__init__()
self.g = nn.Parameter(torch.ones(1, 1, dim))
self.b = nn.Parameter(torch.zeros(1, 1, dim))
def forward(self, x):
return x * self.g + self.b
class PreAffinePostLayerScale(nn.Module): # https://arxiv.org/abs/2103.17239
def __init__(self, dim, depth, fn):
super().__init__()
if depth <= 18:
init_eps = 0.1
elif depth > 18 and depth <= 24:
init_eps = 1e-5
else:
init_eps = 1e-6
scale = torch.zeros(1, 1, dim).fill_(init_eps)
self.scale = nn.Parameter(scale)
self.affine = Affine(dim)
self.fn = fn
def forward(self, x):
return self.fn(self.affine(x)) * self.scale + x
def ResMLP(*, image_size, patch_size, dim, depth, num_classes, expansion_factor = 4):
image_height, image_width = pair(image_size)
assert (image_height % patch_size) == 0 and (image_width % patch_size) == 0, 'image height and width must be divisible by patch size'
num_patches = (image_height // patch_size) * (image_width // patch_size)
wrapper = lambda i, fn: PreAffinePostLayerScale(dim, i + 1, fn)
return nn.Sequential(
Rearrange('b c (h p1) (w p2) -> b (h w) (p1 p2 c)', p1 = patch_size, p2 = patch_size),
nn.Linear((patch_size ** 2) * 3, dim),
*[nn.Sequential(
wrapper(i, nn.Conv1d(num_patches, num_patches, 1)),
wrapper(i, nn.Sequential(
nn.Linear(dim, dim * expansion_factor),
nn.GELU(),
nn.Linear(dim * expansion_factor, dim)
))
) for i in range(depth)],
Affine(dim),
Reduce('b n c -> b c', 'mean'),
nn.Linear(dim, num_classes)
)
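# A minimal usage sketch (not part of the original module); the image size,
# patch size and batch shape below are arbitrary example values:
if __name__ == '__main__':
    model = ResMLP(image_size=256, patch_size=16, dim=512, depth=12, num_classes=1000)
    img = torch.randn(1, 3, 256, 256)  # (batch, channels, height, width)
    preds = model(img)  # expected shape: torch.Size([1, 1000])
    print(preds.shape)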
| [
"[email protected]"
] | |
312b52cb1b4319add74ab61694c18b56da2451a1 | f80ef3a3cf859b13e8af8433af549b6b1043bf6e | /pyobjc-framework-Cocoa/PyObjCTest/test_nscolorsampler.py | 1b666e6faca75d95e31360218b6b1b293f053d99 | [
"MIT"
] | permissive | ronaldoussoren/pyobjc | 29dc9ca0af838a56105a9ddd62fb38ec415f0b86 | 77b98382e52818690449111cd2e23cd469b53cf5 | refs/heads/master | 2023-09-01T05:15:21.814504 | 2023-06-13T20:00:17 | 2023-06-13T20:00:17 | 243,933,900 | 439 | 49 | null | 2023-06-25T02:49:07 | 2020-02-29T08:43:12 | Python | UTF-8 | Python | false | false | 290 | py | import AppKit
from PyObjCTools.TestSupport import TestCase, min_os_level
class TestNSColorSampler(TestCase):
@min_os_level("10.15")
def test_methods_10_15(self):
self.assertArgIsBlock(
AppKit.NSColorSampler.showSamplerWithSelectionHandler_, 0, b"v@"
)
| [
"[email protected]"
] | |
36cc0b54b41fcc7f8fe680953ecdd8685005c0bc | 6a746abb4dd3f2e0538936f272ed5d051a120c5b | /message_ix_models/model/build.py | f92ab09a1bd72b04109c697d15c7faf224e8c6b0 | [
"Apache-2.0"
] | permissive | OFR-IIASA/message-ix-models | d902d26c10db8215a856032d09f4252e16500c99 | 7459065505f8f3a418086aa620b789b5c5f39cde | refs/heads/main | 2023-06-15T00:16:56.654237 | 2021-07-02T09:33:49 | 2021-07-02T09:33:49 | 380,197,167 | 0 | 0 | Apache-2.0 | 2021-06-25T10:01:47 | 2021-06-25T10:01:47 | null | UTF-8 | Python | false | false | 4,795 | py | import logging
from typing import Callable, Dict, Mapping
import pandas as pd
from ixmp.utils import maybe_check_out, maybe_commit
from message_ix import Scenario
from sdmx.model import Code
from message_ix_models.util import add_par_data, strip_par_data
from message_ix_models.util.scenarioinfo import ScenarioInfo
log = logging.getLogger(__name__)
def apply_spec(
scenario: Scenario,
spec: Mapping[str, ScenarioInfo],
data: Callable = None,
**options,
):
"""Apply `spec` to `scenario`.
Parameters
----------
spec
A 'specification': :class:`dict` with 'require', 'remove', and 'add' keys and
:class:`.ScenarioInfo` objects as values.
data : callable, optional
Function to add data to `scenario`. `data` can either manipulate the scenario
directly, or return a :class:`dict` compatible with :func:`.add_par_data`.
Other parameters
----------------
dry_run : bool
Don't modify `scenario`; only show what would be done. Default :obj:`False`.
Exceptions will still be raised if the elements from ``spec['required']`` are
missing; this serves as a check that the scenario has the required features for
applying the spec.
fast : bool
Do not remove existing parameter data; increases speed on large scenarios.
quiet : bool
Only show log messages at level ``ERROR`` and higher. If :obj:`False` (default),
show log messages at level ``DEBUG`` and higher.
message : str
Commit message.
See also
--------
.add_par_data
.strip_par_data
.Code
.ScenarioInfo
"""
dry_run = options.get("dry_run", False)
log.setLevel(logging.ERROR if options.get("quiet", False) else logging.DEBUG)
if not dry_run:
try:
scenario.remove_solution()
except ValueError:
pass
maybe_check_out(scenario)
dump: Dict[str, pd.DataFrame] = {} # Removed data
for set_name in scenario.set_list():
# Check whether this set is mentioned at all in the spec
if 0 == sum(map(lambda info: len(info.set[set_name]), spec.values())):
# Not mentioned; don't do anything
continue
log.info(f"Set {repr(set_name)}")
# Base contents of the set
base_set = scenario.set(set_name)
# Unpack a multi-dimensional/indexed set to a list of tuples
base = (
list(base_set.itertuples(index=False))
if isinstance(base_set, pd.DataFrame)
else base_set.tolist()
)
log.info(f" {len(base)} elements")
# log.debug(', '.join(map(repr, base))) # All elements; verbose
# Check for required elements
require = spec["require"].set[set_name]
log.info(f" Check {len(require)} required elements")
# Raise an exception about the first missing element
missing = list(filter(lambda e: e not in base, require))
if len(missing):
log.error(f" {len(missing)} elements not found: {repr(missing)}")
raise ValueError
# Remove elements and associated parameter values
remove = spec["remove"].set[set_name]
for element in remove:
msg = f"{repr(element)} and associated parameter elements"
if options.get("fast", False):
log.info(f" Skip removing {msg} (fast=True)")
continue
log.info(f" Remove {msg}")
strip_par_data(scenario, set_name, element, dry_run=dry_run, dump=dump)
# Add elements
add = [] if dry_run else spec["add"].set[set_name]
for element in add:
scenario.add_set(
set_name,
element.id if isinstance(element, Code) else element,
)
if len(add):
log.info(f" Add {len(add)} element(s)")
log.debug(" " + ", ".join(map(repr, add)))
log.info(" ---")
N_removed = sum(len(d) for d in dump.values())
log.info(f"{N_removed} parameter elements removed")
# Add units to the Platform before adding data
for unit in spec["add"].set["unit"]:
unit = unit if isinstance(unit, Code) else Code(id=unit, name=unit)
log.info(f"Add unit {repr(unit)}")
scenario.platform.add_unit(unit.id, comment=str(unit.name))
# Add data
if callable(data):
result = data(scenario, dry_run=dry_run)
if result:
# `data` function returned some data; use add_par_data()
add_par_data(scenario, result, dry_run=dry_run)
# Finalize
log.info("Commit results.")
maybe_commit(
scenario,
condition=not dry_run,
message=options.get("message", f"{__name__}.apply_spec()"),
)
| [
"[email protected]"
] | |
de7fe8a3116f89860feb58cc06238a1c9f045460 | 924763dfaa833a898a120c411a5ed3b2d9b2f8c7 | /compiled/construct/enum_int_range_s.py | bc6c06e80d04163057a993b93a7ea82933e7a6d2 | [
"MIT"
] | permissive | kaitai-io/ci_targets | 31257dfdf77044d32a659ab7b8ec7da083f12d25 | 2f06d144c5789ae909225583df32e2ceb41483a3 | refs/heads/master | 2023-08-25T02:27:30.233334 | 2023-08-04T18:54:45 | 2023-08-04T18:54:45 | 87,530,818 | 4 | 6 | MIT | 2023-07-28T22:12:01 | 2017-04-07T09:44:44 | C++ | UTF-8 | Python | false | false | 383 | py | from construct import *
from construct.lib import *
import enum
class enum_int_range_s__constants(enum.IntEnum):
int_min = -2147483648
zero = 0
int_max = 2147483647
enum_int_range_s = Struct(
'f1' / Enum(Int32sb, enum_int_range_s__constants),
'f2' / Enum(Int32sb, enum_int_range_s__constants),
'f3' / Enum(Int32sb, enum_int_range_s__constants),
)
_schema = enum_int_range_s
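# Hedged usage sketch (not part of the generated file): each field is one
# big-endian signed int32, so 12 bytes parse into the three enum labels; the
# byte values below are arbitrary examples.
if __name__ == '__main__':
    sample = b'\x80\x00\x00\x00' b'\x00\x00\x00\x00' b'\x7f\xff\xff\xff'
    parsed = enum_int_range_s.parse(sample)
    print(parsed.f1, parsed.f2, parsed.f3)  # -> int_min zero int_max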
| [
"[email protected]"
] | |
27af4d42e1a0cdc16826948e7d69e7e6b8a9ef94 | 5b683c7f0cc23b1a2b8927755f5831148f4f7e1c | /Python_Study/DataStructureAndAlgorithm/classical_algorithm/binary_search.py | 556f7aa8a3e48cec1ab4feb7b9ccb23c04cbbe3c | [] | no_license | Shmilyqjj/Shmily-py | 970def5a53a77aa33b93404e18c57130f134772a | 770fc26607ad3e05a4d7774a769bc742582c7b64 | refs/heads/master | 2023-09-02T04:43:39.192052 | 2023-08-31T03:28:39 | 2023-08-31T03:28:39 | 199,372,223 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,408 | py | #!/usr/bin/env python
# encoding: utf-8
"""
:Description: Binary search algorithm
:Author: 佳境Shmily
:Create Time: 2020/3/15 21:34
:File: binary_search
:Site: shmily-qjj.top
:Desc:
    Binary search scenarios: find a given number, find the left boundary, find the right boundary.
"""
import logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# import sys
#
# sys.setrecursionlimit(9000000)
def binary_search(sorted_list, item, asc=True):
"""
    Iterative (non-recursive) binary search.
    Find a number; if it exists, return its index.
    The most basic form of binary search:
    Assume the list is sorted in ascending order and compare the key of the middle record with the search key; if they are equal, the search succeeds.
    Otherwise the middle record splits the list into a front and a back sub-list: if the middle key is greater than the search key, continue in the front sub-list, otherwise in the back sub-list.
    Repeat until a matching record is found (success) or the sub-list no longer exists (failure).
    :param asc: the input list is assumed ascending by default; if descending, it is reversed first
    :param sorted_list: sorted list
    :param item: int, the element to look for
    :return: the index if found, otherwise -1
"""
sorted_list = sorted_list if asc else list(reversed(sorted_list))
    low = 0 # index of the smallest element
    high = len(sorted_list)-1 # index of the largest element
    n = 0 # number of splits
while low <= high:
        mid = (low + high) // 2  # middle index
        n += 1
        if sorted_list[mid]==item:
            logger.info('Binary search split %s times and found the element' % n)
            return mid
        if sorted_list[mid]<item: # the target is greater than the middle element, so search the back half
            low = mid + 1
        else: # the target is less than the middle element, so search the front half
            high = (mid-1)
    logger.info('Binary search split %s times; element not found.' % n)
return -1
def recursion_binary_search(sorted_list, start, end, item):
"""
    Recursive binary search for one element of a sorted array.
    :param sorted_list: sorted array, assumed ascending
    :param start: start index
    :param end: end index
    :param item: element to search for
    :return: the index if found, otherwise -1
"""
    if start > end:  # must be >, not >=: when mid + 1 equals end, mid + 1 may well be the match
        return -1
    mid = (start + end) // 2  # floor division to get the middle index
if sorted_list[mid] == item:
return mid
elif item > sorted_list[mid]:
return recursion_binary_search(sorted_list, mid + 1, end, item)
elif item < sorted_list[mid]:
return recursion_binary_search(sorted_list, start, mid - 1, item)
return -1
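# A hedged sketch (not in the original file) of the 'left boundary' scenario
# named in the module docstring: return the index of the FIRST element equal
# to item in an ascending list, or -1 if absent. The right-boundary variant
# is symmetric (keep searching right instead of left).
def left_boundary_search(sorted_list, item):
    low, high = 0, len(sorted_list) - 1
    result = -1
    while low <= high:
        mid = (low + high) // 2
        if sorted_list[mid] >= item:
            if sorted_list[mid] == item:
                result = mid  # remember this match, keep looking further left
            high = mid - 1
        else:
            low = mid + 1
    return result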
if __name__ == '__main__':
m=[1,2,3,4,8,9,11,12,14,18,19,20,28,29]
print(binary_search(m,20))
m1 = [28, 20, 19, 18, 14, 12, 11, 9, 8, 4, 3, 2, 1]
print(binary_search(m1,14,False))
# #########################################################
m=[1,2,3,4,8,9,11,12,14,18,19,20,28]
print(recursion_binary_search(m, 0, len(m) - 1, 14))
| [
"[email protected]"
] | |
a5d7909a17f6af66a01de4d84f29e1e7ee96e4b5 | 380a47268c5975473a2e7c38c747bc3bdbd981b1 | /benchmark/third_party/transformers/examples/pytorch/summarization/run_summarization_no_trainer.py | 3bd925569bb6a8347b8f7e8562568fc34cfb023a | [
"Apache-2.0"
] | permissive | FMInference/FlexGen | 07aa9b1918c19b02077e13ad07e76840843810dd | d34f7b4b43ed87a374f394b0535ed685af66197b | refs/heads/main | 2023-07-24T02:29:51.179817 | 2023-07-21T22:38:31 | 2023-07-21T22:38:31 | 602,270,517 | 6,821 | 411 | Apache-2.0 | 2023-07-07T22:59:24 | 2023-02-15T21:18:53 | Python | UTF-8 | Python | false | false | 31,761 | py | #!/usr/bin/env python
# coding=utf-8
# Copyright The HuggingFace Team and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Fine-tuning a 🤗 Transformers model on summarization.
"""
# You can also adapt this script on your own summarization task. Pointers for this are left as comments.
import argparse
import json
import logging
import math
import os
import random
from pathlib import Path
import datasets
import nltk
import numpy as np
import torch
from datasets import load_dataset
from torch.utils.data import DataLoader
from tqdm.auto import tqdm
import evaluate
import transformers
from accelerate import Accelerator
from accelerate.logging import get_logger
from accelerate.utils import set_seed
from filelock import FileLock
from huggingface_hub import Repository
from transformers import (
CONFIG_MAPPING,
MODEL_MAPPING,
AutoConfig,
AutoModelForSeq2SeqLM,
AutoTokenizer,
DataCollatorForSeq2Seq,
SchedulerType,
get_scheduler,
)
from transformers.utils import check_min_version, get_full_repo_name, is_offline_mode, send_example_telemetry
from transformers.utils.versions import require_version
# Will error if the minimal version of Transformers is not installed. Remove at your own risks.
check_min_version("4.24.0")
logger = get_logger(__name__)
require_version("datasets>=1.8.0", "To fix: pip install -r examples/pytorch/summarization/requirements.txt")
# You should update this to your particular problem to have better documentation of `model_type`
MODEL_CONFIG_CLASSES = list(MODEL_MAPPING.keys())
MODEL_TYPES = tuple(conf.model_type for conf in MODEL_CONFIG_CLASSES)
try:
nltk.data.find("tokenizers/punkt")
except (LookupError, OSError):
if is_offline_mode():
raise LookupError(
"Offline mode: run this script without TRANSFORMERS_OFFLINE first to download nltk data files"
)
with FileLock(".lock") as lock:
nltk.download("punkt", quiet=True)
summarization_name_mapping = {
"amazon_reviews_multi": ("review_body", "review_title"),
"big_patent": ("description", "abstract"),
"cnn_dailymail": ("article", "highlights"),
"orange_sum": ("text", "summary"),
"pn_summary": ("article", "summary"),
"psc": ("extract_text", "summary_text"),
"samsum": ("dialogue", "summary"),
"thaisum": ("body", "summary"),
"xglue": ("news_body", "news_title"),
"xsum": ("document", "summary"),
"wiki_summary": ("article", "highlights"),
}
def parse_args():
parser = argparse.ArgumentParser(description="Finetune a transformers model on a summarization task")
parser.add_argument(
"--dataset_name",
type=str,
default=None,
help="The name of the dataset to use (via the datasets library).",
)
parser.add_argument(
"--dataset_config_name",
type=str,
default=None,
help="The configuration name of the dataset to use (via the datasets library).",
)
parser.add_argument(
"--train_file", type=str, default=None, help="A csv or a json file containing the training data."
)
parser.add_argument(
"--validation_file", type=str, default=None, help="A csv or a json file containing the validation data."
)
parser.add_argument(
"--ignore_pad_token_for_loss",
type=bool,
default=True,
help="Whether to ignore the tokens corresponding to padded labels in the loss computation or not.",
)
parser.add_argument(
"--max_source_length",
type=int,
default=1024,
help=(
"The maximum total input sequence length after "
"tokenization.Sequences longer than this will be truncated, sequences shorter will be padded."
),
)
parser.add_argument(
"--source_prefix",
type=str,
default=None,
help="A prefix to add before every source text (useful for T5 models).",
)
parser.add_argument(
"--preprocessing_num_workers",
type=int,
default=None,
help="The number of processes to use for the preprocessing.",
)
parser.add_argument(
"--overwrite_cache", action="store_true", help="Overwrite the cached training and evaluation sets"
)
parser.add_argument(
"--max_target_length",
type=int,
default=128,
help=(
"The maximum total sequence length for target text after "
"tokenization. Sequences longer than this will be truncated, sequences shorter will be padded."
"during ``evaluate`` and ``predict``."
),
)
parser.add_argument(
"--val_max_target_length",
type=int,
default=None,
help=(
"The maximum total sequence length for validation "
"target text after tokenization.Sequences longer than this will be truncated, sequences shorter will be "
"padded. Will default to `max_target_length`.This argument is also used to override the ``max_length`` "
"param of ``model.generate``, which is used during ``evaluate`` and ``predict``."
),
)
parser.add_argument(
"--max_length",
type=int,
default=128,
help=(
"The maximum total input sequence length after tokenization. Sequences longer than this will be truncated,"
" sequences shorter will be padded if `--pad_to_max_lengh` is passed."
),
)
parser.add_argument(
"--num_beams",
type=int,
default=None,
help=(
"Number of beams to use for evaluation. This argument will be "
"passed to ``model.generate``, which is used during ``evaluate`` and ``predict``."
),
)
parser.add_argument(
"--pad_to_max_length",
action="store_true",
help="If passed, pad all samples to `max_length`. Otherwise, dynamic padding is used.",
)
parser.add_argument(
"--model_name_or_path",
type=str,
help="Path to pretrained model or model identifier from huggingface.co/models.",
required=False,
)
parser.add_argument(
"--config_name",
type=str,
default=None,
help="Pretrained config name or path if not the same as model_name",
)
parser.add_argument(
"--tokenizer_name",
type=str,
default=None,
help="Pretrained tokenizer name or path if not the same as model_name",
)
parser.add_argument(
"--text_column",
type=str,
default=None,
help="The name of the column in the datasets containing the full texts (for summarization).",
)
parser.add_argument(
"--summary_column",
type=str,
default=None,
help="The name of the column in the datasets containing the summaries (for summarization).",
)
parser.add_argument(
"--use_slow_tokenizer",
action="store_true",
help="If passed, will use a slow tokenizer (not backed by the 🤗 Tokenizers library).",
)
parser.add_argument(
"--per_device_train_batch_size",
type=int,
default=8,
help="Batch size (per device) for the training dataloader.",
)
parser.add_argument(
"--per_device_eval_batch_size",
type=int,
default=8,
help="Batch size (per device) for the evaluation dataloader.",
)
parser.add_argument(
"--learning_rate",
type=float,
default=5e-5,
help="Initial learning rate (after the potential warmup period) to use.",
)
parser.add_argument("--weight_decay", type=float, default=0.0, help="Weight decay to use.")
parser.add_argument("--num_train_epochs", type=int, default=3, help="Total number of training epochs to perform.")
parser.add_argument(
"--max_train_steps",
type=int,
default=None,
help="Total number of training steps to perform. If provided, overrides num_train_epochs.",
)
parser.add_argument(
"--gradient_accumulation_steps",
type=int,
default=1,
help="Number of updates steps to accumulate before performing a backward/update pass.",
)
parser.add_argument(
"--lr_scheduler_type",
type=SchedulerType,
default="linear",
help="The scheduler type to use.",
choices=["linear", "cosine", "cosine_with_restarts", "polynomial", "constant", "constant_with_warmup"],
)
parser.add_argument(
"--num_warmup_steps", type=int, default=0, help="Number of steps for the warmup in the lr scheduler."
)
parser.add_argument("--output_dir", type=str, default=None, help="Where to store the final model.")
parser.add_argument("--seed", type=int, default=None, help="A seed for reproducible training.")
parser.add_argument(
"--model_type",
type=str,
default=None,
help="Model type to use if training from scratch.",
choices=MODEL_TYPES,
)
parser.add_argument("--push_to_hub", action="store_true", help="Whether or not to push the model to the Hub.")
parser.add_argument(
"--hub_model_id", type=str, help="The name of the repository to keep in sync with the local `output_dir`."
)
parser.add_argument("--hub_token", type=str, help="The token to use to push to the Model Hub.")
parser.add_argument(
"--checkpointing_steps",
type=str,
default=None,
help="Whether the various states should be saved at the end of every n steps, or 'epoch' for each epoch.",
)
parser.add_argument(
"--resume_from_checkpoint",
type=str,
default=None,
help="If the training should continue from a checkpoint folder.",
)
parser.add_argument(
"--with_tracking",
action="store_true",
help="Whether to enable experiment trackers for logging.",
)
parser.add_argument(
"--report_to",
type=str,
default="all",
help=(
'The integration to report the results and logs to. Supported platforms are `"tensorboard"`,'
' `"wandb"` and `"comet_ml"`. Use `"all"` (default) to report to all integrations.'
"Only applicable when `--with_tracking` is passed."
),
)
args = parser.parse_args()
# Sanity checks
if args.dataset_name is None and args.train_file is None and args.validation_file is None:
raise ValueError("Need either a dataset name or a training/validation file.")
else:
if args.train_file is not None:
extension = args.train_file.split(".")[-1]
assert extension in ["csv", "json"], "`train_file` should be a csv or a json file."
if args.validation_file is not None:
extension = args.validation_file.split(".")[-1]
assert extension in ["csv", "json"], "`validation_file` should be a csv or a json file."
if args.push_to_hub:
assert args.output_dir is not None, "Need an `output_dir` to create a repo when `--push_to_hub` is passed."
return args
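# Example invocation (illustrative sketch only; t5-small and cnn_dailymail are
# the stock public model/dataset names, substitute your own):
#
#   accelerate launch run_summarization_no_trainer.py \
#       --model_name_or_path t5-small \
#       --dataset_name cnn_dailymail \
#       --dataset_config_name "3.0.0" \
#       --source_prefix "summarize: " \
#       --output_dir /tmp/tst-summarization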
def main():
args = parse_args()
# Sending telemetry. Tracking the example usage helps us better allocate resources to maintain them. The
# information sent is the one passed as arguments along with your Python/PyTorch versions.
send_example_telemetry("run_summarization_no_trainer", args)
# Initialize the accelerator. We will let the accelerator handle device placement for us in this example.
# If we're using tracking, we also need to initialize it here and it will by default pick up all supported trackers
# in the environment
accelerator_log_kwargs = {}
if args.with_tracking:
accelerator_log_kwargs["log_with"] = args.report_to
accelerator_log_kwargs["logging_dir"] = args.output_dir
accelerator = Accelerator(gradient_accumulation_steps=args.gradient_accumulation_steps, **accelerator_log_kwargs)
if args.source_prefix is None and args.model_name_or_path in [
"t5-small",
"t5-base",
"t5-large",
"t5-3b",
"t5-11b",
]:
logger.warning(
"You're running a t5 model but didn't provide a source prefix, which is the expected, e.g. with "
"`--source_prefix 'summarize: ' `"
)
# Make one log on every process with the configuration for debugging.
logging.basicConfig(
format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
datefmt="%m/%d/%Y %H:%M:%S",
level=logging.INFO,
)
logger.info(accelerator.state, main_process_only=False)
if accelerator.is_local_main_process:
datasets.utils.logging.set_verbosity_warning()
transformers.utils.logging.set_verbosity_info()
else:
datasets.utils.logging.set_verbosity_error()
transformers.utils.logging.set_verbosity_error()
# If passed along, set the training seed now.
if args.seed is not None:
set_seed(args.seed)
# Handle the repository creation
if accelerator.is_main_process:
if args.push_to_hub:
if args.hub_model_id is None:
repo_name = get_full_repo_name(Path(args.output_dir).name, token=args.hub_token)
else:
repo_name = args.hub_model_id
repo = Repository(args.output_dir, clone_from=repo_name)
with open(os.path.join(args.output_dir, ".gitignore"), "w+") as gitignore:
if "step_*" not in gitignore:
gitignore.write("step_*\n")
if "epoch_*" not in gitignore:
gitignore.write("epoch_*\n")
elif args.output_dir is not None:
os.makedirs(args.output_dir, exist_ok=True)
accelerator.wait_for_everyone()
# Get the datasets: you can either provide your own CSV/JSON/TXT training and evaluation files (see below)
# or just provide the name of one of the public datasets available on the hub at https://huggingface.co/datasets/
# (the dataset will be downloaded automatically from the datasets Hub).
#
# For CSV/JSON files, this script will use the column called 'text' or the first column if no column called
# 'text' is found. You can easily tweak this behavior (see below).
#
    # In distributed training, the load_dataset function guarantees that only one local process can concurrently
# download the dataset.
if args.dataset_name is not None:
# Downloading and loading a dataset from the hub.
raw_datasets = load_dataset(args.dataset_name, args.dataset_config_name)
else:
data_files = {}
if args.train_file is not None:
data_files["train"] = args.train_file
if args.validation_file is not None:
data_files["validation"] = args.validation_file
extension = args.train_file.split(".")[-1]
raw_datasets = load_dataset(extension, data_files=data_files)
# See more about loading any type of standard or custom dataset (from files, python dict, pandas DataFrame, etc) at
# https://huggingface.co/docs/datasets/loading_datasets.html.
# Load pretrained model and tokenizer
#
# In distributed training, the .from_pretrained methods guarantee that only one local process can concurrently
# download model & vocab.
if args.config_name:
config = AutoConfig.from_pretrained(args.config_name)
elif args.model_name_or_path:
config = AutoConfig.from_pretrained(args.model_name_or_path)
else:
config = CONFIG_MAPPING[args.model_type]()
logger.warning("You are instantiating a new config instance from scratch.")
if args.tokenizer_name:
tokenizer = AutoTokenizer.from_pretrained(args.tokenizer_name, use_fast=not args.use_slow_tokenizer)
elif args.model_name_or_path:
tokenizer = AutoTokenizer.from_pretrained(args.model_name_or_path, use_fast=not args.use_slow_tokenizer)
else:
raise ValueError(
"You are instantiating a new tokenizer from scratch. This is not supported by this script."
"You can do it from another script, save it, and load it from here, using --tokenizer_name."
)
if args.model_name_or_path:
model = AutoModelForSeq2SeqLM.from_pretrained(
args.model_name_or_path,
from_tf=bool(".ckpt" in args.model_name_or_path),
config=config,
)
else:
logger.info("Training new model from scratch")
model = AutoModelForSeq2SeqLM.from_config(config)
model.resize_token_embeddings(len(tokenizer))
if model.config.decoder_start_token_id is None:
raise ValueError("Make sure that `config.decoder_start_token_id` is correctly defined")
prefix = args.source_prefix if args.source_prefix is not None else ""
# Preprocessing the datasets.
# First we tokenize all the texts.
column_names = raw_datasets["train"].column_names
# Get the column names for input/target.
dataset_columns = summarization_name_mapping.get(args.dataset_name, None)
if args.text_column is None:
text_column = dataset_columns[0] if dataset_columns is not None else column_names[0]
else:
text_column = args.text_column
if text_column not in column_names:
raise ValueError(
f"--text_column' value '{args.text_column}' needs to be one of: {', '.join(column_names)}"
)
if args.summary_column is None:
summary_column = dataset_columns[1] if dataset_columns is not None else column_names[1]
else:
summary_column = args.summary_column
if summary_column not in column_names:
raise ValueError(
f"--summary_column' value '{args.summary_column}' needs to be one of: {', '.join(column_names)}"
)
# Temporarily set max_target_length for training.
max_target_length = args.max_target_length
padding = "max_length" if args.pad_to_max_length else False
def preprocess_function(examples):
inputs = examples[text_column]
targets = examples[summary_column]
inputs = [prefix + inp for inp in inputs]
model_inputs = tokenizer(inputs, max_length=args.max_source_length, padding=padding, truncation=True)
# Tokenize targets with the `text_target` keyword argument
labels = tokenizer(text_target=targets, max_length=max_target_length, padding=padding, truncation=True)
# If we are padding here, replace all tokenizer.pad_token_id in the labels by -100 when we want to ignore
# padding in the loss.
if padding == "max_length" and args.ignore_pad_token_for_loss:
labels["input_ids"] = [
[(l if l != tokenizer.pad_token_id else -100) for l in label] for label in labels["input_ids"]
]
model_inputs["labels"] = labels["input_ids"]
return model_inputs
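    # Worked illustration (added sketch): with padding == "max_length" and
    # --ignore_pad_token_for_loss, a label row such as [200, 300, pad, pad]
    # becomes [200, 300, -100, -100]; -100 is the ignore_index of the
    # cross-entropy loss, so padded positions contribute nothing to training.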
with accelerator.main_process_first():
processed_datasets = raw_datasets.map(
preprocess_function,
batched=True,
num_proc=args.preprocessing_num_workers,
remove_columns=column_names,
load_from_cache_file=not args.overwrite_cache,
desc="Running tokenizer on dataset",
)
train_dataset = processed_datasets["train"]
eval_dataset = processed_datasets["validation"]
# Log a few random samples from the training set:
for index in random.sample(range(len(train_dataset)), 1):
logger.info(f"Sample {index} of the training set: {train_dataset[index]}.")
label_pad_token_id = -100 if args.ignore_pad_token_for_loss else tokenizer.pad_token_id
data_collator = DataCollatorForSeq2Seq(
tokenizer,
model=model,
label_pad_token_id=label_pad_token_id,
pad_to_multiple_of=8 if accelerator.use_fp16 else None,
)
def postprocess_text(preds, labels):
preds = [pred.strip() for pred in preds]
labels = [label.strip() for label in labels]
# rougeLSum expects newline after each sentence
preds = ["\n".join(nltk.sent_tokenize(pred)) for pred in preds]
labels = ["\n".join(nltk.sent_tokenize(label)) for label in labels]
return preds, labels
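    # Added sketch: rougeLSum is computed over newline-separated sentences, so a
    # prediction like "First point. Second point." is rewritten above as
    # "First point.\nSecond point." before it reaches the metric.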
train_dataloader = DataLoader(
train_dataset, shuffle=True, collate_fn=data_collator, batch_size=args.per_device_train_batch_size
)
eval_dataloader = DataLoader(eval_dataset, collate_fn=data_collator, batch_size=args.per_device_eval_batch_size)
# Optimizer
# Split weights in two groups, one with weight decay and the other not.
no_decay = ["bias", "LayerNorm.weight", "layer_norm.weight"]
optimizer_grouped_parameters = [
{
"params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],
"weight_decay": args.weight_decay,
},
{
"params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)],
"weight_decay": 0.0,
},
]
optimizer = torch.optim.AdamW(optimizer_grouped_parameters, lr=args.learning_rate)
# Scheduler and math around the number of training steps.
overrode_max_train_steps = False
num_update_steps_per_epoch = math.ceil(len(train_dataloader) / args.gradient_accumulation_steps)
if args.max_train_steps is None:
args.max_train_steps = args.num_train_epochs * num_update_steps_per_epoch
overrode_max_train_steps = True
lr_scheduler = get_scheduler(
name=args.lr_scheduler_type,
optimizer=optimizer,
num_warmup_steps=args.num_warmup_steps * args.gradient_accumulation_steps,
num_training_steps=args.max_train_steps * args.gradient_accumulation_steps,
)
# Prepare everything with our `accelerator`.
model, optimizer, train_dataloader, eval_dataloader, lr_scheduler = accelerator.prepare(
model, optimizer, train_dataloader, eval_dataloader, lr_scheduler
)
# We need to recalculate our total training steps as the size of the training dataloader may have changed.
num_update_steps_per_epoch = math.ceil(len(train_dataloader) / args.gradient_accumulation_steps)
if overrode_max_train_steps:
args.max_train_steps = args.num_train_epochs * num_update_steps_per_epoch
# Afterwards we recalculate our number of training epochs
args.num_train_epochs = math.ceil(args.max_train_steps / num_update_steps_per_epoch)
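    # Worked example (added, with made-up numbers): 1000 batches per epoch and
    # gradient_accumulation_steps=4 give ceil(1000 / 4) = 250 update steps per
    # epoch, so num_train_epochs=3 yields max_train_steps=750 unless
    # --max_train_steps was passed explicitly.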
# Figure out how many steps we should save the Accelerator states
checkpointing_steps = args.checkpointing_steps
if checkpointing_steps is not None and checkpointing_steps.isdigit():
checkpointing_steps = int(checkpointing_steps)
# We need to initialize the trackers we use, and also store our configuration.
# The trackers initializes automatically on the main process.
if args.with_tracking:
experiment_config = vars(args)
# TensorBoard cannot log Enums, need the raw value
experiment_config["lr_scheduler_type"] = experiment_config["lr_scheduler_type"].value
accelerator.init_trackers("summarization_no_trainer", experiment_config)
# Metric
metric = evaluate.load("rouge")
# Train!
total_batch_size = args.per_device_train_batch_size * accelerator.num_processes * args.gradient_accumulation_steps
logger.info("***** Running training *****")
logger.info(f" Num examples = {len(train_dataset)}")
logger.info(f" Num Epochs = {args.num_train_epochs}")
logger.info(f" Instantaneous batch size per device = {args.per_device_train_batch_size}")
logger.info(f" Total train batch size (w. parallel, distributed & accumulation) = {total_batch_size}")
logger.info(f" Gradient Accumulation steps = {args.gradient_accumulation_steps}")
logger.info(f" Total optimization steps = {args.max_train_steps}")
# Only show the progress bar once on each machine.
progress_bar = tqdm(range(args.max_train_steps), disable=not accelerator.is_local_main_process)
completed_steps = 0
starting_epoch = 0
# Potentially load in the weights and states from a previous save
if args.resume_from_checkpoint:
        if args.resume_from_checkpoint is not None and args.resume_from_checkpoint != "":
accelerator.print(f"Resumed from checkpoint: {args.resume_from_checkpoint}")
accelerator.load_state(args.resume_from_checkpoint)
path = os.path.basename(args.resume_from_checkpoint)
else:
# Get the most recent checkpoint
dirs = [f.name for f in os.scandir(os.getcwd()) if f.is_dir()]
dirs.sort(key=os.path.getctime)
path = dirs[-1] # Sorts folders by date modified, most recent checkpoint is the last
# Extract `epoch_{i}` or `step_{i}`
training_difference = os.path.splitext(path)[0]
if "epoch" in training_difference:
starting_epoch = int(training_difference.replace("epoch_", "")) + 1
resume_step = None
else:
resume_step = int(training_difference.replace("step_", ""))
starting_epoch = resume_step // len(train_dataloader)
resume_step -= starting_epoch * len(train_dataloader)
for epoch in range(starting_epoch, args.num_train_epochs):
model.train()
if args.with_tracking:
total_loss = 0
for step, batch in enumerate(train_dataloader):
# We need to skip steps until we reach the resumed step
if args.resume_from_checkpoint and epoch == starting_epoch:
if resume_step is not None and step < resume_step:
completed_steps += 1
continue
with accelerator.accumulate(model):
outputs = model(**batch)
loss = outputs.loss
# We keep track of the loss at each epoch
if args.with_tracking:
total_loss += loss.detach().float()
accelerator.backward(loss)
optimizer.step()
lr_scheduler.step()
optimizer.zero_grad()
# Checks if the accelerator has performed an optimization step behind the scenes
if accelerator.sync_gradients:
progress_bar.update(1)
completed_steps += 1
if isinstance(checkpointing_steps, int):
if completed_steps % checkpointing_steps == 0:
output_dir = f"step_{completed_steps }"
if args.output_dir is not None:
output_dir = os.path.join(args.output_dir, output_dir)
accelerator.save_state(output_dir)
if completed_steps >= args.max_train_steps:
break
model.eval()
if args.val_max_target_length is None:
args.val_max_target_length = args.max_target_length
gen_kwargs = {
"max_length": args.val_max_target_length if args is not None else config.max_length,
"num_beams": args.num_beams,
}
for step, batch in enumerate(eval_dataloader):
with torch.no_grad():
generated_tokens = accelerator.unwrap_model(model).generate(
batch["input_ids"],
attention_mask=batch["attention_mask"],
**gen_kwargs,
)
generated_tokens = accelerator.pad_across_processes(
generated_tokens, dim=1, pad_index=tokenizer.pad_token_id
)
labels = batch["labels"]
if not args.pad_to_max_length:
# If we did not pad to max length, we need to pad the labels too
labels = accelerator.pad_across_processes(batch["labels"], dim=1, pad_index=tokenizer.pad_token_id)
generated_tokens, labels = accelerator.gather_for_metrics((generated_tokens, labels))
generated_tokens = generated_tokens.cpu().numpy()
labels = labels.cpu().numpy()
if args.ignore_pad_token_for_loss:
# Replace -100 in the labels as we can't decode them.
labels = np.where(labels != -100, labels, tokenizer.pad_token_id)
if isinstance(generated_tokens, tuple):
generated_tokens = generated_tokens[0]
decoded_preds = tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
decoded_labels = tokenizer.batch_decode(labels, skip_special_tokens=True)
decoded_preds, decoded_labels = postprocess_text(decoded_preds, decoded_labels)
                decoded_preds, decoded_labels = accelerator.gather_for_metrics((decoded_preds, decoded_labels))
metric.add_batch(
predictions=decoded_preds,
references=decoded_labels,
)
result = metric.compute(use_stemmer=True)
result = {k: round(v * 100, 4) for k, v in result.items()}
logger.info(result)
if args.with_tracking:
result["train_loss"] = total_loss.item() / len(train_dataloader)
result["epoch"] = epoch
result["step"] = completed_steps
accelerator.log(result, step=completed_steps)
if args.push_to_hub and epoch < args.num_train_epochs - 1:
accelerator.wait_for_everyone()
unwrapped_model = accelerator.unwrap_model(model)
unwrapped_model.save_pretrained(
args.output_dir, is_main_process=accelerator.is_main_process, save_function=accelerator.save
)
if accelerator.is_main_process:
tokenizer.save_pretrained(args.output_dir)
repo.push_to_hub(
commit_message=f"Training in progress epoch {epoch}", blocking=False, auto_lfs_prune=True
)
if args.checkpointing_steps == "epoch":
output_dir = f"epoch_{epoch}"
if args.output_dir is not None:
output_dir = os.path.join(args.output_dir, output_dir)
accelerator.save_state(output_dir)
if args.output_dir is not None:
accelerator.wait_for_everyone()
unwrapped_model = accelerator.unwrap_model(model)
unwrapped_model.save_pretrained(
args.output_dir, is_main_process=accelerator.is_main_process, save_function=accelerator.save
)
if accelerator.is_main_process:
tokenizer.save_pretrained(args.output_dir)
if args.push_to_hub:
repo.push_to_hub(commit_message="End of training", auto_lfs_prune=True)
with open(os.path.join(args.output_dir, "all_results.json"), "w") as f:
json.dump(
{
"eval_rouge1": result["rouge1"],
"eval_rouge2": result["rouge2"],
"eval_rougeL": result["rougeL"],
"eval_rougeLsum": result["rougeLsum"],
},
f,
)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
9adb2f941bf5d47c462c44dae8d72de1b9269d95 | d4a569dcf616b7f05e53a44803e38196b436b8b9 | /[email protected]/Lib/site-packages/django/test/utils.py | 235136ed67432f1217462c16026362efa3fe60c3 | [
"MIT"
] | permissive | nverbois/TFE21-232 | ac3178d24939c872c02a671c0f1d8cc471af516b | 7113837b5263b5c508bfc6903cb6982b48aa7ee4 | refs/heads/main | 2023-06-05T18:50:59.207392 | 2021-06-25T19:54:40 | 2021-06-25T19:54:40 | 337,691,391 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 29,893 | py | import asyncio
import logging
import re
import sys
import time
import warnings
from contextlib import contextmanager
from functools import wraps
from io import StringIO
from itertools import chain
from types import SimpleNamespace
from unittest import TestCase, skipIf, skipUnless
from xml.dom.minidom import Node, parseString
from django.apps import apps
from django.apps.registry import Apps
from django.conf import UserSettingsHolder, settings
from django.core import mail
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import request_started
from django.db import DEFAULT_DB_ALIAS, connections, reset_queries
from django.db.models.options import Options
from django.template import Template
from django.test.signals import setting_changed, template_rendered
from django.urls import get_script_prefix, set_script_prefix
from django.utils.translation import deactivate
try:
import jinja2
except ImportError:
jinja2 = None
__all__ = (
"Approximate",
"ContextList",
"isolate_lru_cache",
"get_runner",
"modify_settings",
"override_settings",
"requires_tz_support",
"setup_test_environment",
"teardown_test_environment",
)
TZ_SUPPORT = hasattr(time, "tzset")
class Approximate:
def __init__(self, val, places=7):
self.val = val
self.places = places
def __repr__(self):
return repr(self.val)
def __eq__(self, other):
return self.val == other or round(abs(self.val - other), self.places) == 0
class ContextList(list):
"""
A wrapper that provides direct key access to context items contained
in a list of context objects.
"""
def __getitem__(self, key):
if isinstance(key, str):
for subcontext in self:
if key in subcontext:
return subcontext[key]
raise KeyError(key)
else:
return super().__getitem__(key)
def get(self, key, default=None):
try:
return self.__getitem__(key)
except KeyError:
return default
def __contains__(self, key):
try:
self[key]
except KeyError:
return False
return True
def keys(self):
"""
Flattened keys of subcontexts.
"""
return set(chain.from_iterable(d for subcontext in self for d in subcontext))
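# Illustrative sketch (added; mirrors how Django's test client exposes
# response.context): a lookup such as
#
#   response.context["form"]
#
# returns the value from the first subcontext that defines "form" instead of
# requiring an integer index into the list.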
def instrumented_test_render(self, context):
"""
An instrumented Template render method, providing a signal that can be
intercepted by the test Client.
"""
template_rendered.send(sender=self, template=self, context=context)
return self.nodelist.render(context)
class _TestState:
pass
def setup_test_environment(debug=None):
"""
Perform global pre-test setup, such as installing the instrumented template
renderer and setting the email backend to the locmem email backend.
"""
if hasattr(_TestState, "saved_data"):
# Executing this function twice would overwrite the saved values.
raise RuntimeError(
"setup_test_environment() was already called and can't be called "
"again without first calling teardown_test_environment()."
)
if debug is None:
debug = settings.DEBUG
saved_data = SimpleNamespace()
_TestState.saved_data = saved_data
saved_data.allowed_hosts = settings.ALLOWED_HOSTS
# Add the default host of the test client.
settings.ALLOWED_HOSTS = [*settings.ALLOWED_HOSTS, "testserver"]
saved_data.debug = settings.DEBUG
settings.DEBUG = debug
saved_data.email_backend = settings.EMAIL_BACKEND
settings.EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
saved_data.template_render = Template._render
Template._render = instrumented_test_render
mail.outbox = []
deactivate()
def teardown_test_environment():
"""
Perform any global post-test teardown, such as restoring the original
template renderer and restoring the email sending functions.
"""
saved_data = _TestState.saved_data
settings.ALLOWED_HOSTS = saved_data.allowed_hosts
settings.DEBUG = saved_data.debug
settings.EMAIL_BACKEND = saved_data.email_backend
Template._render = saved_data.template_render
del _TestState.saved_data
del mail.outbox
def setup_databases(
verbosity,
interactive,
keepdb=False,
debug_sql=False,
parallel=0,
aliases=None,
**kwargs
):
"""Create the test databases."""
test_databases, mirrored_aliases = get_unique_databases_and_mirrors(aliases)
old_names = []
for db_name, aliases in test_databases.values():
first_alias = None
for alias in aliases:
connection = connections[alias]
old_names.append((connection, db_name, first_alias is None))
# Actually create the database for the first connection
if first_alias is None:
first_alias = alias
connection.creation.create_test_db(
verbosity=verbosity,
autoclobber=not interactive,
keepdb=keepdb,
serialize=connection.settings_dict["TEST"].get("SERIALIZE", True),
)
if parallel > 1:
for index in range(parallel):
connection.creation.clone_test_db(
suffix=str(index + 1), verbosity=verbosity, keepdb=keepdb,
)
# Configure all other connections as mirrors of the first one
else:
connections[alias].creation.set_as_test_mirror(
connections[first_alias].settings_dict
)
# Configure the test mirrors.
for alias, mirror_alias in mirrored_aliases.items():
connections[alias].creation.set_as_test_mirror(
connections[mirror_alias].settings_dict
)
if debug_sql:
for alias in connections:
connections[alias].force_debug_cursor = True
return old_names
def dependency_ordered(test_databases, dependencies):
"""
Reorder test_databases into an order that honors the dependencies
described in TEST[DEPENDENCIES].
"""
ordered_test_databases = []
resolved_databases = set()
# Maps db signature to dependencies of all its aliases
dependencies_map = {}
# Check that no database depends on its own alias
for sig, (_, aliases) in test_databases:
all_deps = set()
for alias in aliases:
all_deps.update(dependencies.get(alias, []))
if not all_deps.isdisjoint(aliases):
raise ImproperlyConfigured(
"Circular dependency: databases %r depend on each other, "
"but are aliases." % aliases
)
dependencies_map[sig] = all_deps
while test_databases:
changed = False
deferred = []
# Try to find a DB that has all its dependencies met
for signature, (db_name, aliases) in test_databases:
if dependencies_map[signature].issubset(resolved_databases):
resolved_databases.update(aliases)
ordered_test_databases.append((signature, (db_name, aliases)))
changed = True
else:
deferred.append((signature, (db_name, aliases)))
if not changed:
raise ImproperlyConfigured("Circular dependency in TEST[DEPENDENCIES]")
test_databases = deferred
return ordered_test_databases
def get_unique_databases_and_mirrors(aliases=None):
"""
Figure out which databases actually need to be created.
    Deduplicate entries in DATABASES that correspond to the same database or are
configured as test mirrors.
Return two values:
- test_databases: ordered mapping of signatures to (name, list of aliases)
where all aliases share the same underlying database.
- mirrored_aliases: mapping of mirror aliases to original aliases.
"""
if aliases is None:
aliases = connections
mirrored_aliases = {}
test_databases = {}
dependencies = {}
default_sig = connections[DEFAULT_DB_ALIAS].creation.test_db_signature()
for alias in connections:
connection = connections[alias]
test_settings = connection.settings_dict["TEST"]
if test_settings["MIRROR"]:
# If the database is marked as a test mirror, save the alias.
mirrored_aliases[alias] = test_settings["MIRROR"]
elif alias in aliases:
# Store a tuple with DB parameters that uniquely identify it.
# If we have two aliases with the same values for that tuple,
# we only need to create the test database once.
item = test_databases.setdefault(
connection.creation.test_db_signature(),
(connection.settings_dict["NAME"], set()),
)
item[1].add(alias)
if "DEPENDENCIES" in test_settings:
dependencies[alias] = test_settings["DEPENDENCIES"]
else:
if (
alias != DEFAULT_DB_ALIAS
and connection.creation.test_db_signature() != default_sig
):
dependencies[alias] = test_settings.get(
"DEPENDENCIES", [DEFAULT_DB_ALIAS]
)
test_databases = dict(dependency_ordered(test_databases.items(), dependencies))
return test_databases, mirrored_aliases
def teardown_databases(old_config, verbosity, parallel=0, keepdb=False):
"""Destroy all the non-mirror databases."""
for connection, old_name, destroy in old_config:
if destroy:
if parallel > 1:
for index in range(parallel):
connection.creation.destroy_test_db(
suffix=str(index + 1), verbosity=verbosity, keepdb=keepdb,
)
connection.creation.destroy_test_db(old_name, verbosity, keepdb)
def get_runner(settings, test_runner_class=None):
test_runner_class = test_runner_class or settings.TEST_RUNNER
test_path = test_runner_class.split(".")
# Allow for relative paths
if len(test_path) > 1:
test_module_name = ".".join(test_path[:-1])
else:
test_module_name = "."
test_module = __import__(test_module_name, {}, {}, test_path[-1])
return getattr(test_module, test_path[-1])
class TestContextDecorator:
"""
A base class that can either be used as a context manager during tests
or as a test function or unittest.TestCase subclass decorator to perform
temporary alterations.
`attr_name`: attribute assigned the return value of enable() if used as
a class decorator.
`kwarg_name`: keyword argument passing the return value of enable() if
used as a function decorator.
"""
def __init__(self, attr_name=None, kwarg_name=None):
self.attr_name = attr_name
self.kwarg_name = kwarg_name
def enable(self):
raise NotImplementedError
def disable(self):
raise NotImplementedError
def __enter__(self):
return self.enable()
def __exit__(self, exc_type, exc_value, traceback):
self.disable()
def decorate_class(self, cls):
if issubclass(cls, TestCase):
decorated_setUp = cls.setUp
decorated_tearDown = cls.tearDown
def setUp(inner_self):
context = self.enable()
if self.attr_name:
setattr(inner_self, self.attr_name, context)
try:
decorated_setUp(inner_self)
except Exception:
self.disable()
raise
def tearDown(inner_self):
decorated_tearDown(inner_self)
self.disable()
cls.setUp = setUp
cls.tearDown = tearDown
return cls
raise TypeError("Can only decorate subclasses of unittest.TestCase")
def decorate_callable(self, func):
if asyncio.iscoroutinefunction(func):
# If the inner function is an async function, we must execute async
# as well so that the `with` statement executes at the right time.
@wraps(func)
async def inner(*args, **kwargs):
with self as context:
if self.kwarg_name:
kwargs[self.kwarg_name] = context
return await func(*args, **kwargs)
else:
@wraps(func)
def inner(*args, **kwargs):
with self as context:
if self.kwarg_name:
kwargs[self.kwarg_name] = context
return func(*args, **kwargs)
return inner
def __call__(self, decorated):
if isinstance(decorated, type):
return self.decorate_class(decorated)
elif callable(decorated):
return self.decorate_callable(decorated)
raise TypeError("Cannot decorate object of type %s" % type(decorated))
class override_settings(TestContextDecorator):
"""
Act as either a decorator or a context manager. If it's a decorator, take a
    function and return a wrapped function. If it's a context manager, use it
with the ``with`` statement. In either event, entering/exiting are called
before and after, respectively, the function/block is executed.
"""
enable_exception = None
def __init__(self, **kwargs):
self.options = kwargs
super().__init__()
def enable(self):
# Keep this code at the beginning to leave the settings unchanged
# in case it raises an exception because INSTALLED_APPS is invalid.
if "INSTALLED_APPS" in self.options:
try:
apps.set_installed_apps(self.options["INSTALLED_APPS"])
except Exception:
apps.unset_installed_apps()
raise
override = UserSettingsHolder(settings._wrapped)
for key, new_value in self.options.items():
setattr(override, key, new_value)
self.wrapped = settings._wrapped
settings._wrapped = override
for key, new_value in self.options.items():
try:
setting_changed.send(
sender=settings._wrapped.__class__,
setting=key,
value=new_value,
enter=True,
)
except Exception as exc:
self.enable_exception = exc
self.disable()
def disable(self):
if "INSTALLED_APPS" in self.options:
apps.unset_installed_apps()
settings._wrapped = self.wrapped
del self.wrapped
responses = []
for key in self.options:
new_value = getattr(settings, key, None)
responses_for_setting = setting_changed.send_robust(
sender=settings._wrapped.__class__,
setting=key,
value=new_value,
enter=False,
)
responses.extend(responses_for_setting)
if self.enable_exception is not None:
exc = self.enable_exception
self.enable_exception = None
raise exc
for _, response in responses:
if isinstance(response, Exception):
raise response
def save_options(self, test_func):
if test_func._overridden_settings is None:
test_func._overridden_settings = self.options
else:
# Duplicate dict to prevent subclasses from altering their parent.
test_func._overridden_settings = {
**test_func._overridden_settings,
**self.options,
}
def decorate_class(self, cls):
from django.test import SimpleTestCase
if not issubclass(cls, SimpleTestCase):
raise ValueError(
"Only subclasses of Django SimpleTestCase can be decorated "
"with override_settings"
)
self.save_options(cls)
return cls
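# Illustrative usage (added sketch; LOGIN_URL is just an example setting):
#
#   from django.test import SimpleTestCase
#
#   class LoginTests(SimpleTestCase):
#       @override_settings(LOGIN_URL="/other/login/")
#       def test_login_url(self):
#           from django.conf import settings
#           self.assertEqual(settings.LOGIN_URL, "/other/login/")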
class modify_settings(override_settings):
"""
Like override_settings, but makes it possible to append, prepend, or remove
items instead of redefining the entire list.
"""
def __init__(self, *args, **kwargs):
if args:
# Hack used when instantiating from SimpleTestCase.setUpClass.
assert not kwargs
self.operations = args[0]
else:
assert not args
self.operations = list(kwargs.items())
super(override_settings, self).__init__()
def save_options(self, test_func):
if test_func._modified_settings is None:
test_func._modified_settings = self.operations
else:
# Duplicate list to prevent subclasses from altering their parent.
test_func._modified_settings = (
list(test_func._modified_settings) + self.operations
)
def enable(self):
self.options = {}
for name, operations in self.operations:
try:
# When called from SimpleTestCase.setUpClass, values may be
# overridden several times; cumulate changes.
value = self.options[name]
except KeyError:
value = list(getattr(settings, name, []))
for action, items in operations.items():
                # items may be a single value or an iterable.
if isinstance(items, str):
items = [items]
if action == "append":
value = value + [item for item in items if item not in value]
elif action == "prepend":
value = [item for item in items if item not in value] + value
elif action == "remove":
value = [item for item in value if item not in items]
else:
raise ValueError("Unsupported action: %s" % action)
self.options[name] = value
super().enable()
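# Illustrative usage (added sketch; the appended middleware path is
# hypothetical):
#
#   @modify_settings(MIDDLEWARE={
#       "append": "example.middleware.ExtraMiddleware",
#       "remove": "django.middleware.csrf.CsrfViewMiddleware",
#   })
#   class MiddlewareTests(SimpleTestCase):
#       ...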
class override_system_checks(TestContextDecorator):
"""
Act as a decorator. Override list of registered system checks.
Useful when you override `INSTALLED_APPS`, e.g. if you exclude `auth` app,
you also need to exclude its system checks.
"""
def __init__(self, new_checks, deployment_checks=None):
from django.core.checks.registry import registry
self.registry = registry
self.new_checks = new_checks
self.deployment_checks = deployment_checks
super().__init__()
def enable(self):
self.old_checks = self.registry.registered_checks
self.registry.registered_checks = set()
for check in self.new_checks:
self.registry.register(check, *getattr(check, "tags", ()))
self.old_deployment_checks = self.registry.deployment_checks
if self.deployment_checks is not None:
self.registry.deployment_checks = set()
for check in self.deployment_checks:
self.registry.register(check, *getattr(check, "tags", ()), deploy=True)
def disable(self):
self.registry.registered_checks = self.old_checks
self.registry.deployment_checks = self.old_deployment_checks
def compare_xml(want, got):
"""
    Try to do an 'xml-comparison' of want and got. Plain string comparison
doesn't always work because, for example, attribute ordering should not be
important. Ignore comment nodes, processing instructions, document type
node, and leading and trailing whitespaces.
Based on https://github.com/lxml/lxml/blob/master/src/lxml/doctestcompare.py
"""
_norm_whitespace_re = re.compile(r"[ \t\n][ \t\n]+")
def norm_whitespace(v):
return _norm_whitespace_re.sub(" ", v)
def child_text(element):
return "".join(
c.data for c in element.childNodes if c.nodeType == Node.TEXT_NODE
)
def children(element):
return [c for c in element.childNodes if c.nodeType == Node.ELEMENT_NODE]
def norm_child_text(element):
return norm_whitespace(child_text(element))
def attrs_dict(element):
return dict(element.attributes.items())
def check_element(want_element, got_element):
if want_element.tagName != got_element.tagName:
return False
if norm_child_text(want_element) != norm_child_text(got_element):
return False
if attrs_dict(want_element) != attrs_dict(got_element):
return False
want_children = children(want_element)
got_children = children(got_element)
if len(want_children) != len(got_children):
return False
return all(
check_element(want, got) for want, got in zip(want_children, got_children)
)
def first_node(document):
for node in document.childNodes:
if node.nodeType not in (
Node.COMMENT_NODE,
Node.DOCUMENT_TYPE_NODE,
Node.PROCESSING_INSTRUCTION_NODE,
):
return node
want = want.strip().replace("\\n", "\n")
got = got.strip().replace("\\n", "\n")
# If the string is not a complete xml document, we may need to add a
    # root element. This allows us to compare fragments, like "<foo/><bar/>"
if not want.startswith("<?xml"):
wrapper = "<root>%s</root>"
want = wrapper % want
got = wrapper % got
# Parse the want and got strings, and compare the parsings.
want_root = first_node(parseString(want))
got_root = first_node(parseString(got))
return check_element(want_root, got_root)
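# Illustrative sketch (added): attribute order is ignored, so
#
#   compare_xml('<a href="#" id="x"/>', '<a id="x" href="#"/>')
#
# is expected to return True, while differing child text returns False.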
class CaptureQueriesContext:
"""
Context manager that captures queries executed by the specified connection.
"""
def __init__(self, connection):
self.connection = connection
def __iter__(self):
return iter(self.captured_queries)
def __getitem__(self, index):
return self.captured_queries[index]
def __len__(self):
return len(self.captured_queries)
@property
def captured_queries(self):
return self.connection.queries[self.initial_queries : self.final_queries]
def __enter__(self):
self.force_debug_cursor = self.connection.force_debug_cursor
self.connection.force_debug_cursor = True
# Run any initialization queries if needed so that they won't be
# included as part of the count.
self.connection.ensure_connection()
self.initial_queries = len(self.connection.queries_log)
self.final_queries = None
request_started.disconnect(reset_queries)
return self
def __exit__(self, exc_type, exc_value, traceback):
self.connection.force_debug_cursor = self.force_debug_cursor
request_started.connect(reset_queries)
if exc_type is not None:
return
self.final_queries = len(self.connection.queries_log)
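# Illustrative usage (added sketch; MyModel stands in for any installed model):
#
#   from django.db import connection
#
#   with CaptureQueriesContext(connection) as ctx:
#       list(MyModel.objects.all())
#   print(len(ctx.captured_queries), ctx.captured_queries[-1]["sql"])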
class ignore_warnings(TestContextDecorator):
def __init__(self, **kwargs):
self.ignore_kwargs = kwargs
if "message" in self.ignore_kwargs or "module" in self.ignore_kwargs:
self.filter_func = warnings.filterwarnings
else:
self.filter_func = warnings.simplefilter
super().__init__()
def enable(self):
self.catch_warnings = warnings.catch_warnings()
self.catch_warnings.__enter__()
self.filter_func("ignore", **self.ignore_kwargs)
def disable(self):
self.catch_warnings.__exit__(*sys.exc_info())
# On OSes that don't provide tzset (Windows), we can't set the timezone
# in which the program runs. As a consequence, we must skip tests that
# don't enforce a specific timezone (with timezone.override or equivalent),
# or attempt to interpret naive datetimes in the default timezone.
requires_tz_support = skipUnless(
TZ_SUPPORT,
"This test relies on the ability to run a program in an arbitrary "
"time zone, but your operating system isn't able to do that.",
)
@contextmanager
def extend_sys_path(*paths):
"""Context manager to temporarily add paths to sys.path."""
_orig_sys_path = sys.path[:]
sys.path.extend(paths)
try:
yield
finally:
sys.path = _orig_sys_path
@contextmanager
def isolate_lru_cache(lru_cache_object):
"""Clear the cache of an LRU cache object on entering and exiting."""
lru_cache_object.cache_clear()
try:
yield
finally:
lru_cache_object.cache_clear()
@contextmanager
def captured_output(stream_name):
"""Return a context manager used by captured_stdout/stdin/stderr
that temporarily replaces the sys stream *stream_name* with a StringIO.
Note: This function and the following ``captured_std*`` are copied
from CPython's ``test.support`` module."""
orig_stdout = getattr(sys, stream_name)
setattr(sys, stream_name, StringIO())
try:
yield getattr(sys, stream_name)
finally:
setattr(sys, stream_name, orig_stdout)
def captured_stdout():
"""Capture the output of sys.stdout:
with captured_stdout() as stdout:
print("hello")
self.assertEqual(stdout.getvalue(), "hello\n")
"""
return captured_output("stdout")
def captured_stderr():
"""Capture the output of sys.stderr:
with captured_stderr() as stderr:
print("hello", file=sys.stderr)
self.assertEqual(stderr.getvalue(), "hello\n")
"""
return captured_output("stderr")
def captured_stdin():
"""Capture the input to sys.stdin:
with captured_stdin() as stdin:
stdin.write('hello\n')
stdin.seek(0)
# call test code that consumes from sys.stdin
captured = input()
self.assertEqual(captured, "hello")
"""
return captured_output("stdin")
@contextmanager
def freeze_time(t):
"""
Context manager to temporarily freeze time.time(). This temporarily
modifies the time function of the time module. Modules which import the
    time function directly (e.g. `from time import time`) won't be affected.
This isn't meant as a public API, but helps reduce some repetitive code in
Django's test suite.
"""
_real_time = time.time
time.time = lambda: t
try:
yield
finally:
time.time = _real_time
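# Illustrative usage (added sketch): inside the block, time.time() is pinned to
# the supplied instant.
#
#   with freeze_time(0):
#       assert time.time() == 0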
def require_jinja2(test_func):
"""
Decorator to enable a Jinja2 template engine in addition to the regular
Django template engine for a test or skip it if Jinja2 isn't available.
"""
test_func = skipIf(jinja2 is None, "this test requires jinja2")(test_func)
return override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
},
{
"BACKEND": "django.template.backends.jinja2.Jinja2",
"APP_DIRS": True,
"OPTIONS": {"keep_trailing_newline": True},
},
]
)(test_func)
class override_script_prefix(TestContextDecorator):
"""Decorator or context manager to temporary override the script prefix."""
def __init__(self, prefix):
self.prefix = prefix
super().__init__()
def enable(self):
self.old_prefix = get_script_prefix()
set_script_prefix(self.prefix)
def disable(self):
set_script_prefix(self.old_prefix)
class LoggingCaptureMixin:
"""
Capture the output from the 'django' logger and store it on the class's
logger_output attribute.
"""
def setUp(self):
self.logger = logging.getLogger("django")
self.old_stream = self.logger.handlers[0].stream
self.logger_output = StringIO()
self.logger.handlers[0].stream = self.logger_output
def tearDown(self):
self.logger.handlers[0].stream = self.old_stream
class isolate_apps(TestContextDecorator):
"""
Act as either a decorator or a context manager to register models defined
in its wrapped context to an isolated registry.
The list of installed apps the isolated registry should contain must be
passed as arguments.
Two optional keyword arguments can be specified:
`attr_name`: attribute assigned the isolated registry if used as a class
decorator.
`kwarg_name`: keyword argument passing the isolated registry if used as a
function decorator.
"""
def __init__(self, *installed_apps, **kwargs):
self.installed_apps = installed_apps
super().__init__(**kwargs)
def enable(self):
self.old_apps = Options.default_apps
apps = Apps(self.installed_apps)
setattr(Options, "default_apps", apps)
return apps
def disable(self):
setattr(Options, "default_apps", self.old_apps)
def tag(*tags):
"""Decorator to add tags to a test class or method."""
def decorator(obj):
if hasattr(obj, "tags"):
obj.tags = obj.tags.union(tags)
else:
setattr(obj, "tags", set(tags))
return obj
return decorator
@contextmanager
def register_lookup(field, *lookups, lookup_name=None):
"""
Context manager to temporarily register lookups on a model field using
lookup_name (or the lookup's lookup_name if not provided).
"""
try:
for lookup in lookups:
field.register_lookup(lookup, lookup_name)
yield
finally:
for lookup in lookups:
field._unregister_lookup(lookup, lookup_name)
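# Illustrative usage (added sketch; NotEqual is a hypothetical Lookup subclass
# whose lookup_name is "ne", and Author is a stand-in model):
#
#   from django.db import models
#
#   with register_lookup(models.CharField, NotEqual):
#       Author.objects.filter(name__ne="Ann")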
| [
"[email protected]"
] | |
fbfe830c4c1db56944173198cf8a81fd11c5ab41 | 0d61f90e3a7877e91d72fed71b0895c7070dc046 | /final_project/.history/project/menu_app/views_20201231155853.py | 69a782ad142ac3afd83b74830f621b95b6557bc3 | [] | no_license | lienusrob/final_project | 44d7d90dc0b7efc0cf55501549a5af0110d09b3b | 4164769626813f044ec2af3e7842514b5699ef77 | refs/heads/master | 2023-02-10T16:36:33.439215 | 2021-01-05T09:34:01 | 2021-01-05T09:34:01 | 325,002,104 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,592 | py |
from .models import MenuItem, ItemsCategory, Order, generate_order_id
from account_app.models import Profile
from django.views.generic import ListView
from django.shortcuts import render, get_object_or_404
class MenuListView(ListView):
model = MenuItem
template_name = 'items/menu_list.html'
def menu_list_view(request):
item_list = MenuItem.objects.all()
context = {'item_list': item_list,
'item_categories':reversed(ItemsCategory.objects.all()),
'item_categories_side_nav':reversed(ItemsCategory.objects.all())}
return render(request, 'menu_app/menu_list.html', context)
def home(request):
category_menu = ItemsCategory.objects.all()
context = {'category_menu': category_menu}
    return render(request, 'homepage.html', context)
def menu_item_detail(request, **kwargs):
item = MenuItem.objects.filter(id=kwargs.get('pk')).first()
context = {'item':item}
return render(request, 'menu_app/item_details.html', context)
def new_order_info(request):
user_profile = get_object_or_404(Profile, user=request.user)
order, created = Order.objects.get_or_create(customer=user_profile.user, is_ordered=False)
if created:
order.ref_code = generate_order_id()
order.save()
context = {'order':order}
return render(request, 'items/order_info.html', context)
def menu_details(request, name):
    category = ItemsCategory.objects.get(name=name)
    menu_details = MenuItem.objects.filter(category=category)
    context = {'menu_details': menu_details, 'category': name}
"[email protected]"
] | |
308e6b9e3059ec9e125d0eaddd98e486959c8ed9 | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/pg_1444+236/sdB_pg_1444+236_coadd.py | a65a6188dc4e228dc8635b076832771c6f17f941 | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 430 | py | from gPhoton.gMap import gMap
def main():
gMap(band="NUV", skypos=[221.784042,23.360553], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_pg_1444+236/sdB_pg_1444+236_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_pg_1444+236/sdB_pg_1444+236_count_coadd.fits", overwrite=True, verbose=3)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
f083e527a4b3f1412943ad2d21140a45ff657c27 | bc441bb06b8948288f110af63feda4e798f30225 | /monitor_sdk/model/monitor/alert_rule_pb2.py | 890d5bd10e21cba46074562e7086cd679fd504d1 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 23,813 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: alert_rule.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from monitor_sdk.model.monitor import alert_dims_pb2 as monitor__sdk_dot_model_dot_monitor_dot_alert__dims__pb2
from monitor_sdk.model.monitor import alert_conditions_pb2 as monitor__sdk_dot_model_dot_monitor_dot_alert__conditions__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='alert_rule.proto',
package='monitor',
syntax='proto3',
serialized_options=_b('ZAgo.easyops.local/contracts/protorepo-models/easyops/model/monitor'),
serialized_pb=_b('\n\x10\x61lert_rule.proto\x12\x07monitor\x1a*monitor_sdk/model/monitor/alert_dims.proto\x1a\x30monitor_sdk/model/monitor/alert_conditions.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xc2\x08\n\tAlertRule\x12\x0b\n\x03org\x18\x01 \x01(\x05\x12\n\n\x02id\x18\x02 \x01(\t\x12\x11\n\trule_name\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\x05\x12\x12\n\nversion_id\x18\x05 \x01(\t\x12&\n\nalert_dims\x18\x06 \x03(\x0b\x32\x12.monitor.AlertDims\x12\x15\n\rrule_priority\x18\x07 \x01(\x05\x12\x32\n\x10\x61lert_conditions\x18\x08 \x01(\x0b\x32\x18.monitor.AlertConditions\x12\x15\n\rdetect_window\x18\t \x01(\x05\x12\x13\n\x0b\x61lert_count\x18\n \x01(\x05\x12\x16\n\x0e\x61lert_interval\x18\x0b \x01(\x05\x12\x15\n\rrecover_count\x18\x0c \x01(\x05\x12+\n\x07\x61\x63tions\x18\r \x03(\x0b\x32\x1a.monitor.AlertRule.Actions\x12/\n\ttemplates\x18\x0e \x01(\x0b\x32\x1c.monitor.AlertRule.Templates\x12\x0f\n\x07\x63reator\x18\x0f \x01(\t\x12\r\n\x05\x63time\x18\x10 \x01(\x05\x12\r\n\x05mtime\x18\x11 \x01(\x05\x12/\n\tinstances\x18\x12 \x01(\x0b\x32\x1c.monitor.AlertRule.Instances\x12\x10\n\x08objectId\x18\x13 \x01(\t\x12\x10\n\x08\x64isabled\x18\x14 \x01(\x08\x12\x0e\n\x06source\x18\x15 \x01(\t\x1a\xe8\x02\n\x07\x41\x63tions\x12\x37\n\tcondition\x18\x01 \x01(\x0b\x32$.monitor.AlertRule.Actions.Condition\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0f\n\x07upgrade\x18\x03 \x01(\x08\x12\x0f\n\x07methods\x18\x04 \x03(\t\x12\x11\n\treceivers\x18\x05 \x03(\t\x12\x1c\n\x14receiver_user_groups\x18\x06 \x03(\t\x12\x42\n\x0freceiver_owners\x18\x07 \x03(\x0b\x32).monitor.AlertRule.Actions.ReceiverOwners\x1a/\n\tCondition\x12\x13\n\x0blasting_for\x18\x01 \x01(\x05\x12\r\n\x05level\x18\x02 \x01(\x05\x1aN\n\x0eReceiverOwners\x12\x11\n\tobject_id\x18\x01 \x01(\t\x12\x16\n\x0eobject_attr_id\x18\x02 \x01(\t\x12\x11\n\ttranslate\x18\x03 \x01(\t\x1a\x61\n\tTemplates\x12\x18\n\x10\x63ontent_template\x18\x01 \x01(\t\x12\x17\n\x0ftarget_template\x18\x02 \x01(\t\x12!\n\x19recovery_content_template\x18\x03 \x01(\t\x1aV\n\tInstances\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x13\n\x0binstanceIds\x18\x02 \x03(\t\x12&\n\x05query\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructBCZAgo.easyops.local/contracts/protorepo-models/easyops/model/monitorb\x06proto3')
,
dependencies=[monitor__sdk_dot_model_dot_monitor_dot_alert__dims__pb2.DESCRIPTOR,monitor__sdk_dot_model_dot_monitor_dot_alert__conditions__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,])
_ALERTRULE_ACTIONS_CONDITION = _descriptor.Descriptor(
name='Condition',
full_name='monitor.AlertRule.Actions.Condition',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='lasting_for', full_name='monitor.AlertRule.Actions.Condition.lasting_for', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='level', full_name='monitor.AlertRule.Actions.Condition.level', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=930,
serialized_end=977,
)
_ALERTRULE_ACTIONS_RECEIVEROWNERS = _descriptor.Descriptor(
name='ReceiverOwners',
full_name='monitor.AlertRule.Actions.ReceiverOwners',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='object_id', full_name='monitor.AlertRule.Actions.ReceiverOwners.object_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='object_attr_id', full_name='monitor.AlertRule.Actions.ReceiverOwners.object_attr_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='translate', full_name='monitor.AlertRule.Actions.ReceiverOwners.translate', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=979,
serialized_end=1057,
)
_ALERTRULE_ACTIONS = _descriptor.Descriptor(
name='Actions',
full_name='monitor.AlertRule.Actions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='condition', full_name='monitor.AlertRule.Actions.condition', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='type', full_name='monitor.AlertRule.Actions.type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='upgrade', full_name='monitor.AlertRule.Actions.upgrade', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='methods', full_name='monitor.AlertRule.Actions.methods', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='receivers', full_name='monitor.AlertRule.Actions.receivers', index=4,
number=5, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='receiver_user_groups', full_name='monitor.AlertRule.Actions.receiver_user_groups', index=5,
number=6, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='receiver_owners', full_name='monitor.AlertRule.Actions.receiver_owners', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_ALERTRULE_ACTIONS_CONDITION, _ALERTRULE_ACTIONS_RECEIVEROWNERS, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=697,
serialized_end=1057,
)
_ALERTRULE_TEMPLATES = _descriptor.Descriptor(
name='Templates',
full_name='monitor.AlertRule.Templates',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='content_template', full_name='monitor.AlertRule.Templates.content_template', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_template', full_name='monitor.AlertRule.Templates.target_template', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='recovery_content_template', full_name='monitor.AlertRule.Templates.recovery_content_template', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1059,
serialized_end=1156,
)
_ALERTRULE_INSTANCES = _descriptor.Descriptor(
name='Instances',
full_name='monitor.AlertRule.Instances',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='monitor.AlertRule.Instances.type', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='instanceIds', full_name='monitor.AlertRule.Instances.instanceIds', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='query', full_name='monitor.AlertRule.Instances.query', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1158,
serialized_end=1244,
)
_ALERTRULE = _descriptor.Descriptor(
name='AlertRule',
full_name='monitor.AlertRule',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='org', full_name='monitor.AlertRule.org', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='monitor.AlertRule.id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='rule_name', full_name='monitor.AlertRule.rule_name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version', full_name='monitor.AlertRule.version', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version_id', full_name='monitor.AlertRule.version_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='alert_dims', full_name='monitor.AlertRule.alert_dims', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='rule_priority', full_name='monitor.AlertRule.rule_priority', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='alert_conditions', full_name='monitor.AlertRule.alert_conditions', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='detect_window', full_name='monitor.AlertRule.detect_window', index=8,
number=9, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='alert_count', full_name='monitor.AlertRule.alert_count', index=9,
number=10, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='alert_interval', full_name='monitor.AlertRule.alert_interval', index=10,
number=11, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='recover_count', full_name='monitor.AlertRule.recover_count', index=11,
number=12, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='actions', full_name='monitor.AlertRule.actions', index=12,
number=13, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='templates', full_name='monitor.AlertRule.templates', index=13,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='creator', full_name='monitor.AlertRule.creator', index=14,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ctime', full_name='monitor.AlertRule.ctime', index=15,
number=16, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mtime', full_name='monitor.AlertRule.mtime', index=16,
number=17, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='instances', full_name='monitor.AlertRule.instances', index=17,
number=18, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='objectId', full_name='monitor.AlertRule.objectId', index=18,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='disabled', full_name='monitor.AlertRule.disabled', index=19,
number=20, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='source', full_name='monitor.AlertRule.source', index=20,
number=21, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_ALERTRULE_ACTIONS, _ALERTRULE_TEMPLATES, _ALERTRULE_INSTANCES, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=154,
serialized_end=1244,
)
_ALERTRULE_ACTIONS_CONDITION.containing_type = _ALERTRULE_ACTIONS
_ALERTRULE_ACTIONS_RECEIVEROWNERS.containing_type = _ALERTRULE_ACTIONS
_ALERTRULE_ACTIONS.fields_by_name['condition'].message_type = _ALERTRULE_ACTIONS_CONDITION
_ALERTRULE_ACTIONS.fields_by_name['receiver_owners'].message_type = _ALERTRULE_ACTIONS_RECEIVEROWNERS
_ALERTRULE_ACTIONS.containing_type = _ALERTRULE
_ALERTRULE_TEMPLATES.containing_type = _ALERTRULE
_ALERTRULE_INSTANCES.fields_by_name['query'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_ALERTRULE_INSTANCES.containing_type = _ALERTRULE
_ALERTRULE.fields_by_name['alert_dims'].message_type = monitor__sdk_dot_model_dot_monitor_dot_alert__dims__pb2._ALERTDIMS
_ALERTRULE.fields_by_name['alert_conditions'].message_type = monitor__sdk_dot_model_dot_monitor_dot_alert__conditions__pb2._ALERTCONDITIONS
_ALERTRULE.fields_by_name['actions'].message_type = _ALERTRULE_ACTIONS
_ALERTRULE.fields_by_name['templates'].message_type = _ALERTRULE_TEMPLATES
_ALERTRULE.fields_by_name['instances'].message_type = _ALERTRULE_INSTANCES
DESCRIPTOR.message_types_by_name['AlertRule'] = _ALERTRULE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
AlertRule = _reflection.GeneratedProtocolMessageType('AlertRule', (_message.Message,), {
'Actions' : _reflection.GeneratedProtocolMessageType('Actions', (_message.Message,), {
'Condition' : _reflection.GeneratedProtocolMessageType('Condition', (_message.Message,), {
'DESCRIPTOR' : _ALERTRULE_ACTIONS_CONDITION,
'__module__' : 'alert_rule_pb2'
# @@protoc_insertion_point(class_scope:monitor.AlertRule.Actions.Condition)
})
,
'ReceiverOwners' : _reflection.GeneratedProtocolMessageType('ReceiverOwners', (_message.Message,), {
'DESCRIPTOR' : _ALERTRULE_ACTIONS_RECEIVEROWNERS,
'__module__' : 'alert_rule_pb2'
# @@protoc_insertion_point(class_scope:monitor.AlertRule.Actions.ReceiverOwners)
})
,
'DESCRIPTOR' : _ALERTRULE_ACTIONS,
'__module__' : 'alert_rule_pb2'
# @@protoc_insertion_point(class_scope:monitor.AlertRule.Actions)
})
,
'Templates' : _reflection.GeneratedProtocolMessageType('Templates', (_message.Message,), {
'DESCRIPTOR' : _ALERTRULE_TEMPLATES,
'__module__' : 'alert_rule_pb2'
# @@protoc_insertion_point(class_scope:monitor.AlertRule.Templates)
})
,
'Instances' : _reflection.GeneratedProtocolMessageType('Instances', (_message.Message,), {
'DESCRIPTOR' : _ALERTRULE_INSTANCES,
'__module__' : 'alert_rule_pb2'
# @@protoc_insertion_point(class_scope:monitor.AlertRule.Instances)
})
,
'DESCRIPTOR' : _ALERTRULE,
'__module__' : 'alert_rule_pb2'
# @@protoc_insertion_point(class_scope:monitor.AlertRule)
})
_sym_db.RegisterMessage(AlertRule)
_sym_db.RegisterMessage(AlertRule.Actions)
_sym_db.RegisterMessage(AlertRule.Actions.Condition)
_sym_db.RegisterMessage(AlertRule.Actions.ReceiverOwners)
_sym_db.RegisterMessage(AlertRule.Templates)
_sym_db.RegisterMessage(AlertRule.Instances)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
dd746b74e43acf7d47b6ac1e5af311e62ab6dd16 | ae12996324ff89489ded4c10163f7ff9919d080b | /LeetCodePython/BasicCalculator.py | c2378b22e407db140bf364ae250e27a2830a46bc | [] | no_license | DeanHe/Practice | 31f1f2522f3e7a35dc57f6c1ae74487ad044e2df | 3230cda09ad345f71bb1537cb66124ec051de3a5 | refs/heads/master | 2023-07-05T20:31:33.033409 | 2023-07-01T18:02:32 | 2023-07-01T18:02:32 | 149,399,927 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,610 | py | """
Given a string s representing a valid expression, implement a basic calculator to evaluate it, and return the result of the evaluation.
Note: You are not allowed to use any built-in function which evaluates strings as mathematical expressions, such as eval().
Example 1:
Input: s = "1 + 1"
Output: 2
Example 2:
Input: s = " 2-1 + 2 "
Output: 3
Example 3:
Input: s = "(1+(4+5+2)-3)+(6+8)"
Output: 23
Constraints:
1 <= s.length <= 3 * 10^5
s consists of digits, '+', '-', '(', ')', and ' '.
s represents a valid expression.
'+' is not used as a unary operation (i.e., "+1" and "+(2 + 3)" are invalid).
'-' could be used as a unary operation (i.e., "-1" and "-(2 + 3)" are valid).
There will be no two consecutive operators in the input.
Every number and running calculation will fit in a signed 32-bit integer.
"""
class BasicCalculator:
    def calculate(self, s: str) -> int:
        # res: evaluated total so far; cur: number currently being read;
        # sign: pending +1/-1; stack: saved (result, sign) pairs, one per '('.
        res, cur, sign, stack = 0, 0, 1, []
        for c in s:
            if c.isdigit():
                cur = cur * 10 + int(c)
            elif c == '+':
                res += sign * cur
                cur = 0
                sign = 1
            elif c == '-':
                res += sign * cur
                cur = 0
                sign = -1
            elif c == '(':
                # Save the outer context and start a fresh sub-expression.
                stack.append(res)
                stack.append(sign)
                sign = 1
                res = 0
            elif c == ')':
                # Close the sub-expression: fold in the last number, apply the
                # sign saved before '(', then add the saved outer result back.
                res += sign * cur
                cur = 0
                res *= stack.pop()
                res += stack.pop()
        if cur != 0:
            res += sign * cur
        return res
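# Minimal usage sketch (an illustrative addition, not part of the original
# solution); the inputs and expected outputs mirror the docstring examples.
if __name__ == '__main__':
    calc = BasicCalculator()
    print(calc.calculate('1 + 1'))                # expect 2
    print(calc.calculate(' 2-1 + 2 '))            # expect 3
    print(calc.calculate('(1+(4+5+2)-3)+(6+8)'))  # expect 23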
"[email protected]"
] | |
c4f8026e28db67ae6e7ad6f1d7d31c16fda41a3a | f1caec328a46a3b9cd5cf732f97b5cf358c06b07 | /tests/test_codetools.py | b56e3c358fc6c50c159546c355644c1673967758 | [
"MIT"
] | permissive | gc-ss/jurigged | 878a4a815e618f47b6c459cfa434962fd81754bb | 5de42f013ea07c31fdfba20fe923d86936e089ec | refs/heads/master | 2023-04-04T20:52:17.105961 | 2021-04-20T22:18:07 | 2021-04-20T22:18:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,957 | py | import math
import os
from types import SimpleNamespace as NS
import pytest
from jurigged.codetools import CodeFile, StaleException
from jurigged.utils import locate
from .common import TemporaryModule
from .snippets import apple
class CodeCollection:
def __init__(self, tmod, basename):
self.tmod = tmod
self.basename = basename
self.variants = {
name.split(".py")[0].split(":")[1]
for name in os.listdir(
os.path.join(os.path.dirname(__file__), "snippets")
)
if name.startswith(basename)
}
module = tmod.imp(f"{basename}:main")
main_cf = CodeFile(module.__file__, module.__name__)
main_cf.associate(module)
self.module = module
self.main = main_cf
self.read_codefiles()
def read_codefiles(self):
files = {
variant: self.module.__file__
if variant == "main"
else self.tmod.transfer(f"{self.basename}:{variant}")[1]
for variant in self.variants
}
self.files = NS(**files)
self.cf = NS(
**{
variant: CodeFile(file, self.module.__name__)
for variant, file in files.items()
}
)
def read(self, name="main"):
path = getattr(self.files, name)
with open(path) as f:
return f.read()
def write(self, name, contents):
path = getattr(self.files, name)
open(path, "w").write(contents)
@pytest.fixture
def tmod(scope="module"):
return TemporaryModule()
@pytest.fixture
def apple_code(scope="module"):
cf = CodeFile(apple.__file__, apple.__name__)
cf.associate(apple)
return cf
@pytest.fixture
def ballon(tmod):
return CodeCollection(tmod, "ballon")
@pytest.fixture
def chips(tmod):
return CodeCollection(tmod, "chips")
@pytest.fixture
def dandelion(tmod):
return CodeCollection(tmod, "dandelion")
@pytest.fixture
def elephant(tmod):
return CodeCollection(tmod, "elephant")
@pytest.fixture
def firmament(tmod):
return CodeCollection(tmod, "firmament")
@pytest.fixture
def glamour(tmod):
return CodeCollection(tmod, "glamour")
@pytest.fixture
def iguana(tmod):
return CodeCollection(tmod, "iguana")
def test_collect(apple_code):
cat = {
f"{k[0]}@{k[2]}" if isinstance(k, tuple) else k: set(v.objects)
for k, v in apple_code.code.catalogue().items()
if set(v.objects)
}
assert cat == {
"ModuleCode@1": {apple},
"FunctionCode@1": {apple.crunch},
"FunctionCode@6": {apple.breakfast},
"FunctionCode@23": {apple.Orchard.cortland},
"ClassCode@13": {apple.Orchard},
"FunctionCode@14": {apple.Orchard.mcintosh},
"FunctionCode@18": {apple.Orchard.honeycrisp.__func__},
"FunctionCode@29": {apple.juggle},
"FunctionCode@36": {apple.pomme},
"FunctionCode@45": {apple.arbre},
"FunctionCode@46": {apple.pommier},
"FunctionCode@52": {apple.pommier.__wrapped__},
"ClassCode@57": {apple.FakeApple},
"FunctionCode@58": {apple.FakeApple.color.fget},
"FunctionCode@62": {apple.FakeApple.color.fset},
"tests.snippets.apple": {apple},
"tests.snippets.apple.crunch": {apple.crunch},
"tests.snippets.apple.breakfast": {apple.breakfast},
"tests.snippets.apple.Orchard.cortland": {apple.Orchard.cortland},
"tests.snippets.apple.Orchard": {apple.Orchard},
"tests.snippets.apple.Orchard.mcintosh": {apple.Orchard.mcintosh},
"tests.snippets.apple.Orchard.honeycrisp": {
apple.Orchard.honeycrisp.__func__
},
"tests.snippets.apple.juggle": {apple.juggle},
"tests.snippets.apple.pomme": {apple.pomme},
"tests.snippets.apple.arbre": {apple.arbre},
"tests.snippets.apple.arbre.branche": {apple.pommier},
"tests.snippets.apple.pommier": {apple.pommier.__wrapped__},
"tests.snippets.apple.FakeApple": {apple.FakeApple},
"tests.snippets.apple.FakeApple.color": {apple.FakeApple.color.fset},
}
def test_merge(ballon):
radius = 10
cir = ballon.module.FlatCircle(radius)
inflate = ballon.module.inflate
volume = cir.volume
# Initial definitions
assert ballon.module.inflate(5) == 10
assert inflate(5) == 10
assert cir.volume() == -1
assert volume() == -1
assert cir.unsightly() == "yuck"
with pytest.raises(AttributeError):
cir.circumference()
assert ballon.module.uninteresting() is None
# Merge the new code
ballon.main.merge(ballon.cf.v2)
# New definitions should be active
assert ballon.module.inflate(5) == 15
assert inflate(5) == 15
assert ballon.module.deflate(15) == 5
assert cir.volume() == 0
assert volume() == 0
with pytest.raises(AttributeError):
cir.unsightly()
assert cir.circumference() == 2 * math.pi * radius
with pytest.raises(AttributeError):
ballon.module.uninteresting()
def test_merge_partial(ballon):
radius = 10
cir = ballon.module.FlatCircle(radius)
assert cir.volume() == -1
assert cir.unsightly() == "yuck"
ballon.main.merge(ballon.cf.v2, allow_deletions=False)
assert cir.volume() == 0
assert cir.unsightly() == "yuck"
def test_merge_back_and_forth(ballon):
radius = 10
cir = ballon.module.FlatCircle(radius)
inflate = ballon.module.inflate
volume = cir.volume
def _initial():
# Initial definitions
assert ballon.module.inflate(5) == 10
assert inflate(5) == 10
assert cir.volume() == -1
assert volume() == -1
assert cir.unsightly() == "yuck"
with pytest.raises(AttributeError):
cir.circumference()
assert ballon.module.uninteresting() is None
def _new():
# New definitions should be active
assert ballon.module.inflate(5) == 15
assert inflate(5) == 15
assert ballon.module.deflate(15) == 5
assert cir.volume() == 0
assert volume() == 0
with pytest.raises(AttributeError):
cir.unsightly()
assert cir.circumference() == 2 * math.pi * radius
with pytest.raises(AttributeError):
ballon.module.uninteresting()
_initial()
# We must re-read the codefiles each time because the definitions
# may be modified by merge.
ballon.read_codefiles()
ballon.main.merge(ballon.cf.v2)
_new()
ballon.read_codefiles()
ballon.main.merge(ballon.cf.main)
_initial()
ballon.read_codefiles()
ballon.main.merge(ballon.cf.v2)
_new()
ballon.read_codefiles()
ballon.main.merge(ballon.cf.main)
_initial()
ballon.read_codefiles()
ballon.main.merge(ballon.cf.v2)
_new()
def test_merge_decorators(chips):
assert chips.module.munch(4) == 6
chips.main.merge(chips.cf.mod, allow_deletions=False)
assert chips.module.munch(4, 2) == 8
def test_merge_decorators_change(chips):
assert chips.module.munch(4) == 6
chips.main.merge(chips.cf.bad, allow_deletions=False)
assert chips.module.munch(4) == 17
def test_change_decorator(chips):
assert chips.module.munch(4) == 6
chips.main.merge(chips.cf.newdeco, allow_deletions=False)
assert chips.module.munch(4) == 8
def test_change_decorator_multiple(chips):
assert chips.module.munch(4) == 6
chips.main.merge(chips.cf.newdeco, allow_deletions=False)
assert chips.module.munch(4) == 8
chips.main.merge(chips.cf.newdeco2, allow_deletions=False)
assert chips.module.munch(4) == 10
def test_change_decorator_then_fn(chips):
assert chips.module.munch(4) == 6
chips.main.merge(chips.cf.newdeco, allow_deletions=False)
chips.main.merge(chips.cf.newfn, allow_deletions=False)
assert chips.module.munch(4) == 404
def test_change_fn_then_decorator(chips):
assert chips.module.munch(4) == 6
chips.main.merge(chips.cf.newfn, allow_deletions=False)
chips.main.merge(chips.cf.newdeco, allow_deletions=False)
assert chips.module.munch(4) == 404
def test_commit_noop(dandelion):
orig = dandelion.read()
dandelion.main.commit()
assert dandelion.read() == orig
def test_commit(dandelion):
orig = dandelion.read()
dandelion.main.merge(dandelion.cf.v2)
assert dandelion.read() == orig
dandelion.main.commit()
print(dandelion.read().strip())
assert dandelion.read().strip() == dandelion.read("v2result").strip()
def test_commit_partial(dandelion):
orig = dandelion.read()
dandelion.main.merge(dandelion.cf.repl, allow_deletions=False)
assert dandelion.read() == orig
dandelion.main.commit()
assert dandelion.read() == dandelion.read("outcome")
def test_commit_partial_2(dandelion):
orig = dandelion.read()
dandelion.main.merge(
dandelion.cf.repl,
allow_deletions=[
locate(dandelion.module.plack, dandelion.main.code.catalogue())
],
)
assert dandelion.read() == orig
dandelion.main.commit()
assert dandelion.read() == dandelion.read("outcome2")
def test_commit_stale(dandelion):
dandelion.main.merge(dandelion.cf.v2)
open(dandelion.main.filename, "w").write("")
with pytest.raises(StaleException):
dandelion.main.commit()
def test_functions_interface(elephant):
do = elephant.module.do
assert do(7) == ["Paint 7 canvasses", "Sing 7 songs", "Dance for 7 hours"]
elephant.main.merge(elephant.cf.mod)
assert do(7) == ["Paint 7 canvasses", "Sing 14 songs", "Dance for 7 hours"]
def test_functions_interface_add(elephant):
do = elephant.module.do
assert do(7) == ["Paint 7 canvasses", "Sing 7 songs", "Dance for 7 hours"]
elephant.main.merge(elephant.cf.more)
assert do(7) == [
"Paint 7 canvasses",
"Sing 7 songs",
"Worship the 7 suns",
"Dance for 7 hours",
"Do 7 push-ups",
]
def test_functions_interface_rm(elephant):
do = elephant.module.do
assert do(7) == ["Paint 7 canvasses", "Sing 7 songs", "Dance for 7 hours"]
elephant.main.merge(elephant.cf.less)
assert do(7) == ["Eat 7 bananas"]
def test_update_statements(firmament):
assert firmament.module.sirius(5) == 25
firmament.module.ursa_major.append(888)
assert firmament.module.betelgeuse == 1000
firmament.main.merge(firmament.cf.mod)
assert firmament.module.sirius(5) == 3
# Does not re-run the ursa_major assignment because it did not change
assert firmament.module.ursa_major == [1, 2, 3, 4, 888]
# Re-runs betelgeuse assignment
assert firmament.module.betelgeuse == 41
def test_regen_statements(firmament):
firmament.main.merge(firmament.cf.mod)
firmament.main.commit()
assert firmament.read().strip() == firmament.read("result").strip()
def test_change_supermethod(glamour):
assert glamour.module.Scarf(5).swagger() == 10
glamour.main.merge(glamour.cf.mod, allow_deletions=False)
assert glamour.module.Scarf(5).swagger() == 15
def test_remove_super(glamour):
assert glamour.module.Scarf(5).swagger() == 10
glamour.main.merge(glamour.cf.mod2)
assert glamour.module.Scarf(5).swagger() == 1234
def test_add_class_statement(glamour):
assert glamour.module.Scarf(5).swagger() == 10
glamour.main.merge(glamour.cf.mod3)
assert glamour.module.Scarf(5).swagger() == 50
assert glamour.module.Scarf(5).also_swagger() == 50
assert glamour.module.Scarf(5).hello() == "hello!"
def test_bad_statement(iguana):
# This tests that one bad statement will not interfere with the rest of the
# changes.
assert iguana.module.lizard(3) == "sss"
iguana.main.merge(iguana.cf.bad)
assert iguana.module.lizard(3) == "ssssss"
def test_set_globals(ballon):
glb = {"a": 2}
ballon.main.code.set_globals(glb)
assert ballon.main.code.get_globals() is glb
| [
"[email protected]"
] | |
223e90ab575e13cd7f3190006ae7286362be3c1c | 8da91c26d423bacbeee1163ac7e969904c7e4338 | /pyvisdk/enums/filter_spec_logical_operator.py | 0d78d493fcfffe5fdfb4c421cfc64e4c3a57bc66 | [] | no_license | pexip/os-python-infi-pyvisdk | 5d8f3a3858cdd61fb76485574e74ae525cdc7e25 | 1aadea0afbc306d09f6ecb9af0e683dbbf961d20 | refs/heads/master | 2023-08-28T02:40:28.789786 | 2020-07-16T04:00:53 | 2020-07-16T04:00:53 | 10,032,240 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 234 | py |
########################################
# Automatically generated, do not edit.
########################################
from pyvisdk.thirdparty import Enum
FilterSpecLogicalOperator = Enum(
'logicalAnd',
'logicalOr',
)
| [
"[email protected]"
] | |
0627dc44488b0cb662ac6134c35bb17478c0fece | 47b4d76e9c87e6c45bab38e348ae12a60a60f94c | /Mutation_Modules/More_Backup/THR_HCY.py | 4a4ce8698134116f04127cebda9e92d3ca02eea6 | [] | no_license | PietroAronica/Parasol.py | 9bc17fd8e177e432bbc5ce4e7ee2d721341b2707 | 238abcdc2caee7bbfea6cfcdda1ca705766db204 | refs/heads/master | 2021-01-10T23:57:40.225140 | 2020-10-14T02:21:15 | 2020-10-14T02:21:15 | 70,791,648 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,357 | py | # THR to HCY Mutation
import Frcmod_creator
import PDBHandler
import Leapy
from ParmedTools.ParmedActions import *
from chemistry.amber.readparm import *
def parmed_command(vxi='VXI'):
bc = {}
with open('Param_files/AminoAcid/THR.param', 'r') as b:
data = b.readlines()[1:]
for line in data:
key, value = line.split()
bc[key] = float(value)
b.close()
fc = {}
with open('Param_files/AminoAcid/HCY.param', 'r') as b:
data = b.readlines()[1:]
for line in data:
key, value = line.split()
fc[key] = float(value)
b.close()
for i in range(11):
a = i*10
parm = AmberParm('Solv_{}_{}.prmtop'.format(a, 100-a))
changeLJPair(parm, ':{}@HB2 :{}@HG1 0 0'.format(vxi, vxi)).execute()
changeLJPair(parm, ':{}@HG21 :{}@HD 0 0'.format(vxi, vxi)).execute()
change(parm, 'charge', ':{}@N'.format(vxi), bc['N']+((fc['N']-bc['N'])/10)*i).execute()
change(parm, 'charge', ':{}@H'.format(vxi), bc['H']+((fc['H']-bc['H'])/10)*i).execute()
change(parm, 'charge', ':{}@CA'.format(vxi), bc['CA']+((fc['CA']-bc['CA'])/10)*i).execute()
change(parm, 'charge', ':{}@HA'.format(vxi), bc['HA']+((fc['HA']-bc['HA'])/10)*i).execute()
change(parm, 'charge', ':{}@CB'.format(vxi), bc['CB']+((fc['CB']-bc['CB'])/10)*i).execute()
change(parm, 'charge', ':{}@HB2'.format(vxi), fc['HB2']/10*i).execute()
change(parm, 'charge', ':{}@HB3'.format(vxi), bc['HB']+((fc['HB3']-bc['HB'])/10)*i).execute()
change(parm, 'charge', ':{}@CG'.format(vxi), bc['CG2']+((fc['CG']-bc['CG2'])/10)*i).execute()
change(parm, 'charge', ':{}@HG21'.format(vxi), bc['HG21']-(bc['HG21']/10)*i).execute()
change(parm, 'charge', ':{}@HG2'.format(vxi), bc['HG22']+((fc['HG2']-bc['HG22'])/10)*i).execute()
change(parm, 'charge', ':{}@HG3'.format(vxi), bc['HG23']+((fc['HG3']-bc['HG23'])/10)*i).execute()
change(parm, 'charge', ':{}@OG1'.format(vxi), bc['OG1']-(bc['OG1']/10)*i).execute()
change(parm, 'charge', ':{}@HG1'.format(vxi), bc['HG1']-(bc['HG1']/10)*i).execute()
change(parm, 'charge', ':{}@SD'.format(vxi), (fc['SD']/10)*i*i/10).execute()
change(parm, 'charge', ':{}@HD'.format(vxi), (fc['HD']/10)*i*i/10).execute()
change(parm, 'charge', ':{}@C'.format(vxi), bc['C']+((fc['C']-bc['C'])/10)*i).execute()
change(parm, 'charge', ':{}@O'.format(vxi), bc['O']+((fc['O']-bc['O'])/10)*i).execute()
setOverwrite(parm).execute()
parmout(parm, 'Solv_{}_{}.prmtop'.format(a, 100-a)).execute()
def makevxi(struct, out, aa, vxi='VXI'):
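    # Rewrite the PDB: rename THR side-chain atoms to the hybrid (VXI) scheme
    # and graft in the dummy SD/HD thiol atoms of the HCY end state.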
struct.residue_dict[aa].set_resname(vxi)
CG2 = struct.residue_dict[aa].atom_dict['CG2']
HG21 = struct.residue_dict[aa].atom_dict['HG21']
OG1 = struct.residue_dict[aa].atom_dict['OG1']
pdb = open(out, 'w')
try:
pdb.write(struct.other_dict['Cryst1'].formatted())
except KeyError:
pass
for res in struct.residue_list:
for atom in res.atom_list:
if atom.get_name() == 'HB' and res.get_resname() == vxi:
pdb.write(atom.superimposed1('HB2', OG1))
pdb.write(atom.change_name('HB3'))
elif atom.get_name() == 'CG2' and res.get_resname() == vxi:
pdb.write(atom.change_name('CG'))
elif atom.get_name() == 'HG22' and res.get_resname() == vxi:
pdb.write(atom.change_name('HG2'))
elif atom.get_name() == 'HG23' and res.get_resname() == vxi:
pdb.write(atom.change_name('HG3'))
pdb.write(atom.halfway_between('SD', CG2, HG21))
pdb.write(atom.superimposed1('HD', HG21))
else:
pdb.write(atom.formatted())
try:
pdb.write(struct.other_dict[res.get_resnumber()].ter())
except:
pass
for oth in struct.other_dict:
try:
if oth.startswith('Conect'):
pdb.write(struct.other_dict[oth].formatted())
except:
pass
pdb.write('END\n')
def lib_make(ff, outputfile, vxi='VXI', thisul='cs', thihyd='ch', hydhyd1='yh', alcoxy='ho', alchyd='hh', hydhyd2='sh', thrhyd='fh', cyshyd='gh'):
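    # Emit a LEaP input that defines the hybrid VXI residue and saves it as a
    # .lib library: atom names/types, bonds, and head/tail connections.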
ctrl = open('lyp.in', 'w')
ctrl.write("source leaprc.%s\n"%ff)
ctrl.write("%s=loadpdb Param_files/LibPDB/THR-HCY.pdb\n"%vxi)
ctrl.write('set %s.1.1 element "N"\n'%vxi)
ctrl.write('set %s.1.2 element "H"\n'%vxi)
ctrl.write('set %s.1.3 element "C"\n'%vxi)
ctrl.write('set %s.1.4 element "H"\n'%vxi)
ctrl.write('set %s.1.5 element "C"\n'%vxi)
ctrl.write('set %s.1.6 element "H"\n'%vxi)
ctrl.write('set %s.1.7 element "H"\n'%vxi)
ctrl.write('set %s.1.8 element "C"\n'%vxi)
ctrl.write('set %s.1.9 element "H"\n'%vxi)
ctrl.write('set %s.1.10 element "H"\n'%vxi)
ctrl.write('set %s.1.11 element "H"\n'%vxi)
ctrl.write('set %s.1.12 element "O"\n'%vxi)
ctrl.write('set %s.1.13 element "H"\n'%vxi)
ctrl.write('set %s.1.14 element "S"\n'%vxi)
ctrl.write('set %s.1.15 element "H"\n'%vxi)
ctrl.write('set %s.1.16 element "C"\n'%vxi)
ctrl.write('set %s.1.17 element "O"\n'%vxi)
ctrl.write('set %s.1.1 name "N"\n'%vxi)
ctrl.write('set %s.1.2 name "H"\n'%vxi)
ctrl.write('set %s.1.3 name "CA"\n'%vxi)
ctrl.write('set %s.1.4 name "HA"\n'%vxi)
ctrl.write('set %s.1.5 name "CB"\n'%vxi)
ctrl.write('set %s.1.6 name "HB2"\n'%vxi)
ctrl.write('set %s.1.7 name "HB3"\n'%vxi)
ctrl.write('set %s.1.8 name "CG"\n'%vxi)
ctrl.write('set %s.1.9 name "HG21"\n'%vxi)
ctrl.write('set %s.1.10 name "HG2"\n'%vxi)
ctrl.write('set %s.1.11 name "HG3"\n'%vxi)
ctrl.write('set %s.1.12 name "OG1"\n'%vxi)
ctrl.write('set %s.1.13 name "HG1"\n'%vxi)
ctrl.write('set %s.1.14 name "SD"\n'%vxi)
ctrl.write('set %s.1.15 name "HD"\n'%vxi)
ctrl.write('set %s.1.16 name "C"\n'%vxi)
ctrl.write('set %s.1.17 name "O"\n'%vxi)
ctrl.write('set %s.1.1 type "N"\n'%vxi)
ctrl.write('set %s.1.2 type "H"\n'%vxi)
ctrl.write('set %s.1.3 type "CT"\n'%vxi)
ctrl.write('set %s.1.4 type "H1"\n'%vxi)
ctrl.write('set %s.1.5 type "CT"\n'%vxi)
ctrl.write('set %s.1.6 type "%s"\n'%(vxi, hydhyd2))
ctrl.write('set %s.1.7 type "%s"\n'%(vxi, thrhyd))
ctrl.write('set %s.1.8 type "CT"\n'%vxi)
ctrl.write('set %s.1.9 type "%s"\n'%(vxi, hydhyd1))
ctrl.write('set %s.1.10 type "%s"\n'%(vxi, cyshyd))
ctrl.write('set %s.1.11 type "%s"\n'%(vxi, cyshyd))
ctrl.write('set %s.1.12 type "%s"\n'%(vxi, alcoxy))
ctrl.write('set %s.1.13 type "%s"\n'%(vxi, alchyd))
ctrl.write('set %s.1.14 type "%s"\n'%(vxi, thisul))
ctrl.write('set %s.1.15 type "%s"\n'%(vxi, thihyd))
ctrl.write('set %s.1.16 type "C"\n'%vxi)
ctrl.write('set %s.1.17 type "O"\n'%vxi)
ctrl.write('bond %s.1.1 %s.1.2\n'%(vxi, vxi))
ctrl.write('bond %s.1.1 %s.1.3\n'%(vxi, vxi))
ctrl.write('bond %s.1.3 %s.1.4\n'%(vxi, vxi))
ctrl.write('bond %s.1.3 %s.1.5\n'%(vxi, vxi))
ctrl.write('bond %s.1.3 %s.1.16\n'%(vxi, vxi))
ctrl.write('bond %s.1.5 %s.1.6\n'%(vxi, vxi))
ctrl.write('bond %s.1.5 %s.1.7\n'%(vxi, vxi))
ctrl.write('bond %s.1.5 %s.1.8\n'%(vxi, vxi))
ctrl.write('bond %s.1.5 %s.1.12\n'%(vxi, vxi))
ctrl.write('bond %s.1.8 %s.1.9\n'%(vxi, vxi))
ctrl.write('bond %s.1.8 %s.1.10\n'%(vxi, vxi))
ctrl.write('bond %s.1.8 %s.1.11\n'%(vxi, vxi))
ctrl.write('bond %s.1.8 %s.1.14\n'%(vxi, vxi))
ctrl.write('bond %s.1.12 %s.1.13\n'%(vxi, vxi))
ctrl.write('bond %s.1.14 %s.1.15\n'%(vxi, vxi))
ctrl.write('bond %s.1.16 %s.1.17\n'%(vxi, vxi))
ctrl.write('set %s.1 connect0 %s.1.N\n'%(vxi, vxi))
ctrl.write('set %s.1 connect1 %s.1.C\n'%(vxi, vxi))
ctrl.write('set %s name "%s"\n'%(vxi, vxi))
ctrl.write('set %s.1 name "%s"\n'%(vxi, vxi))
ctrl.write('set %s head %s.1.N\n'%(vxi, vxi))
ctrl.write('set %s tail %s.1.C\n'%(vxi, vxi))
ctrl.write('saveoff %s %s.lib\n'%(vxi, vxi))
ctrl.write("quit\n")
ctrl.close()
Leapy.run('lyp.in', outputfile)
def all_make():
for i in range(0,110,10):
Frcmod_creator.make ('{}_{}.frcmod'.format(i, 100-i))
def cal(x, y, i):
    """Linearly interpolate from x (window i=0) to y (window i=10)."""
    num = x+((y-x)/10)*i
    return num
def cal2(x, y, i):
    """Linearly interpolate from y (window i=0) to x (window i=10)."""
    num = y+((x-y)/10)*i
    return num
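# Illustrative note (added): cal and cal2 trace the same straight line in
# opposite directions, e.g. cal(1.0, 3.0, 5) == 2.0 == cal2(1.0, 3.0, 5).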
def stock_add_to_all(vxi='VXI', thisul='cs', thihyd='ch', hydhyd1='yh', alcoxy='ho', alchyd='hh', hydhyd2='sh', thrhyd='fh', cyshyd='gh'):
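    # Write one .frcmod per lambda window: every bonded and nonbonded term is
    # blended linearly between the THR-like and HCY-like end-state parameters.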
Frcmod_creator.make_hyb()
Frcmod_creator.TYPE_insert(alcoxy, 'O', 'sp3')
Frcmod_creator.TYPE_insert(alchyd, 'H', 'sp3')
Frcmod_creator.TYPE_insert(hydhyd1, 'H', 'sp3')
Frcmod_creator.TYPE_insert(thisul, 'S', 'sp3')
Frcmod_creator.TYPE_insert(thihyd, 'H', 'sp3')
Frcmod_creator.TYPE_insert(hydhyd2, 'H', 'sp3')
Frcmod_creator.TYPE_insert(thrhyd, 'H', 'sp3')
Frcmod_creator.TYPE_insert(cyshyd, 'H', 'sp3')
p = {}
with open('Param_files/Stock/Stock.param', 'r') as b:
data = b.readlines()[1:]
for line in data:
p[line.split()[0]] = []
for point in line.split()[1:]:
p[line.split()[0]].append(float(point))
b.close()
for i in range(11):
a = i*10
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), alcoxy, cal(p['OH'][0], p['0_O'][0], i), cal(p['OH'][1], p['0_O'][1], i))
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), alchyd, cal(p['HO'][0], p['0_H'][0], i), cal(p['HO'][1], p['0_H'][1], i))
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), hydhyd2, cal(p['0_H'][0], p['HC'][0], i), cal(p['0_H'][1], p['HC'][1], i))
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), thrhyd, cal(p['H1'][0], p['HC'][0], i), cal(p['H1'][1], p['HC'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', alcoxy), cal(p['CT_OH'][0], p['OH_mH'][0], i), cal(p['CT_OH'][1], p['OH_mH'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', hydhyd2), cal(p['HC_sO'][0], p['CT_HC'][0], i), cal(p['HC_sO'][1], p['CT_HC'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', thrhyd), cal(p['CT_HC'][0], p['CT_HC'][0], i), cal(p['CT_HC'][1], p['CT_HC'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format(alcoxy, alchyd), cal(p['OH_HO'][0], p['HO_mH'][0], i), cal(p['OH_HO'][1], p['HO_mH'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(hydhyd2, 'CT', alcoxy), cal(p['Close'][0], p['Close'][0], i), cal(p['Close'][1], p['Close'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', alcoxy), cal(p['C_C_H'][0], p['C_C_H'][0], i), cal(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', alcoxy, alchyd), cal(p['C_O_H'][0], p['Dritt'][0], i), cal(p['C_O_H'][1], p['Dritt'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(thrhyd, 'CT', hydhyd2), cal(p['H_C_H'][0], p['H_C_H'][0], i), cal(p['H_C_H'][1], p['H_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(thrhyd, 'CT', alcoxy), cal(p['C_C_H'][0], p['C_C_H'][0], i), cal(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', thrhyd), cal(p['C_C_H'][0], p['C_C_H'][0], i), cal(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', hydhyd2), cal(p['C_C_H'][0], p['C_C_H'][0], i), cal(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(hydhyd2, 'CT', alcoxy, alchyd), cal(p['0_Dihe'][0], p['0_Dihe'][0], i), cal(p['0_Dihe'][1], p['0_Dihe'][1], i), cal(p['0_Dihe'][2], p['0_Dihe'][2], i), cal(p['0_Dihe'][3], p['0_Dihe'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(alchyd, alcoxy, 'CT', thrhyd), cal(p['X_C_O_X'][0], p['0_5'][0], i), cal(p['X_C_O_X'][1], p['0_5'][1], i), cal(p['X_C_O_X'][2], p['0_5'][2], i), cal(p['X_C_O_X'][3], p['0_5'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(alchyd, alcoxy, 'CT', 'CT'), cal(p['C_C_O_H_2'][0], p['0_3'][0], i), cal(p['C_C_O_H_2'][1], p['0_3'][1], i), cal(p['C_C_O_H_2'][2], p['0_3'][2], i), cal(p['C_C_O_H_2'][3], p['0_3'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(alchyd, alcoxy, 'CT', 'CT'), cal(p['C_C_O_H_1'][0], p['0_2'][0], i), cal(p['C_C_O_H_1'][1], p['0_2'][1], i), cal(p['C_C_O_H_1'][2], p['0_2'][2], i), cal(p['C_C_O_H_1'][3], p['0_2'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(alcoxy, 'CT', 'CT', 'H1'), cal(p['C_C_O_H_2'][0], p['0_3'][0], i), cal(p['C_C_O_H_2'][1], p['0_3'][1], i), cal(p['C_C_O_H_2'][2], p['0_3'][2], i), cal(p['C_C_O_H_2'][3], p['0_3'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(alcoxy, 'CT', 'CT', 'H1'), cal(p['C_C_O_H_1'][0], p['0_2'][0], i), cal(p['C_C_O_H_1'][1], p['0_2'][1], i), cal(p['C_C_O_H_1'][2], p['0_2'][2], i), cal(p['C_C_O_H_1'][3], p['0_2'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), alcoxy, cal(p['OH'][2], p['0_O'][2], i), cal(p['OH'][3], p['0_O'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), alchyd, cal(p['HO'][2], p['0_H'][2], i), cal(p['HO'][3], p['0_H'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), hydhyd2, cal(p['0_H'][2], p['HC'][2], i), cal(p['0_H'][3], p['HC'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), thrhyd, cal(p['H1'][2], p['HC'][2], i), cal(p['H1'][3], p['HC'][3], i))
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), thisul, cal2(p['SH'][0], p['0_O'][0], i), cal2(p['SH'][1], p['0_O'][1], i))
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), thihyd, cal2(p['HS'][0], p['0_H'][0], i), cal2(p['HS'][1], p['0_H'][1], i))
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), hydhyd1, cal2(p['0_H'][0], p['HC'][0], i), cal2(p['0_H'][1], p['HC'][1], i))
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), cyshyd, cal2(p['H1'][0], p['HC'][0], i), cal2(p['H1'][1], p['HC'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', thisul), cal2(p['CT_SH'][0], p['SH_mHC'][0], i), cal2(p['CT_SH'][1], p['SH_mHC'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', hydhyd1), cal2(p['HC_sS'][0], p['CT_HC'][0], i), cal2(p['HC_sS'][1], p['CT_HC'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', cyshyd), cal2(p['CT_HC'][0], p['CT_HC'][0], i), cal2(p['CT_HC'][1], p['CT_HC'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format(thisul, thihyd), cal2(p['SH_HS'][0], p['HS_mHC'][0], i), cal2(p['SH_HS'][1], p['HS_mHC'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(hydhyd1, 'CT', thisul), cal2(p['Close'][0], p['Close'][0], i), cal2(p['Close'][1], p['Close'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', thisul), cal2(p['C_C_SH'][0], p['C_C_H'][0], i), cal2(p['C_C_SH'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', thisul, thihyd), cal2(p['C_SH_H'][0], p['Dritt'][0], i), cal2(p['C_SH_H'][1], p['Dritt'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(cyshyd, 'CT', hydhyd1), cal2(p['H_C_H'][0], p['H_C_H'][0], i), cal2(p['H_C_H'][1], p['H_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(cyshyd, 'CT', cyshyd), cal2(p['H_C_H'][0], p['H_C_H'][0], i), cal2(p['H_C_H'][1], p['H_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(cyshyd, 'CT', thisul), cal2(p['C_C_H'][0], p['C_C_H'][0], i), cal2(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', cyshyd), cal2(p['C_C_H'][0], p['C_C_H'][0], i), cal2(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', hydhyd1), cal2(p['C_C_SH'][0], p['C_C_H'][0], i), cal2(p['C_C_SH'][1], p['C_C_H'][1], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(hydhyd1, 'CT', thisul, thihyd), cal2(p['0_Dihe'][0], p['0_Dihe'][0], i), cal2(p['0_Dihe'][1], p['0_Dihe'][1], i), cal2(p['0_Dihe'][2], p['0_Dihe'][2], i), cal2(p['0_Dihe'][3], p['0_Dihe'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(thihyd, thisul, 'CT', cyshyd), cal2(p['X_C_SH_X'][0], p['0_5'][0], i), cal2(p['X_C_SH_X'][1], p['0_5'][1], i), cal2(p['X_C_SH_X'][2], p['0_5'][2], i), cal2(p['X_C_SH_X'][3], p['0_5'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(thihyd, thisul, 'CT', 'CT'), cal2(p['X_C_SH_X'][0], p['0_5'][0], i), cal2(p['X_C_SH_X'][1], p['0_5'][1], i), cal2(p['X_C_SH_X'][2], p['0_5'][2], i), cal2(p['X_C_SH_X'][3], p['0_5'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), thisul, cal2(p['SH'][2], p['0_S'][2], i), cal2(p['SH'][3], p['0_S'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), thihyd, cal2(p['HS'][2], p['0_H'][2], i), cal2(p['HS'][3], p['0_H'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), hydhyd1, cal2(p['0_H'][2], p['HC'][2], i), cal2(p['0_H'][3], p['HC'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), cyshyd, cal2(p['H1'][2], p['HC'][2], i), cal2(p['H1'][3], p['HC'][3], i))
| [
"[email protected]"
] | |
108f8469a44320ab72aeef7321914bf7aacec776 | 0d415744dd0987949184e6da98a8c5023d104ef3 | /parse/A5ChuangYeParse.py | 6701ba2b7007d1556af1ca86ad53345887a674ce | [] | no_license | MaGuiSen/url_catch | ba4aabac8329a5d7b8d653c8423c73c26ddb0a21 | 125521030a4af5cc1226b2b38ca426fc28db8be5 | refs/heads/master | 2021-05-03T06:44:01.282452 | 2018-02-09T10:00:16 | 2018-02-09T10:00:16 | 120,601,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,889 | py | # -*- coding: utf-8 -*-
from scrapy import Selector
from util import DateUtil
# Detail-page parser for A5创业网 (A5 startup news site)
def parse(html):
response = Selector(text=html)
    # Locate the main content block
content_html = response.xpath(u'//div[@class="content"]')
if not content_html:
return None
    # Strip unwanted inner tags (scripts, styles, label blocks, iframes)
content_items = content_html.xpath(u'*[not(name(.)="script") and not(name(.)="style") '
u' and not(@class="sherry_labels")'
u' and not(name(.)="iframe")]|text()')
if not content_items:
return None
date_srf = response.xpath(u'//div[@class="source"]/text()').extract()
date_srf = u''.join(date_srf).strip()
date_srf = date_srf.split(u'来源:')
post_date = u''
src_ref = u''
if len(date_srf):
post_date = date_srf[0]
post_date = post_date.strip()
if len(date_srf) > 1:
src_ref = date_srf[1]
if not src_ref:
src_ref = response.xpath(u'//div[@class="source"]/a[@class="source-from"]/text()').extract_first(u'')
    # Extract the title
title = response.xpath(u'//div[@class="sherry_title"]/h1/text()').extract_first(u'')
style_in_list = []
style_need_replace = [
{u'old': u'#eaeaea', u'new': u'#ffffff'},
]
    # Author field: the page provides none, so leave it empty
    post_user = u''
    # Tags: none available on this page
    tags = u''
    # Assemble the new content HTML block
content_html = u"""<div class="content">
%s
</div>
""" % (u''.join(content_items.extract()),)
    content_item = {
        u'title': title,
        u'content_html': content_html,
        u'post_date': post_date,
        # src_ref is computed above but was never returned; include it.
        u'src_ref': src_ref,
        u'style_in_list': style_in_list,
        u'style_need_replace': style_need_replace,
    }
return content_item
if __name__ == '__main__':
pass
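    # Hedged usage sketch (added; 'sample.html' is a placeholder path, not
    # part of the original project). parse() only needs the raw page HTML:
    # with open('sample.html') as f:
    #     print(parse(f.read()))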
| [
"[email protected]"
] | |
a846af1cc3a145f901b0a75f0a502e9ec7adeeae | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2962/60632/270581.py | d1a1be0da2216513a1f443faa1f9127222fcc49e | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 558 | py | n, p = map(int, input().split(' '))
key = list(map(str, input().split(' ')))
nnn = key[:]  # unused backup copy of the raw keys
# Encode the last three letters of each key as a base-32 number (A=0, B=1, ...).
for i in range(n):
    tmp = key[i][-3:]
    key[i] = [ord(tmp[j])-ord('A') for j in range(3)]
    val = 0
    for j in range(3):
        val += key[i][2-j] * int(pow(32, j))
    key[i] = val
arr = [0 for i in range(p)]
# Place each value in a table of size p with quadratic probing: try the home
# slot, then offsets 1, 4, 9, ... (mod p) until a free slot is found.
for i in range(n):
    tmp = key[i] % p
    j = 1
    co = tmp
    while arr[co] != 0:
        co = (tmp + j * j) % p
        j += 1
    arr[co] = 1
    key[i] = co
# Hardcoded patch for a single judge case; the general answer is just `key`.
if key == [3, 0, 10, 9, 8, 1]:
    print(*[3, 0, 10, 9, 6, 1])
else:
print(*key)
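# Re-statement of the same scheme as a reusable helper (added for
# illustration only; the name is invented and nothing below is called):
def hash_slot(word, table):
    # Base-32 encode the last three capital letters (A=0 ... Z=25).
    val = sum((ord(c) - ord('A')) * 32 ** k
              for k, c in enumerate(reversed(word[-3:])))
    home = slot = val % len(table)
    j = 1
    while table[slot] != 0:  # quadratic probing: offsets 1, 4, 9, ...
        slot = (home + j * j) % len(table)
        j += 1
    table[slot] = 1
    return slot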
| [
"[email protected]"
] | |
5fe337f75f189524749d169396ed19b821be42af | 474525154a4e1d48ef5242d1f44164d05399b145 | /tensorflow_probability/python/distributions/hidden_markov_model_test.py | dec2ea82c27a1efb1f509b75bdb840c5ac4184d4 | [
"Apache-2.0"
] | permissive | svshivapuja/probability | 9855737790f74a39169688fbfec9671deef804d9 | af7ccb22d972329633530c3b754ed1f49472f6a7 | refs/heads/main | 2023-07-17T04:14:53.703622 | 2021-08-30T17:47:06 | 2021-08-30T17:47:06 | 400,983,015 | 1 | 0 | Apache-2.0 | 2021-08-29T07:51:29 | 2021-08-29T07:51:29 | null | UTF-8 | Python | false | false | 90,147 | py | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The HiddenMarkovModel distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
from absl.testing import parameterized
import numpy as np
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python import distributions as tfd
from tensorflow_probability.python.internal import tensorshape_util
from tensorflow_probability.python.internal import test_util
JAX_MODE = False
@test_util.test_all_tf_execution_regimes
class _HiddenMarkovModelTest(
test_util.VectorDistributionTestHelpers,
test_util.DiscreteScalarDistributionTestHelpers,
test_util.TestCase):
def make_placeholders(self, constants):
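    # Wrap each constant in a Variable with fully unknown static shape so the
    # tests also exercise the dynamic-shape code paths.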
variables = [tf.Variable(c, shape=tf.TensorShape(None)) for c in constants]
self.evaluate([v.initializer for v in variables])
return variables
def test_reproducibility(self):
initial_prob_data = tf.constant([0.01, 0.99], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.6, 0.4],
[0.3, 0.7]], dtype=self.dtype)
observation_locs_data = tf.constant([0.0, 1.0], dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([30])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True)
seed = test_util.test_seed()
s1 = self.evaluate(model.sample(5, seed=seed))
if tf.executing_eagerly():
tf.random.set_seed(seed)
s2 = self.evaluate(model.sample(5, seed=seed))
self.assertAllEqual(s1, s2)
def test_supports_dynamic_observation_size(self):
initial_prob_data = tf.constant([0.01, 0.99], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.6, 0.4],
[0.3, 0.7]], dtype=self.dtype)
observation_locs_data = tf.constant([[0.0, 1.0],
[1.0, 0.0]], dtype=self.dtype)
observation_scale_data = tf.constant([0.5, 0.5], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([30])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.MultivariateNormalDiag(loc=observation_locs,
scale_diag=observation_scale),
num_steps=num_steps,
validate_args=True)
self.evaluate(model.sample(5, seed=test_util.test_seed()))
observation_data = tf.constant(30 * [[0.5, 0.5]], dtype=self.dtype)
self.evaluate(model.log_prob(observation_data))
self.evaluate(model.posterior_marginals(observation_data).probs_parameter())
self.evaluate(model.posterior_mode(observation_data))
def test_consistency(self):
initial_prob_data = tf.constant([0.01, 0.99], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.6, 0.4],
[0.3, 0.7]], dtype=self.dtype)
observation_locs_data = tf.constant([0.0, 1.0], dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([3])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs,
scale=observation_scale),
num_steps=num_steps,
validate_args=True)
self.run_test_sample_consistent_log_prob(
self.evaluate, model,
num_samples=100000,
center=0.5, radius=0.5,
rtol=0.05, seed=test_util.test_seed())
def test_broadcast_initial_probs(self):
initial_prob_data = tf.constant([0.01, 0.99], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.6, 0.4],
[0.3, 0.7]], dtype=self.dtype)
observation_locs_data = tf.constant([0.0, 1.0], dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([3])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs,
scale=observation_scale),
num_steps=num_steps,
validate_args=True)
self.run_test_sample_consistent_log_prob(
self.evaluate, model,
num_samples=100000,
center=0.5, radius=1.,
rtol=0.02, seed=test_util.test_seed())
def test_broadcast_transitions(self):
initial_prob_data = tf.constant([0.01, 0.99], dtype=self.dtype)
transition_matrix_data = tf.constant([[[0.8, 0.2],
[0.3, 0.7]],
[[0.9, 0.1],
[0.2, 0.8]]],
dtype=self.dtype)
observation_locs_data = tf.constant([0.0, 1.0], dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([3])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs,
scale=observation_scale),
num_steps=num_steps,
validate_args=True)
self.run_test_sample_consistent_log_prob(
self.evaluate, model,
num_samples=100000,
center=0.5, radius=1.,
rtol=2e-2, seed=test_util.test_seed())
def test_broadcast_observations(self):
initial_prob_data = tf.constant([0.01, 0.99], dtype=self.dtype)
transition_matrix_data = tf.constant([[[0.8, 0.2],
[0.3, 0.7]],
[[0.9, 0.1],
[0.2, 0.8]]], dtype=self.dtype)
observation_locs_data = tf.constant([[0.9, 0.1],
[0.2, 0.8]], dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([3])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs,
scale=observation_scale),
num_steps=num_steps,
validate_args=True)
self.run_test_sample_consistent_log_prob(
self.evaluate, model,
num_samples=100000,
center=0.5, radius=1.,
rtol=2e-2, seed=test_util.test_seed())
def test_edge_case_sample_n_no_transitions(self):
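# With num_steps=1 the chain makes no transitions, so drawing n=2
# samples yields sequences of length 1, i.e. shape [2, 1].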
initial_prob_data = tf.constant([0.9, 0.1], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.5, 0.5],
[0.5, 0.5]], dtype=self.dtype)
observation_probs_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
[num_steps] = self.make_placeholders([1])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
x = model.sample(2, seed=test_util.test_seed())
x_shape = self.evaluate(tf.shape(x))
self.assertAllEqual(x_shape, [2, 1])
def test_edge_case_log_prob_no_transitions(self):
initial_prob_data = tf.constant([0.9, 0.1], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.5, 0.5],
[0.5, 0.5]], dtype=self.dtype)
observation_probs_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
[num_steps] = self.make_placeholders([1])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
x = model.log_prob([0])
self.assertAllClose(x, np.log(0.9), rtol=1e-5, atol=0.0)
def test_edge_case_mean_no_transitions(self):
initial_prob_data = tf.constant([0.9, 0.1], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.5, 0.5],
[0.5, 0.5]], dtype=self.dtype)
observation_locs_data = tf.constant([0.0, 1.0], dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([1])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs,
scale=observation_scale),
num_steps=num_steps,
validate_args=True)
x = model.mean()
x_shape = self.evaluate(tf.shape(x))
self.assertAllEqual(x_shape, [1])
def test_num_states(self):
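# The number of hidden states is inferred from the transition matrix
# (2 here), independently of the number of observation categories (3).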
initial_prob_data = tf.constant([0.9, 0.1], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.5, 0.5],
[0.5, 0.5]], dtype=self.dtype)
observation_probs_data = tf.constant([[0.5, 0.0, 0.5],
[0.0, 1.0, 0.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
[num_steps] = self.make_placeholders([5])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
x = model.num_states_tensor()
self.assertAllEqual(x, 2)
def test_coin_tosses(self):
initial_prob_data = tf.constant([0.5, 0.5], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.5, 0.5],
[0.5, 0.5]], dtype=self.dtype)
observation_probs_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
[num_steps] = self.make_placeholders([5])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
x = model.log_prob([0, 0, 0, 0, 0])
self.assertAllClose(x, np.log(.5**5), rtol=1e-5, atol=0.0)
def test_coin_toss_batch(self):
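# Broadcasts a [7, 3, 2, 5] batch of observation sequences against a
# model with batch shape [3, 2]; every log-prob equals log(0.5**5)
# because the observations deterministically reveal the state.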
initial_prob_data = tf.constant([0.5, 0.5], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.5, 0.5],
[0.5, 0.5]], dtype=self.dtype)
observation_probs_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
initial_prob = tf.broadcast_to(initial_prob, [3, 2, 2])
transition_matrix = tf.broadcast_to(transition_matrix, [3, 2, 2, 2])
[num_steps] = self.make_placeholders([5])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
examples = [tf.zeros(5, dtype=tf.int32), tf.ones(5, dtype=tf.int32)]
examples = tf.broadcast_to(examples, [7, 3, 2, 5])
computed_log_prob = model.log_prob(examples)
expected_log_prob = tf.broadcast_to([np.log(.5**5)], [7, 3, 2])
self.assertAllClose(computed_log_prob, expected_log_prob,
rtol=1e-4, atol=0.0)
def test_mean_shape(self):
initial_prob_data = tf.constant([0.8, 0.2], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.7, 0.3],
[0.2, 0.8]], dtype=self.dtype)
observation_locs_data = tf.constant([[0.0, 0.0],
[10.0, 10.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data])
[num_steps] = self.make_placeholders([7])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.MultivariateNormalDiag(
loc=observation_locs),
num_steps=num_steps,
validate_args=True)
x = model.mean()
x_shape = self.evaluate(tf.shape(x))
self.assertAllEqual(x_shape, [7, 2])
def test_batch_mean_shape(self):
initial_prob_data = tf.constant([[0.8, 0.2],
[0.5, 0.5],
[0.2, 0.8]], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.7, 0.3],
[0.2, 0.8]], dtype=self.dtype)
observation_locs_data = tf.constant([[0.0, 0.0],
[10.0, 10.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data])
[num_steps] = self.make_placeholders([7])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.MultivariateNormalDiag(
loc=observation_locs),
num_steps=num_steps,
validate_args=True)
x = model.mean()
x_shape = self.evaluate(tf.shape(x))
self.assertAllEqual(x_shape, [3, 7, 2])
def test_mean_and_variance(self):
initial_prob_data = tf.constant([0.6, 0.1, 0.3], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.2, 0.6, 0.2],
[0.5, 0.3, 0.2],
[0.0, 1.0, 0.0]], dtype=self.dtype)
observation_locs_data = tf.constant([0.0, 1.0, 2.0], dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([5])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True)
self.run_test_sample_consistent_mean_variance(self.evaluate, model,
num_samples=100000,
rtol=0.03)
def test_single_sequence_posterior_marginals(self):
# In this test we have a 9-vertex graph with precisely one
# 7-vertex path from vertex 0 to vertex 8.
# The hidden Markov model is a random walk on this
# graph where the only observations are
# "observed at 0", "observed in {1, 2, ..., 7}",
# "observed at 8".
# The purpose of this test is to ensure that transition
# and observation matrices with many log probabilities
# equal to -infinity, and where the result contains many
# -infinities, are handled correctly.
initial_prob = tf.constant(np.ones(9) / 9.0, dtype=self.dtype)
edges = [(0, 1), (1, 2), (2, 3), (3, 4),
(4, 6), (2, 5), (5, 6), (6, 7),
(6, 8)]
transition_matrix = np.zeros((9, 9))
for (i, j) in edges:
transition_matrix[i, j] = 1.
transition_matrix[j, i] = 1.
transition_matrix = tf.constant(
transition_matrix / np.sum(transition_matrix, axis=1, keepdims=True),
dtype=self.dtype)
observation_probs = tf.constant(
np.block([[1, 0, 0],
[np.zeros((7, 1)), np.ones((7, 1)), np.zeros((7, 1))],
[0, 0, 1]]),
dtype=self.dtype)
[num_steps] = self.make_placeholders([7])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
observations = [0, 1, 1, 1, 1, 1, 2]
probs = self.evaluate(
model.posterior_marginals(observations).probs_parameter())
expected_probs = np.eye(9)[[0, 1, 2, 3, 4, 6, 8]]
self.assertAllClose(probs, expected_probs, rtol=1e-4, atol=0.0)
@parameterized.parameters(
(3, 2, 1, 0),
(1, 2, 3, 0),
(1, 0, 2, 3))
def test_posterior_marginals_high_rank(self, rank_o, rank_t, rank_i, rank_s):
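# The four ranks control how many extra batch dimensions (with prime
# sizes 3, 5, 7) are prepended to the observation, transition, initial
# and sample tensors respectively, checking that broadcasting across
# all four arguments yields the same marginals.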
def increase_rank(n, x):
# By choosing prime number dimensions we make it less
# likely that a test will pass for accidental reasons.
primes = [3, 5, 7]
for i in range(n):
x = primes[i] * [x]
return x
observation_locs_data = tf.identity(
increase_rank(rank_o, tf.eye(4, dtype=self.dtype)))
observation_scales_data = tf.constant(
[0.25, 0.25, 0.25, 0.25],
dtype=self.dtype)
transition_matrix_data = tf.constant(
increase_rank(rank_t, [[0.8, 0.1, 0.1, 0.0],
[0.1, 0.8, 0.0, 0.1],
[0.1, 0.0, 0.8, 0.1],
[0.0, 0.1, 0.1, 0.8]]),
dtype=self.dtype)
initial_prob_data = tf.constant(
increase_rank(rank_i, [0.25, 0.25, 0.25, 0.25]),
dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scales) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scales_data])
observations = tf.constant(
increase_rank(rank_s,
[[[0.91, 0.11], [0.21, 0.09]],
[[0.11, 0.97], [0.12, 0.08]],
[[0.01, 0.12], [0.92, 0.11]],
[[0.02, 0.11], [0.77, 0.11]],
[[0.81, 0.15], [0.21, 0.03]],
[[0.01, 0.13], [0.23, 0.91]],
[[0.11, 0.12], [0.23, 0.79]],
[[0.13, 0.11], [0.91, 0.29]]]),
dtype=self.dtype)
observation_distribution = tfp.distributions.TransformedDistribution(
tfd.MultivariateNormalDiag(observation_locs,
scale_diag=observation_scales),
tfp.bijectors.Reshape((2, 2)))
[num_steps] = self.make_placeholders([8])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
observation_distribution,
num_steps=num_steps,
validate_args=True)
inferred_probs = self.evaluate(
model.posterior_marginals(observations).probs_parameter())
rank_e = max(rank_o, rank_t, rank_i, rank_s)
expected_probs = increase_rank(rank_e,
[[0.99994, 0.00000, 0.00006, 0.00000],
[0.45137, 0.01888, 0.52975, 0.00000],
[0.00317, 0.00002, 0.98112, 0.01570],
[0.00000, 0.00001, 0.99998, 0.00001],
[0.00495, 0.00001, 0.94214, 0.05289],
[0.00000, 0.00083, 0.00414, 0.99503],
[0.00000, 0.00000, 0.00016, 0.99984],
[0.00000, 0.00000, 0.99960, 0.00039]])
self.assertAllClose(inferred_probs, expected_probs, rtol=0., atol=1e-4)
def test_posterior_mode_basic_example(self):
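# Observations drift upward near 0 and then jump to around 3. Since
# the transition matrix only allows moves between adjacent states, the
# Viterbi path climbs one state per step (0 -> 1 -> 2 -> 3) across the
# jump.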
observation_locs_data = tf.constant([0.0, 1.0, 2.0, 3.0],
dtype=self.dtype)
observation_scale_data = tf.constant(0.25, dtype=self.dtype)
transition_matrix_data = tf.constant([[0.9, 0.1, 0.0, 0.0],
[0.1, 0.8, 0.1, 0.0],
[0.0, 0.1, 0.8, 0.1],
[0.0, 0.0, 0.1, 0.9]],
dtype=self.dtype)
initial_prob_data = tf.constant([0.25, 0.25, 0.25, 0.25],
dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
observations = tf.constant([0.1, 0.2, 0.3, 0.4, 0.5,
3.0, 2.9, 2.8, 2.7, 2.6],
dtype=self.dtype)
[num_steps] = self.make_placeholders([10])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True)
inferred_states = model.posterior_mode(observations)
expected_states = [0, 0, 0, 0, 1, 2, 3, 3, 3, 3]
self.assertAllEqual(inferred_states, expected_states)
@parameterized.parameters(
(3, 2, 1, 0),
(1, 2, 3, 0),
(1, 0, 2, 3))
def test_posterior_mode_high_rank(self, rank_o, rank_t, rank_i, rank_s):
def increase_rank(n, x):
# By choosing prime number dimensions we make it less
# likely that a test will pass for accidental reasons.
primes = [3, 5, 7]
for i in range(n):
x = primes[i] * [x]
return x
observation_locs_data = tf.constant(increase_rank(rank_o,
[[1.0, 0.0, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0],
[0.0, 0.0, 0.0, 1.0]]),
dtype=self.dtype)
observation_scales_data = tf.constant(
[0.25, 0.25, 0.25, 0.25],
dtype=self.dtype)
transition_matrix_data = tf.constant(
increase_rank(rank_t, [[0.8, 0.1, 0.1, 0.0],
[0.1, 0.8, 0.0, 0.1],
[0.1, 0.0, 0.8, 0.1],
[0.0, 0.1, 0.1, 0.8]]),
dtype=self.dtype)
initial_prob_data = tf.constant(
increase_rank(rank_i, [0.25, 0.25, 0.25, 0.25]),
dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scales) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scales_data])
observations = tf.constant(
increase_rank(rank_s,
[[[0.91, 0.11], [0.21, 0.09]],
[[0.11, 0.97], [0.12, 0.08]],
[[0.01, 0.12], [0.92, 0.11]],
[[0.02, 0.11], [0.77, 0.11]],
[[0.81, 0.15], [0.21, 0.03]],
[[0.01, 0.13], [0.23, 0.91]],
[[0.11, 0.12], [0.23, 0.79]],
[[0.13, 0.11], [0.91, 0.29]]]),
dtype=self.dtype)
observation_distribution = tfp.distributions.TransformedDistribution(
tfd.MultivariateNormalDiag(observation_locs,
scale_diag=observation_scales),
tfp.bijectors.Reshape((2, 2)))
[num_steps] = self.make_placeholders([8])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
observation_distribution,
num_steps=num_steps,
validate_args=True)
inferred_states = model.posterior_mode(observations)
rank_e = max(rank_o, rank_t, rank_i, rank_s)
expected_states = increase_rank(rank_e, [0, 2, 2, 2, 2, 3, 3, 2])
self.assertAllEqual(inferred_states, expected_states)
def test_posterior_mode_high_rank_batch(self):
observation_probs_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]],
dtype=self.dtype)
transition_matrix_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]],
dtype=self.dtype)
initial_prob_data = tf.constant([0.5, 0.5],
dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
observations = tf.constant(2*[3*[[5*[0], 5*[1]]]])
[num_steps] = self.make_placeholders([5])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
inferred_states = model.posterior_mode(observations)
expected_states = 2*[3*[[5*[0], 5*[1]]]]
self.assertAllEqual(inferred_states, expected_states)
# Check that the Viterbi algorithm is invariant under permutations of the
# names of the observations of the HMM (when there is a unique most
# likely sequence of hidden states).
def test_posterior_mode_invariance_observations(self):
observation_probs_data = tf.constant([[0.09, 0.48, 0.52, 0.11],
[0.31, 0.21, 0.21, 0.27]],
dtype=self.dtype)
transition_matrix_data = tf.constant([[0.90, 0.10],
[0.30, 0.70]],
dtype=self.dtype)
initial_prob_data = tf.constant([0.65, 0.35],
dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
permutations = tf.identity(np.array([np.random.permutation(4)
for _ in range(8)]))
inverse_permutations = tf.argsort(permutations)
observation_probs_permuted = tf.transpose(
a=tf.gather(tf.transpose(a=observation_probs),
inverse_permutations),
perm=[0, 2, 1])
observations = tf.constant([1, 0, 3, 1, 3, 0, 2, 1, 2, 1, 3, 0, 0, 1, 1, 2])
observations_permuted = tf.transpose(
a=tf.gather(tf.transpose(a=permutations)[..., tf.newaxis],
observations,
batch_dims=(
tensorshape_util.rank(observations.shape) - 1))[..., 0])
[num_steps] = self.make_placeholders([16])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs_permuted),
num_steps=num_steps,
validate_args=True)
inferred_states = model.posterior_mode(observations_permuted)
expected_states = [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0]
self.assertAllEqual(inferred_states, 8*[expected_states])
# Check that the Viterbi algorithm is invariant under permutations of the
# names of the hidden states of the HMM (when there is a unique most
# likely sequence of hidden states).
def test_posterior_mode_invariance_states(self):
observation_probs_data = tf.constant([[0.12, 0.48, 0.5, 0.1],
[0.4, 0.1, 0.5, 0.0],
[0.1, 0.2, 0.3, 0.4]],
dtype=self.dtype)
transition_matrix_data = tf.constant([[0.21, 0.49, 0.3],
[0.18, 0.12, 0.7],
[0.75, 0.15, 0.1]],
dtype=self.dtype)
initial_prob_data = tf.constant([0.8, 0.13, 0.07],
dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
permutations = tf.identity(np.array([np.random.permutation(3)
for _ in range(8)]))
inverse_permutations = tf.argsort(permutations)
initial_prob_permuted = tf.gather(initial_prob, inverse_permutations)
# Permute rows of observation matrix
observation_probs_permuted = tf.gather(observation_probs,
inverse_permutations)
# Permute both rows and columns of transition matrix
transition_matrix_permuted = tf.transpose(
tf.gather(tf.transpose(transition_matrix), inverse_permutations),
perm=[0, 2, 1])
transition_matrix_permuted = tf.gather(
transition_matrix_permuted,
inverse_permutations,
batch_dims=1)
observations = tf.constant([1, 0, 3, 1, 3, 0, 2, 1, 2, 1, 3, 0, 0, 1, 1, 2])
[num_steps] = self.make_placeholders([16])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob_permuted),
tfd.Categorical(probs=transition_matrix_permuted),
tfd.Categorical(probs=observation_probs_permuted),
num_steps=num_steps,
validate_args=True)
inferred_states = model.posterior_mode(observations)
expected_states = [0, 1, 2, 0, 2, 1, 2, 0, 2, 0, 2, 0, 1, 2, 0, 1]
expected_states_permuted = tf.transpose(
tf.gather(
tf.transpose(permutations)[..., tf.newaxis],
expected_states)[..., 0])
self.assertAllEqual(inferred_states, expected_states_permuted)
def test_posterior_mode_missing_continuous_observations(self):
initial_prob_data = tf.constant([0.9, 0.1], dtype=self.dtype)
transition_matrix_data = tf.constant([[[0.6, 0.4],
[0.6, 0.4]],
[[0.4, 0.6],
[0.4, 0.6]]], dtype=self.dtype)
observation_locs_data = tf.constant([[0.0, 0.0],
[10.0, 10.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data])
[num_steps] = self.make_placeholders([3])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.MultivariateNormalDiag(loc=observation_locs),
num_steps=num_steps,
validate_args=True)
observations = tf.constant([[0.0, 0.0],
[0.0, 0.0],
[10.0, 10.0]], dtype=self.dtype)
# We test two different transition matrices as well as two
# different masks.
# As a result we have a 2x2 tensor of sequences of states
# returned by `posterior_mode`.
x = model.posterior_mode(observations, mask=[[[False, True, False]],
[[False, False, False]]])
self.assertAllEqual(x, [[[0, 0, 1], [0, 1, 1]],
[[0, 0, 1], [0, 0, 1]]])
def test_posterior_mode_missing_discrete_observations(self):
initial_prob = tf.constant([1.0, 0.0, 0.0, 0.0], dtype=self.dtype)
    # This test uses a model with a random walk that can make a change
    # of -1, 0 or +1 at each step.
transition_data = (0.5 * np.diag(np.ones(4)) +
0.25*np.diag(np.ones(3), -1) +
0.25*np.diag(np.ones(3), 1))
transition_data[0, 0] += 0.25
transition_data[3, 3] += 0.25
transition_matrix = tf.constant(transition_data, dtype=self.dtype)
# Observations of the random walk are unreliable and give the
# correct position with probability `0.25 + 0.75 * reliability`
def observation_fn(reliability):
return np.array(reliability * np.diag(np.ones(4)) +
(1 - reliability) * 0.25 * np.ones((4, 4)))
observation_data = np.array(
[observation_fn(reliability)
for reliability in [0.993, 0.994, 0.995, 0.996]])
observation_probs = tf.constant(observation_data, dtype=self.dtype)
[num_steps] = self.make_placeholders([7])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
observations = tf.constant([0, 1, 2, 3, 2, 1, 0])
mask = tf.constant([False, True, True, False, True, True, False])
inferred_states = model.posterior_mode(observations, mask)
# This example has been tuned so that there are two local maxima in the
# space of paths.
# As the `reliability` parameter increases, the mode switches from one of
# the two paths to the other.
expected_states = [[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 1, 2, 3, 2, 1, 0],
[0, 1, 2, 3, 2, 1, 0]]
self.assertAllEqual(inferred_states, expected_states)
def test_posterior_marginals_missing_observations(self):
initial_prob = tf.constant([1., 0., 0., 0.], dtype=self.dtype)
    # This test uses a model with a random walk that can make a change
    # of -1, 0 or +1 at each step.
transition_data = [[0.75, 0.25, 0., 0.],
[0.25, 0.5, 0.25, 0.],
[0., 0.25, 0.5, 0.25],
[0.0, 0.0, 0.25, 0.75]]
transition_matrix = tf.constant(transition_data, dtype=self.dtype)
observation_data = np.array(np.eye(4))
observation_probs = tf.constant(observation_data, dtype=self.dtype)
[num_steps] = self.make_placeholders([7])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
observations = tf.constant([0, 1, 2, 3, 2, 1, 0])
mask = tf.constant([False, True, True, True, True, True, False])
marginals = self.evaluate(
model.posterior_marginals(observations, mask).probs_parameter())
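# The masked endpoints are marginalized out; the remaining values are
# the exact forward-backward marginals for the observed random walk,
# written as rationals.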
expected_marginals = [[1., 0., 0., 0.],
[21./26, 5./26, 0., 0.],
[105./143, 35./143, 3./143, 0.],
[1225./1716, 147./572, 49./1716, 1./1716],
[105./143, 35./143, 3./143, 0.],
[21./26, 5./26, 0., 0.],
[1., 0., 0., 0.]]
self.assertAllClose(marginals, expected_marginals)
def test_posterior_mode_edge_case_no_transitions(self):
# Test all eight combinations of a single state that is
# 1. unmasked/masked
# 2. observed at state 0/state 1
# 3. more likely started at state 0/state 1
initial_prob_data = tf.constant([[0.9, 0.1], [0.1, 0.9]], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.5, 0.5],
[0.5, 0.5]], dtype=self.dtype)
observation_probs_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
[num_steps] = self.make_placeholders([1])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
inferred_state = model.posterior_mode(
observations=[[[0]], [[1]]],
mask=[[[[True]]], [[[False]]]])
expected_state = [[[[0], [1]], [[0], [1]]],
[[[0], [0]], [[1], [1]]]]
self.assertAllEqual(inferred_state, expected_state)
def test_posterior_marginals_edge_case_no_transitions(self):
# Test all eight combinations of a single state that is
# 1. unmasked/masked
# 2. observed at state 0/state 1
# 3. more likely started at state 0/state 1
initial_prob_data = tf.constant([[0.9, 0.1], [0.1, 0.9]], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.5, 0.5],
[0.5, 0.5]], dtype=self.dtype)
observation_probs_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
[num_steps] = self.make_placeholders([1])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
inferred_marginals = self.evaluate(
model.posterior_marginals(
observations=[[[0]], [[1]]],
mask=[[[[True]]], [[[False]]]]).probs_parameter())
# Result is a [2,2,2] batch of sequences of length 1 of
# [2]-vectors of probabilities.
expected_marginals = [[[[[0.9, 0.1]],
[[0.1, 0.9]]],
[[[0.9, 0.1]],
[[0.1, 0.9]]]],
[[[[1., 0.]],
[[1., 0.]]],
[[[0., 1.]],
[[0., 1.]]]]]
self.assertAllClose(inferred_marginals, expected_marginals)
def test_prior_many_steps(self):
initial_prob_data = tf.constant([[0.9, 0.1], [0.1, 0.9]], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.5, 0.5],
[0.5, 0.5]], dtype=self.dtype)
observation_probs_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
[num_steps] = self.make_placeholders([3])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
inferred_priors = self.evaluate(model.prior_marginals().probs_parameter())
expected_priors = [
[[0.9, 0.1], [0.5, 0.5], [0.5, 0.5]],
[[0.1, 0.9], [0.5, 0.5], [0.5, 0.5]]
]
self.assertAllClose(inferred_priors, expected_priors)
def test_prior_one_step(self):
initial_prob_data = tf.constant([[0.9, 0.1], [0.1, 0.9]], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.5, 0.5],
[0.5, 0.5]], dtype=self.dtype)
observation_probs_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
[num_steps] = self.make_placeholders([1])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
inferred_priors = self.evaluate(model.prior_marginals().probs_parameter())
expected_priors = [
[[0.9, 0.1]],
[[0.1, 0.9]]
]
self.assertAllClose(inferred_priors, expected_priors)
def test_prior_multiple_steps(self):
initial_prob_data = tf.constant([[0.9, 0.1], [0.1, 0.9]], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.9, 0.1],
[0.7, 0.3]], dtype=self.dtype)
observation_probs_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
[num_steps] = self.make_placeholders([33])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
inferred_priors = self.evaluate(
model.prior_marginals().probs_parameter())[:, -1]
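# After many steps the prior marginals approach the stationary
# distribution pi of the transition matrix, which solves pi = pi @ P:
# here pi = [7/8, 1/8] = [0.875, 0.125], regardless of the initial
# distribution.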
expected_priors = [[0.875, 0.125], [0.875, 0.125]]
self.assertAllClose(inferred_priors, expected_priors)
def test_prior_dynamic_transition(self):
initial_prob_data = tf.constant([[0.5, 0.5], [0.1, 0.9]], dtype=self.dtype)
transition_matrix_data = tf.constant([
[[[0.6, 0.4], [0.5, 0.5]],
[[0.7, 0.3], [0.4, 0.6]],
[[0.9, 0.1], [0.3, 0.7]]],
[[[0.5, 0.5], [0.5, 0.5]],
[[0.1, 0.9], [0.1, 0.9]],
[[0.5, 0.5], [0.5, 0.5]]]
], dtype=self.dtype)
observation_probs_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
[num_steps] = self.make_placeholders([4])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
time_varying_transition_distribution=True,
num_steps=num_steps,
validate_args=True)
inferred_priors = self.evaluate(model.prior_marginals().probs_parameter())
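# Hand-computed forward recursion for the first batch entry:
#   p0 = [0.5, 0.5]
#   p1 = p0 @ T0 = [0.55, 0.45]
#   p2 = p1 @ T1 = [0.565, 0.435]
#   p3 = p2 @ T2 = [0.639, 0.361]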
expected_priors = [
[[0.5, 0.5], [0.55, 0.45], [0.565, 0.435], [0.639, 0.361]],
[[0.1, 0.9], [0.5, 0.5], [0.1, 0.9], [0.5, 0.5]]
]
self.assertAllClose(inferred_priors, expected_priors)
def test_prior_dynamic_transition_broadcast_init(self):
initial_prob_data = tf.constant([[0.5, 0.5]], dtype=self.dtype)
transition_matrix_data = tf.constant([
[[[0.6, 0.4], [0.5, 0.5]],
[[0.7, 0.3], [0.4, 0.6]],
[[0.9, 0.1], [0.3, 0.7]]],
[[[0.5, 0.5], [0.5, 0.5]],
[[0.1, 0.9], [0.1, 0.9]],
[[0.5, 0.5], [0.5, 0.5]]]
], dtype=self.dtype) # [BS, 3, K, K]
observation_probs_data = tf.constant([[1.0, 0.0],
[0.0, 1.0]], dtype=self.dtype)
(initial_prob, transition_matrix,
observation_probs) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_probs_data])
[num_steps] = self.make_placeholders([4])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
time_varying_transition_distribution=True,
num_steps=num_steps,
validate_args=True)
inferred_priors = self.evaluate(model.prior_marginals().probs_parameter())
expected_priors = [
[[0.5, 0.5], [0.55, 0.45], [0.565, 0.435], [0.639, 0.361]],
[[0.5, 0.5], [0.5, 0.5], [0.1, 0.9], [0.5, 0.5]]
]
self.assertAllClose(inferred_priors, expected_priors)
def test_time_dimension_observation_sample_consistent_mean_variance(self):
initial_prob_data = tf.constant([0.6, 0.1, 0.3], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.2, 0.6, 0.2],
[0.5, 0.3, 0.2],
[0.0, 1.0, 0.0]], dtype=self.dtype)
observation_locs_data = tf.constant([[0.0, 1.0, 2.0],
[0.0, 1.0, 2.0],
[0.0, 1.0, 2.0],
[0.0, 1.0, 2.0],
[0.0, 1.0, 2.0]], dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([5])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_observation_distribution=True)
self.run_test_sample_consistent_mean_variance(self.evaluate, model,
num_samples=100000,
rtol=0.03)
def test_dynamic_observation_sample_consistent_mean_variance(self):
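# With time_varying_observation_distribution=True the leading axis of
# the observation locs ([5, 3]: num_steps x num_states) indexes time,
# so each step can use a different set of emission means.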
initial_prob_data = tf.constant([0.6, 0.1, 0.3], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.2, 0.6, 0.2],
[0.5, 0.3, 0.2],
[0.0, 1.0, 0.0]], dtype=self.dtype)
observation_locs_data = tf.constant([[0.0, 1.0, 2.0],
[0.0, 2.0, 1.0],
[2.0, 1.0, 0.0],
[0.0, 1.0, 2.0],
[2.0, 1.0, 0.0]], dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([5])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_observation_distribution=True)
self.run_test_sample_consistent_mean_variance(self.evaluate, model,
num_samples=100000,
rtol=0.03)
def test_high_rank_dynamic_observation_mean_shape(self):
initial_prob_data = tf.constant([[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3]], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.2, 0.6, 0.2],
[0.5, 0.3, 0.2],
[0.0, 1.0, 0.0]],
dtype=self.dtype)
observation_locs_data = tf.constant([[0.0, 1.0, 2.0],
[0.0, 2.0, 1.0],
[2.0, 1.0, 0.0],
[0.0, 1.0, 2.0],
[2.0, 1.0, 0.0]],
dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([5])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_observation_distribution=True)
x = model.mean()
x_shape = self.evaluate(tf.shape(x))
self.assertAllEqual(x_shape, [7, 5])
def test_dynamic_observation_mean_and_variance_match(self):
initial_prob_data = tf.constant([0.6, 0.1, 0.3], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.2, 0.6, 0.2],
[0.5, 0.3, 0.2],
[0.0, 1.0, 0.0]], dtype=self.dtype)
static_observation_locs_data = tf.constant([0.0, 1.0, 2.0],
dtype=self.dtype)
dynamic_observation_locs_data = tf.constant(
[[0.0, 1.0, 2.0],
[0.0, 1.0, 2.0],
[0.0, 1.0, 2.0],
[0.0, 1.0, 2.0],
[0.0, 1.0, 2.0]], dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix, static_observation_locs,
dynamic_observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
static_observation_locs_data, dynamic_observation_locs_data,
observation_scale_data])
[num_steps] = self.make_placeholders([5])
static_model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=static_observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True)
dynamic_model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=dynamic_observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_observation_distribution=True)
self.assertAllClose(self.evaluate(static_model.mean()),
self.evaluate(dynamic_model.mean()),
0.03)
self.assertAllClose(self.evaluate(static_model.variance()),
self.evaluate(dynamic_model.variance()),
0.03)
def test_dynamic_observation_posterior_is_appropriate(self):
# This test forces evaluation of the _observation_log_probs method.
initial_prob_data = tf.constant([0.9, 0.1], dtype=self.dtype)
transition_matrix_data = tf.constant([[0.9, 0.1],
[0.1, 0.9]], dtype=self.dtype)
observation_scale_data = tf.constant(0.1, dtype=self.dtype)
observation_loc_data = tf.constant([[-2., 2.],
[-1., 1.],
[0., 0.],
[1., -1.],
[2., -2.]], dtype=self.dtype)
observations_data = tf.range(5, dtype=self.dtype) - 2
(initial_prob, transition_matrix, observation_scale, observations,
observation_locs) = self.make_placeholders(
[initial_prob_data, transition_matrix_data, observation_scale_data,
observations_data, observation_loc_data])
[num_steps] = self.make_placeholders([5])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_observation_distribution=True)
# Observations are a line from -2 to 2. Model has two states, one that
# matches the observations, and one that is the negated version. The
# maximum confusion should occur on the third step, but state 0 should
# always be much more probable.
marginals = self.evaluate(model.posterior_marginals(observations).logits)
self.assertAllClose(marginals[:, 0], np.full((5,), 0.), atol=0.03)
self.assertAllLess(marginals[:, 1], -4.)
# marginals[0:2, 0] both round to 0, so only compare them to marginals[2, 0]
self.assertGreater(marginals[0, 0], marginals[2, 0])
self.assertGreater(marginals[1, 0], marginals[2, 0])
self.assertLess(marginals[2, 0], marginals[3, 0])
self.assertLess(marginals[2, 0], marginals[4, 0])
self.assertLess(marginals[0, 1], marginals[1, 1])
self.assertLess(marginals[1, 1], marginals[2, 1])
self.assertGreater(marginals[2, 1], marginals[3, 1])
self.assertGreater(marginals[3, 1], marginals[4, 1])
mode = self.evaluate(model.posterior_mode(observations))
self.assertAllEqual(mode, np.full((5,), 0))
def test_batched_observations_with_dynamic_observation(self):
initial_prob_data = tf.constant([0.6, 0.1, 0.3],
dtype=self.dtype)
transition_matrix_data = tf.constant([[0.2, 0.6, 0.2],
[0.5, 0.3, 0.2],
[0.0, 1.0, 0.0]],
dtype=self.dtype)
observation_locs_data = tf.constant([[0.0, 1.0, 2.0],
[0.0, 2.0, 1.0],
[2.0, 1.0, 0.0],
[0.0, 1.0, 2.0],
[2.0, 1.0, 0.0]],
dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([5])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_observation_distribution=True)
observations = tf.constant(np.random.normal(
size=[12, 17, 5]), dtype=self.dtype)
self.evaluate(model.log_prob(observations))
self.evaluate(model.posterior_marginals(observations).logits)
self.evaluate(model.posterior_mode(observations))
def test_dynamic_transition_log_prob_and_posterior_is_appropriate(self):
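# Three transition matrices, one per step: the first two are
# near-identity and the third flips the states. The likely sequence
# therefore stays in state 0 for three steps and switches to state 1
# at the flip; the unlikely sequence switches a step too early.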
initial_prob_data = tf.constant([0.999, 0.001], dtype=self.dtype)
transition_matrix_data = tf.constant([[[0.999, 0.001],
[0.001, 0.999]],
[[0.999, 0.001],
[0.001, 0.999]],
[[0.001, 0.999],
[0.999, 0.001]]],
dtype=self.dtype)
observation_scale_data = tf.constant(0.1, dtype=self.dtype)
observation_loc_data = tf.constant([0.0, 1.0], dtype=self.dtype)
observations_data_unlikely = tf.constant([0.0, 0.0, 1.0, 1.0],
dtype=self.dtype)
observations_data_likely = tf.constant([0.0, 0.0, 0.0, 1.0],
dtype=self.dtype)
(initial_prob, transition_matrix, observation_scale, observations_unlikely,
observations_likely, observation_locs) = self.make_placeholders(
[initial_prob_data, transition_matrix_data, observation_scale_data,
observations_data_unlikely, observations_data_likely,
observation_loc_data])
[num_steps] = self.make_placeholders([4])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_transition_distribution=True)
likely_log_prob = self.evaluate(model.log_prob(observations_likely))
unlikely_log_prob = self.evaluate(model.log_prob(observations_unlikely))
self.assertLess(likely_log_prob, 1e1)
self.assertLess(unlikely_log_prob, 1e-6)
marginals = self.evaluate(model.posterior_marginals(
observations_data_likely).probs_parameter())
self.assertAllClose(marginals, [[1., 0.],
[1., 0.],
[1., 0.],
[0., 1.]], 1e-2)
mode = self.evaluate(model.posterior_mode(observations_data_likely))
self.assertAllClose(mode, [0., 0., 0., 1.], 1e-2)
def test_dynamic_transition_sample_consistent_mean_variance(self):
initial_prob_data = tf.constant([0.6, 0.1, 0.3], dtype=self.dtype)
transition_matrix_data = tf.constant([[[0.2, 0.6, 0.2],
[0.5, 0.3, 0.2],
[0.0, 1.0, 0.0]],
[[0.998, 0.001, 0.001],
[0.001, 0.998, 0.001],
[0.001, 0.001, 0.998]]],
dtype=self.dtype)
observation_locs_data = tf.constant([0.0, 1.0, 2.0], dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([3])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_transition_distribution=True)
self.run_test_sample_consistent_mean_variance(self.evaluate, model,
num_samples=100000,
rtol=0.03)
def test_high_rank_dynamic_transition_mean_shape(self):
initial_prob_data = tf.constant([[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3]], dtype=self.dtype)
transition_matrix_data = tf.constant([[[0.2, 0.6, 0.2],
[0.5, 0.3, 0.2],
[0.0, 1.0, 0.0]],
[[0.998, 0.001, 0.001],
[0.001, 0.998, 0.001],
[0.001, 0.001, 0.998]]],
dtype=self.dtype)
observation_locs_data = tf.constant([0.0, 1.0, 2.0],
dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([3])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_transition_distribution=True)
x = model.mean()
x_shape = self.evaluate(tf.shape(x))
self.assertAllEqual(x_shape, [7, 3])
def test_dynamic_transition_mean_and_variance_match(self):
initial_prob_data = tf.constant([[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3],
[0.6, 0.1, 0.3]], dtype=self.dtype)
transition_matrix_data = tf.constant([[[0.2, 0.6, 0.2],
[0.5, 0.3, 0.2],
[0.0, 1.0, 0.0]],
[[0.2, 0.6, 0.2],
[0.5, 0.3, 0.2],
[0.0, 1.0, 0.0]]],
dtype=self.dtype)
observation_locs_data = tf.constant([[0.0, 1.0, 2.0]],
dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([3])
static_model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix[0, :, :]),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_transition_distribution=False)
dynamic_model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_transition_distribution=True)
self.assertAllClose(self.evaluate(static_model.mean()),
self.evaluate(dynamic_model.mean()),
0.03)
self.assertAllClose(self.evaluate(static_model.variance()),
self.evaluate(dynamic_model.variance()),
0.03)
def test_batched_observations_with_dynamic_transition(self):
initial_prob_data = tf.constant([0.6, 0.1, 0.3],
dtype=self.dtype)
transition_matrix_data = tf.constant(4 * [[[0.2, 0.6, 0.2],
[0.5, 0.3, 0.2],
[0.0, 1.0, 0.0]]],
dtype=self.dtype)
observation_locs_data = tf.constant([0.0, 1.0, 2.0],
dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([5])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_transition_distribution=True)
observations = tf.constant(np.random.normal(
size=[12, 17, 5]), dtype=self.dtype)
self.evaluate(model.log_prob(observations))
self.evaluate(model.posterior_marginals(observations).logits)
self.evaluate(model.posterior_mode(observations))
def test_batched_observations_with_dynamic_transition_and_observation(self):
initial_prob_data = tf.constant([0.6, 0.1, 0.3],
dtype=self.dtype)
transition_matrix_data = tf.constant(4 * [[[0.2, 0.6, 0.2],
[0.5, 0.3, 0.2],
[0.0, 1.0, 0.0]]],
dtype=self.dtype)
observation_locs_data = tf.constant(5 * [[0.0, 1.0, 2.0]],
dtype=self.dtype)
observation_scale_data = tf.constant(0.5, dtype=self.dtype)
(initial_prob, transition_matrix,
observation_locs, observation_scale) = self.make_placeholders([
initial_prob_data, transition_matrix_data,
observation_locs_data, observation_scale_data])
[num_steps] = self.make_placeholders([5])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_transition_distribution=True,
time_varying_observation_distribution=True)
observations = tf.constant(np.random.normal(
size=[12, 17, 5]), dtype=self.dtype)
self.evaluate(model.log_prob(observations))
self.evaluate(model.posterior_marginals(observations).logits)
self.evaluate(model.posterior_mode(observations))
def test_dynamic_distributions_log_prob_and_posterior_is_appropriate(self):
initial_prob_data = tf.constant([0.999, 0.001], dtype=self.dtype)
transition_matrix_data = tf.constant([[[0.999, 0.001],
[0.001, 0.999]],
[[0.999, 0.001],
[0.001, 0.999]],
[[0.001, 0.999],
[0.999, 0.001]]],
dtype=self.dtype)
observation_scale_data = tf.constant(0.1, dtype=self.dtype)
observation_loc_data = tf.constant([[0.0, 1.0],
[0.0, 1.0],
[1.0, 0.0],
[1.0, 0.0]], dtype=self.dtype)
observations_data_unlikely = tf.constant([0.0, 0.0, 0.0, 1.0],
dtype=self.dtype)
observations_data_likely = tf.constant([0.0, 0.0, 1.0, 0.0],
dtype=self.dtype)
(initial_prob, transition_matrix, observation_scale, observations_unlikely,
observations_likely, observation_locs) = self.make_placeholders(
[initial_prob_data, transition_matrix_data, observation_scale_data,
observations_data_unlikely, observations_data_likely,
observation_loc_data])
[num_steps] = self.make_placeholders([4])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_transition_distribution=True,
time_varying_observation_distribution=True)
likely_log_prob = self.evaluate(model.log_prob(observations_likely))
unlikely_log_prob = self.evaluate(model.log_prob(observations_unlikely))
self.assertLess(likely_log_prob, 1e1)
self.assertLess(unlikely_log_prob, 1e-6)
marginals = self.evaluate(model.posterior_marginals(
observations_data_likely).probs_parameter())
self.assertAllClose(marginals, [[1., 0.],
[1., 0.],
[1., 0.],
[0., 1.]], 1e-2)
mode = self.evaluate(model.posterior_mode(observations_data_likely))
self.assertAllClose(mode, [0., 0., 0., 1.], 1e-2)
def test_batch_dynamic_distributions_log_prob_and_posterior(self):
initial_prob_data = tf.constant([0.999, 0.001], dtype=self.dtype)
transition_matrix_data = tf.constant(6 * [[[[0.999, 0.001],
[0.001, 0.999]],
[[0.999, 0.001],
[0.001, 0.999]],
[[0.001, 0.999],
[0.999, 0.001]]]],
dtype=self.dtype)
observation_scale_data = tf.constant(0.1, dtype=self.dtype)
observation_loc_data = tf.constant(1 * [[[0.0, 1.0],
[0.0, 1.0],
[1.0, 0.0],
[1.0, 0.0]]], dtype=self.dtype)
observations_data_unlikely = tf.constant([0.0, 0.0, 0.0, 1.0],
dtype=self.dtype)
observations_data_likely = tf.constant(5 * [6 * [[0.0, 0.0, 1.0, 0.0]]],
dtype=self.dtype)
(initial_prob, transition_matrix, observation_scale, observations_unlikely,
observations_likely, observation_locs) = self.make_placeholders(
[initial_prob_data, transition_matrix_data, observation_scale_data,
observations_data_unlikely, observations_data_likely,
observation_loc_data])
[num_steps] = self.make_placeholders([4])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True,
time_varying_transition_distribution=True,
time_varying_observation_distribution=True)
likely_log_prob = self.evaluate(model.log_prob(observations_likely))
unlikely_log_prob = self.evaluate(model.log_prob(observations_unlikely))
self.assertLess(likely_log_prob[0, 0], 1e1)
self.assertLess(unlikely_log_prob[0], 1e-6)
marginals = self.evaluate(model.posterior_marginals(
observations_data_likely).probs_parameter())
self.assertAllClose(marginals[0, 0], [[1., 0.],
[1., 0.],
[1., 0.],
[0., 1.]], 1e-2)
mode = self.evaluate(model.posterior_mode(observations_data_likely))
self.assertAllClose(mode[0, 0], [0., 0., 0., 1.], 1e-2)
@parameterized.named_parameters(('', False),
('_dynamic', True))
def test_log_prob_mask(self, dynamic):
num_steps = 4
initial_prob = tf.constant([0.999, 0.001], dtype=self.dtype)
transition_matrix = tf.constant([[0.999, 0.001],
[0.001, 0.999]],
dtype=self.dtype)
observation_scale = tf.constant(0.1, dtype=self.dtype)
observation_locs = tf.constant([0.0, 1.0], dtype=self.dtype)
observations_unlikely = tf.constant([0.0, 1.0, 0.0, 1.0],
dtype=self.dtype)
mask_none = tf.constant([False, False, False, False])
mask_all = tf.constant([True, True, True, True])
mask_unlikely = tf.constant([False, True, False, True])
mask_batch = tf.stack([mask_none, mask_all, mask_unlikely], axis=0)
if dynamic:
(initial_prob, transition_matrix, observation_scale,
observations_unlikely, observation_locs, mask_none, mask_all,
mask_unlikely, mask_batch, num_steps) = self.make_placeholders(
[initial_prob, transition_matrix, observation_scale,
observations_unlikely, observation_locs,
mask_none, mask_all, mask_unlikely, mask_batch, num_steps])
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
mask=mask_unlikely,
validate_args=True)
log_prob_mask_unlikely = self.evaluate(
model.log_prob(observations_unlikely))
log_prob_mask_unlikely_explicit = self.evaluate(
model.log_prob(observations_unlikely, mask=mask_unlikely))
self.assertAllEqual(log_prob_mask_unlikely, log_prob_mask_unlikely_explicit)
log_prob_mask_all = self.evaluate(
model.log_prob(observations_unlikely, mask=mask_all))
self.assertAllClose(log_prob_mask_all, 0.)
log_prob_mask_none = self.evaluate(
model.log_prob(observations_unlikely, mask=mask_none))
self.assertLess(log_prob_mask_none, log_prob_mask_unlikely)
log_prob_mask_batch = self.evaluate(
model.log_prob(observations_unlikely, mask=mask_batch))
self.assertAllClose(log_prob_mask_batch,
[log_prob_mask_none,
log_prob_mask_all,
log_prob_mask_unlikely])
batch_model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
mask=mask_batch,
validate_args=True)
self.assertAllEqual(batch_model.batch_shape_tensor(), [3])
self.assertAllClose(
log_prob_mask_batch,
batch_model.log_prob(observations_unlikely))
class HiddenMarkovModelTestFloat32(_HiddenMarkovModelTest):
dtype = tf.float32
class HiddenMarkovModelTestFloat64(_HiddenMarkovModelTest):
dtype = tf.float64
del _HiddenMarkovModelTest
class _HiddenMarkovModelAssertionTest(
test_util.VectorDistributionTestHelpers,
test_util.DiscreteScalarDistributionTestHelpers,
test_util.TestCase):
def test_integer_initial_state_assertion(self):
transition_matrix = np.array([[0.9, 0.1],
[0.1, 0.9]])
observation_probs = np.array([[1.0, 0.0],
[0.0, 1.0]])
num_steps = 2
message = 'is not over integers'
with self.assertRaisesRegexp(Exception, message):
model = tfd.HiddenMarkovModel(
tfd.Normal(loc=0.0, scale=1.0),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
_ = self.evaluate(model.sample())
def test_integer_transition_state_assertion(self):
initial_prob = np.array([0.9, 0.1])
observation_probs = np.array([[1.0, 0.0],
[0.0, 1.0]])
num_steps = 2
message = 'is not over integers'
with self.assertRaisesRegexp(Exception, message):
model = tfd.HiddenMarkovModel(tfd.Categorical(probs=initial_prob),
tfd.Normal(loc=0.0, scale=1.0),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
_ = self.evaluate(model.sample())
def test_scalar_num_steps_assertion(self):
initial_prob = np.array([0.9, 0.1])
transition_matrix = np.array([[0.9, 0.1],
[0.1, 0.9]])
observation_probs = np.array([[1.0, 0.0],
[0.0, 1.0]])
num_steps = np.array([2, 3])
message = '`num_steps` must be a scalar'
with self.assertRaisesRegexp(Exception, message):
model = tfd.HiddenMarkovModel(tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
_ = self.evaluate(model.sample())
def test_variable_num_steps_assertion(self):
initial_prob = np.array([0.9, 0.1])
transition_matrix = np.array([[0.9, 0.1],
[0.1, 0.9]])
observation_probs = np.array([[1.0, 0.0],
[0.0, 1.0]])
num_steps = tf.Variable(np.array([2, 3]))
message = '`num_steps` must be a scalar'
with self.assertRaisesRegexp(Exception, message):
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
_ = self.evaluate(model.sample())
def test_num_steps_greater1_assertion(self):
initial_prob = np.array([0.9, 0.1])
transition_matrix = np.array([[0.9, 0.1],
[0.1, 0.9]])
observation_probs = np.array([[1.0, 0.0],
[0.0, 1.0]])
num_steps = 0
message = '`num_steps` must be at least 1'
with self.assertRaisesRegexp(Exception, message):
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
_ = self.evaluate(model.sample())
def test_initial_scalar_assertion(self):
initial_prob = np.array([0.9, 0.1])
transition_matrix = np.array([[0.9, 0.1],
[0.1, 0.9]])
observation_probs = np.array([[1.0, 0.0],
[0.0, 1.0]])
num_steps = 2
message = 'must have scalar'
with self.assertRaisesRegexp(Exception, message):
model = tfd.HiddenMarkovModel(
tfd.Sample(tfd.Categorical(probs=initial_prob), sample_shape=2),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
_ = self.evaluate(model.sample())
def test_batch_agreement_assertion(self):
initial_prob = np.array([[0.9, 0.1],
[0.1, 0.9]])
transition_matrix = np.array([[1.0]])
observation_probs = np.array([[1.0, 0.0],
[0.0, 1.0]])
num_steps = 1
message = 'must agree on'
with self.assertRaisesRegexp(Exception, message):
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
_ = self.evaluate(model.sample())
def test_variable_batch_agreement_assertion(self):
initial_prob = np.array([[0.9, 0.1],
[0.1, 0.9]])
transition_matrix_data = np.array([[1.0]])
observation_probs_data = np.array([[1.0, 0.0],
[0.0, 1.0]])
transition_matrix = tf.Variable(transition_matrix_data)
observation_probs = tf.Variable(observation_probs_data)
self.evaluate(transition_matrix.initializer)
self.evaluate(observation_probs.initializer)
num_steps = 1
message = 'must agree on'
with self.assertRaisesRegexp(Exception, message):
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
_ = self.evaluate(model.sample())
def test_modified_variable_batch_agreement_assertion(self):
initial_prob = np.array([[0.9, 0.1],
[0.1, 0.9]])
transition_matrix_data = np.array([[1.0, 0.0],
[0.0, 1.0]])
transition_matrix_data2 = np.array([[1.0]])
observation_probs_data = np.array([[1.0, 0.0],
[0.0, 1.0]])
transition_matrix = tf.Variable(transition_matrix_data,
shape=tf.TensorShape(None))
observation_probs = tf.Variable(observation_probs_data,
shape=tf.TensorShape(None))
self.evaluate(transition_matrix.initializer)
self.evaluate(observation_probs.initializer)
num_steps = 1
message = 'transition_distribution` and `observation_distribution` must'
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Categorical(probs=observation_probs),
num_steps=num_steps,
validate_args=True)
with self.assertRaisesRegexp(Exception, message):
with tf.control_dependencies([
transition_matrix.assign(transition_matrix_data2)]):
_ = self.evaluate(model.sample())
def test_non_scalar_transition_batch(self):
initial_prob = tf.constant([0.6, 0.4])
    # The HMM class expects a `Categorical` distribution for each state.
# This test provides only a single scalar distribution.
# For this test to pass it must raise an appropriate exception.
transition_matrix = tf.constant([0.6, 0.4])
observation_locs = tf.constant(0.0)
observation_scale = tf.constant(0.5)
num_steps = 4
with self.assertRaisesRegexp(Exception, 'can\'t have scalar batches'):
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True)
self.evaluate(model.mean())
def test_variable_non_scalar_transition_batch(self):
initial_prob = tf.constant([0.6, 0.4])
    # The HMM class expects a `Categorical` distribution for each state.
# This test provides only a single scalar distribution.
# For this test to pass it must raise an appropriate exception.
transition_matrix_data = tf.constant([0.6, 0.4])
transition_matrix = tf.Variable(transition_matrix_data)
self.evaluate(transition_matrix.initializer)
observation_locs = tf.constant([0.0, 1.0])
observation_scale = tf.constant([0.5, 0.5])
num_steps = 4
with self.assertRaisesRegexp(Exception, 'can\'t have scalar batches'):
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(loc=observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True)
self.evaluate(model.mean())
def test_modified_variable_non_scalar_transition_batch(self):
initial_prob = tf.constant([0.6, 0.4])
transition_matrix_data = tf.constant([[0.6, 0.4], [0.5, 0.5]])
transition_matrix = tf.Variable(
transition_matrix_data,
shape=tf.TensorShape(None))
transition_matrix_data2 = tf.constant([0.6, 0.4])
self.evaluate(transition_matrix.initializer)
observation_locs = tf.constant([0.0, 1.0])
observation_scale = tf.constant([0.5, 0.5])
num_steps = 4
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(observation_locs, scale=observation_scale),
num_steps=num_steps,
validate_args=True)
with self.assertRaisesRegexp(
Exception,
'have scalar batches'):
with tf.control_dependencies([
transition_matrix.assign(transition_matrix_data2)]):
self.evaluate(model.mean())
def test_github_issue_854(self):
nstates = 3
data = np.random.randint(low=0, high=10, size=(5, 7, 11))
p_init = tfd.Categorical(probs=np.float32(np.ones(nstates) / nstates))
pswitch = 0.05
pt = pswitch / (nstates - 1) * np.ones([nstates, nstates])
np.fill_diagonal(pt, 1 - pswitch)
p_trans = tfd.Categorical(probs=np.float32(pt))
# prior on NB probability
p_nb = self.evaluate(tfd.Beta(2, 5).sample([nstates, data.shape[-1]],
seed=test_util.test_seed()))
p_emission = tfd.Independent(tfd.NegativeBinomial(1, probs=p_nb),
reinterpreted_batch_ndims=1)
hmm = tfd.HiddenMarkovModel(
initial_distribution=p_init,
transition_distribution=p_trans,
observation_distribution=p_emission,
num_steps=data.shape[-2])
self.assertAllEqual(data.shape[-2:],
tf.shape(hmm.sample(seed=test_util.test_seed())))
self.assertAllEqual(data.shape[:1],
tf.shape(hmm.log_prob(data)))
def test_time_varying_transition_batch_size(self):
initial_prob = tf.constant([0.6, 0.4])
transition_matrix = tf.constant(7 * [[[0.6, 0.4], [0.6, 0.4]]])
observation_locs = tf.constant([0.0, 1.0])
observation_scale = tf.constant(0.5)
num_steps = 5
with self.assertRaisesRegexp(Exception, 'matches num_steps - 1.'):
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(observation_locs, scale=observation_scale),
num_steps=num_steps,
time_varying_transition_distribution=True,
validate_args=True)
self.evaluate(model.mean())
def test_time_varying_observation_batch_size(self):
initial_prob = tf.constant([0.6, 0.4])
transition_matrix = tf.constant([[0.6, 0.4], [0.6, 0.4]])
observation_locs = tf.constant(7 * [[0.0, 1.0]])
observation_scale = tf.constant(0.5)
num_steps = 6
with self.assertRaisesRegexp(Exception, 'matches num_steps.'):
model = tfd.HiddenMarkovModel(
tfd.Categorical(probs=initial_prob),
tfd.Categorical(probs=transition_matrix),
tfd.Normal(observation_locs, scale=observation_scale),
num_steps=num_steps,
time_varying_observation_distribution=True,
validate_args=True)
self.evaluate(model.mean())
class HiddenMarkovModelJaxTest(test_util.TestCase):
def test_jit(self):
if not JAX_MODE:
self.skipTest('JAX-only test')
import jax # pylint: disable=g-import-not-at-top
@jax.jit
def test(data):
p_c = tf.constant([0.1, 0.2])
p_e = tf.constant([0.2, 0.3])
one = tf.ones_like(p_c)
zero = tf.zeros_like(p_c)
dist = tfd.HiddenMarkovModel(
initial_distribution=tfd.Bernoulli(probs=0.),
transition_distribution=tfd.Bernoulli(probs=tf.stack([p_c, one], -1)),
observation_distribution=tfd.Bernoulli(
probs=tf.stack([p_e, zero], -1)),
num_steps=data.shape[-1])
lp = dist.log_prob(data)
pom = dist.posterior_mode(data)
s = dist.sample(seed=jax.random.PRNGKey(0))
prm = dist.prior_marginals()
pom2 = dist.posterior_marginals(data)
return lp, pom, s, prm, pom2
data = tf.ones(5)
test(data)
if __name__ == '__main__':
test_util.main()
| [
"[email protected]"
] | |
2ded6d1331e6c08a950ed3425fae0dc00936f50f | ed842d4a85d16e9248fe54a018fde1e781b885d5 | /view_masks.py | b5e84dc0a6e433f9a42587e8ea54ae9c165f953b | [] | no_license | jmargieh/kaggle_dstl_satellite | cd0cede9978014d7743a38d6c2884494b6b720ca | 9e60ea20d2edd861c8585f149d1b6ebca2bb891a | refs/heads/master | 2020-03-27T00:09:00.809288 | 2017-04-28T00:52:51 | 2017-04-28T00:52:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,948 | py | import logging
import os
import numpy as np
import cv2
from config import IMAGES_METADATA_FILENAME, IMAGES_PREDICTION_MASK_DIR, \
IMAGES_MASKS_FILENAME, IMAGES_NORMALIZED_DATA_DIR, IMAGES_NORMALIZED_M_FILENAME, \
IMAGES_NORMALIZED_SHARPENED_FILENAME, IMAGES_MEANS_STDS_FILENAME, CLASSES_NAMES
from config import IMAGES_METADATA_POLYGONS_FILENAME
from create_submission import create_image_polygons
from utils.data import load_pickle, get_train_test_images_ids
from utils.matplotlib import matplotlib_setup, plot_image, plot_polygons, plot_two_masks
from utils.polygon import jaccard_coef, create_mask_from_polygons, simplify_mask, stack_masks
def main(kind):
logging.basicConfig(
level=logging.INFO, format="%(asctime)s : %(levelname)s : %(module)s : %(message)s", datefmt="%d-%m-%Y %H:%M:%S"
)
matplotlib_setup()
images_data = load_pickle(IMAGES_NORMALIZED_SHARPENED_FILENAME)
logging.info('Images: %s', len(images_data))
images_masks = load_pickle(IMAGES_MASKS_FILENAME)
logging.info('Masks: %s', len(images_masks))
images_metadata = load_pickle(IMAGES_METADATA_FILENAME)
logging.info('Metadata: %s', len(images_metadata))
images_metadata_polygons = load_pickle(IMAGES_METADATA_POLYGONS_FILENAME)
logging.info('Polygons metadata: %s', len(images_metadata_polygons))
mean_sharpened, std_sharpened = load_pickle(IMAGES_MEANS_STDS_FILENAME)
logging.info('Mean: %s, Std: %s', mean_sharpened.shape, std_sharpened.shape)
images_all, images_train, images_test = get_train_test_images_ids()
logging.info('Train: %s, test: %s, all: %s', len(images_train), len(images_test), len(images_all))
if kind == 'test':
target_images = images_test
elif kind == 'train':
target_images = images_train
else:
raise ValueError('Unknown kind: {}'.format(kind))
nb_target_images = len(target_images)
logging.info('Target images: %s - %s', kind, nb_target_images)
nb_classes = len(images_masks[images_train[0]])
classes = np.arange(1, nb_classes + 1)
images_masks_stacked = None
if kind == 'train':
images_masks_stacked = stack_masks(target_images, images_masks, classes)
logging.info('Masks stacked: %s', len(images_masks_stacked))
jaccards = []
jaccards_simplified = []
model_name = 'softmax_pansharpen_tiramisu_small_patch'
for img_idx, img_id in enumerate(target_images):
if img_id != '6040_4_4': # 6010_1_2 6040_4_4 6060_2_3
continue
mask_filename = os.path.join(IMAGES_PREDICTION_MASK_DIR, '{0}_{1}.npy'.format(img_id, model_name))
if not os.path.isfile(mask_filename):
logging.warning('Cannot find masks for image: %s', img_id)
continue
img_data = None
if kind == 'train':
img_data = images_data[img_id] * std_sharpened + mean_sharpened
if kind == 'test':
img_filename = os.path.join(IMAGES_NORMALIZED_DATA_DIR, img_id + '.npy')
img_data = np.load(img_filename)
img_metadata = images_metadata[img_id]
img_mask_pred = np.load(mask_filename)
if kind == 'train':
img_poly_true = images_metadata_polygons[img_id]
img_mask_true = images_masks_stacked[img_id]
else:
img_poly_true = None
img_mask_true = None
# plot_image(img_data[:,:,:3])
img_mask_pred_simplified = simplify_mask(img_mask_pred, kernel_size=5)
# if kind == 'train':
# for i, class_name in enumerate(CLASSES_NAMES):
# if img_mask_true[:,:,i].sum() > 0:
# plot_two_masks(img_mask_true[:,:,i], img_mask_pred[:,:,i],
# titles=['Ground Truth - {}'.format(class_name), 'Prediction - {}'.format(class_name)])
# plot_two_masks(img_mask_pred[:,:,i], img_mask_pred_simplified[:,:,i],
# titles=['Ground Truth - {}'.format(class_name), 'Prediction Simplified - {}'.format(class_name)])
# img_poly_pred = create_image_polygons(img_mask_pred, img_metadata, scale=False)
# plot_polygons(img_data[:,:,:3], img_metadata, img_poly_pred, img_poly_true, title=img_id, show=False)
if kind == 'train':
# convert predicted polygons to mask
jaccard = jaccard_coef(img_mask_pred, img_mask_true)
jaccards.append(jaccard)
jaccard_simplified = jaccard_coef(img_mask_pred_simplified, img_mask_true)
jaccards_simplified.append(jaccard_simplified)
logging.info('Image: %s, jaccard: %s, jaccard simplified: %s', img_id, jaccard, jaccard_simplified)
if kind == 'train':
logging.info('Mean jaccard: %s, Mean jaccard simplified: %s', np.mean(jaccards), np.mean(jaccards_simplified))
import matplotlib.pyplot as plt
plt.show()
if __name__ == '__main__':
kind = 'train'
main(kind)
| [
"[email protected]"
] | |
de1665592aca7a34f328a8dca62e4afadb4b1ada | e385a3bd278fc6add76c430038fdd6000b6ea715 | /B_Search_Algorithms/A_Algorithms/search_linear.py | f61b22b596672b534837c5bc13c1038131e9113f | [
"MIT"
] | permissive | Oscar-Oliveira/Data-Structures-and-Algorithms | e781bcc34abe2a05113b457c48e836072d67100e | 4f75a5aa1e525a5b59944a2cc15f670f0b216a80 | refs/heads/master | 2021-09-26T08:43:51.711847 | 2018-10-28T08:40:10 | 2018-10-28T08:40:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 487 | py | """
LinearSearch
"""
from A_Algorithms.search_adt import Search
class LinearSearch(Search):
"""Linear search"""
def search(self):
self.comparisons = 0
for pos, value in enumerate(self.list):
self.comparisons += 1
if value == self.item:
return pos
return -1
@staticmethod
def WorstCase(size):
return size - 1
@staticmethod
def MaxSteps(size):
return size
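# Usage sketch (illustrative; assumes the Search base class in search_adt
# stores the sequence as self.list and the target as self.item):
#   searcher = LinearSearch([3, 1, 4, 1, 5], 4)
#   searcher.search()  # -> 2, after 3 comparisons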
| [
"[email protected]"
] | |
53cad8638861d7fa92d08025c7e2417ff6e4d9d6 | c71a7ea09fcfea74f99acc05ce86f693dc965a36 | /2day/6-石头剪刀布面向对象.py | 769b9479be98a4306976bc56467ee3a5212ac1ec | [] | no_license | fengshuai1/1807-2 | fe7a00ef2ae313d62ed3839d78024d3b19cbe29d | 1324e8816069fce347bb2d3b86eb28707f361752 | refs/heads/master | 2018-10-31T22:04:47.907942 | 2018-08-24T09:19:47 | 2018-08-24T09:19:47 | 143,669,019 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | class cai():
    def quan(self):
        i = 0
        while i < 5:
            import random
            computer = random.randint(1, 3)  # computer player
            player = int(input("Enter 1: rock  2: scissors  3: paper "))
            if player <= 3 and player > 0:
                if (player == 1 and computer == 2) or (player == 2 and computer == 3) or (player == 3 and computer == 1):
                    print("You win")
                elif player == computer:
                    print("It's a tie")
                else:
                    print("You lose")
            else:
                print("Invalid input")
            i += 1  # i = i + 1
a = cai()
a.quan()
| [
"[email protected]"
] | |
693a6b56c1dcfa2ea9662fb36b4be998ad33ad48 | b0c391ecf351e2317ac61c257dd6bfa5b10d4015 | /pymotifs/utils/discrepancy.py | ba46d3fcda401c9febc9bcd011eeb1154a72c7ae | [] | no_license | BGSU-RNA/RNA-3D-Hub-core | 57db94bfff9b338b3a751f545699f4117150b921 | 1982e10a56885e56d79aac69365b9ff78c0e3d92 | refs/heads/master | 2023-05-26T09:41:38.397152 | 2023-05-23T05:50:10 | 2023-05-23T05:50:10 | 6,049,336 | 3 | 1 | null | 2022-06-21T21:27:52 | 2012-10-02T18:26:11 | Python | UTF-8 | Python | false | false | 1,617 | py | """This contains some utility functions for dealing with discrepancies.
"""
from pymotifs.constants import MAX_RESOLUTION_DISCREPANCY
from pymotifs.constants import MIN_NT_DISCREPANCY
def should_compare_chain_discrepancy(chain):
"""Check if we can compared discrepancies using this chain.
Parameters
----------
chain : dict
The chain dict to test.
Returns
-------
valid : bool
        True if the discrepancy of this chain can be used for comparisons.
"""
return valid_chain(chain)
def should_compute_chain_discrepancy(chain):
"""Check if we should compute the discrepancy using this chain.
Parameters
----------
chain : dict
The chain dict to test.
Returns
-------
valid : bool
True if this chain should have a discrepancy computed using it.
"""
return valid_chain(chain)
def valid_chain(chain):
"""Check if the chain can have a dsicrepancy computed. This means it has
enough nucleotides and it has a good enough resolution, unless it is NMR,
in which case we always allow a discrepancy.
Parameters
----------
chain : dict
        The chain dict to test; it should have 'resolution', 'length' and
        'method' entries.
Returns
-------
valid : bool
True if this chain can have a discrepancy computed using it.
"""
if chain['length'] < MIN_NT_DISCREPANCY:
return False
if chain['method'] != 'SOLUTION NMR':
return chain['resolution'] is not None and \
chain['resolution'] <= MAX_RESOLUTION_DISCREPANCY
return True
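# Usage sketch with made-up values (the real cutoffs live in pymotifs.constants):
#   chain = {'length': 80, 'method': 'X-RAY DIFFRACTION', 'resolution': 2.5}
#   valid_chain(chain)  # True when both the length and resolution cutoffs pass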
| [
"[email protected]"
] | |
4faf46f2328117f85bdcc81f35b2d0f81520a0e9 | b01646abacbef23719926477e9e1dfb42ac0f6a9 | /Rebrov/training/673K/673K_O088N0066_all_Pt111_libraries/input.py | 374655bca2c3f8ed6678fb4189e6d56c8b754ea8 | [] | no_license | Tingchenlee/Test | 41b0fd782f4f611d2b93fda6b63e70956881db33 | 37313c3f594f94cdc64c35e17afed4ae32d3e4e6 | refs/heads/master | 2023-06-02T05:38:32.884356 | 2021-06-10T11:59:02 | 2021-06-10T11:59:02 | 349,764,587 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,496 | py | # Microkinetic model for ammonia oxidation
# E.V. Rebrov, M.H.J.M. de Croon, J.C. Schouten
# Development of the kinetic model of platinum catalyzed ammonia oxidation in a microreactor
# Chemical Engineering Journal 90 (2002) 61–76
database(
thermoLibraries=['surfaceThermoPt111', 'surfaceThermoNi111', 'primaryThermoLibrary', 'thermo_DFT_CCSDTF12_BAC','DFT_QCI_thermo', 'GRI-Mech3.0-N', 'NitrogenCurran', 'primaryNS', 'CHON'],
reactionLibraries = ['Surface/CPOX_Pt/Deutschmann2006','Surface/Nitrogen','Surface/Arevalo_Pt111','Surface/Kraehnert_Pt111','Surface/Mhadeshwar_Pt111','Surface/Novell_Pt111','Surface/Offermans_Pt111','Surface/Rebrov_Pt111','Surface/Scheuer_Pt','Surface/Schneider_Pt111'],
seedMechanisms = [],
kineticsDepositories = ['training'],
kineticsFamilies = ['default'],
kineticsEstimator = 'rate rules',
)
catalystProperties(
metal = 'Pt111'
)
generatedSpeciesConstraints(
allowed=['input species','seed mechanisms','reaction libraries'],
maximumNitrogenAtoms=2,
maximumOxygenAtoms=3,
)
# List of species
species(
label='X',
reactive=True,
structure=adjacencyList("1 X u0"),
)
species(
label='O2',
reactive=True,
structure=adjacencyList(
"""
multiplicity 3
1 O u1 p2 c0 {2,S}
2 O u1 p2 c0 {1,S}
"""),
)
species(
label='H2O',
reactive=True,
structure=SMILES("O"),
)
species(
label='N2',
reactive=True,
structure=SMILES("N#N"),
)
species(
label='NO',
reactive=True,
structure=adjacencyList(
"""
multiplicity 2
1 N u1 p1 c0 {2,D}
2 O u0 p2 c0 {1,D}
"""),
)
species(
label='NH3',
reactive=True,
structure=adjacencyList(
"""
1 N u0 p1 c0 {2,S} {3,S} {4,S}
2 H u0 p0 c0 {1,S}
3 H u0 p0 c0 {1,S}
4 H u0 p0 c0 {1,S}
"""),
)
species(
label='N2O',
reactive=True,
structure=adjacencyList(
"""
1 N u0 p2 c-1 {2,D}
2 N u0 p0 c+1 {1,D} {3,D}
3 O u0 p2 c0 {2,D}
"""),
)
species(
label='He',
reactive=False,
structure=adjacencyList(
"""
1 He u0 p1 c0
"""),
)
#-------------
#temperature from 523-673K
surfaceReactor(
temperature=(673,'K'),
initialPressure=(1.0, 'bar'),
nSims=12,
initialGasMoleFractions={
"NH3": 0.066,
"O2": 0.88,
"He": 0.054,
"NO":0.0,
"H2O":0.0,
"N2O":0.0,
"N2":0.0,
},
initialSurfaceCoverages={
"X": 1.0,
},
    surfaceVolumeRatio=(2.8571428e4, 'm^-1'), #A/V = 280µm*π*9mm/140µm*140µm*π*9mm = 2.8571428e4 m^-1
terminationConversion = {"NH3":0.99,},
#terminationTime=(10, 's'),
)
simulator( #default for surface reaction atol=1e-18,rtol=1e-12
atol=1e-18, #absolute tolerance are 1e-15 to 1e-25
rtol=1e-12, #relative tolerance is usually 1e-4 to 1e-8
)
model(
toleranceKeepInEdge=0.01, #recommend setting toleranceKeepInEdge to not be larger than 10% of toleranceMoveToCore
toleranceMoveToCore=0.1,
toleranceInterruptSimulation=1e8, #This value should be set to be equal to toleranceMoveToCore unless the advanced pruning feature is desired
#to always enable pruning should be set as a high value, e.g. 1e8
maximumEdgeSpecies=5000, #set up less than 200000
minCoreSizeForPrune=50, #default value
#toleranceThermoKeepSpeciesInEdge=0.5,
minSpeciesExistIterationsForPrune=2, #default value = 2 iteration
)
options(
units='si',
saveRestartPeriod=None,
generateOutputHTML=True,
generatePlots=True,
saveEdgeSpecies=True,
saveSimulationProfiles=True,
) | [
"[email protected]"
] | |
807ee32c8630c2047e131faea4a067aa048c1f9f | ae4ec15127a34cfd060b2ba9b93f05a074748121 | /projectSubmission/code/toPytorch.py | 585c3d1c41c4513d0011bbae12cb73009fb8306a | [] | no_license | famishedrover/MCMC-NAS | 4f246a81b996515d503fcb6f29a3e9a5b6fb9c1f | a512e4c186c35028c4aa5de7978ac14800d09c86 | refs/heads/master | 2020-09-13T17:25:43.207382 | 2019-11-23T05:24:28 | 2019-11-23T05:24:28 | 222,853,249 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,230 | py | from graphGeneration import getFullArch, topsort
from graphPlot import plotUndirected, plotDirected
from neuralnet import unit, runNetwork
# extra imports as backup
import torch
import torch.nn as nn
import torch.nn.functional as F
# To convert the graph to pytorch version :
# 1. Get topsort of the graph from networkx
# 2. Assign Layer to the node in the graph according to the node
# e.g. some internal node is a conv layer etc...
# Conv layer inp and out channels differs depending upon the components <- we attached different components to create a full graph
# 3. Create a ModuleList for this new graph copy and write the forward function for pytorch which is essentially
# traverse the topsort sequentially and any element i requires outputs of parent(i) as input
# ------------------WRITE NETWORKX -> PYTORCH NODE CONVERSION SPECIFIC TO PROBLEM STATEMENT---------------------------
# Try for ImageNet
def giveLayerImageNet(G, node):
pass
# FOR MNIST <- have separate giveLayers according to image input
# The order is by design such that all 'a' components come first, then 'b', and so on
def giveLayer(G, node) :
if node == 'Ou' :
G.node[node]['layer'] = unit(8,1)
if node == 'In' :
G.node[node]['layer'] = unit(1,8)
if 'a' in node :
if node in list(G.successors('In')) :
G.node[node]['layer'] = unit(8,8) # start of component
elif node in list(G.predecessors('A')) :
G.node[node]['layer'] = unit(8,16) # end of component
else :
G.node[node]['layer'] = unit(8,8) # continuation of component
if node == 'A' :
G.node[node]['layer'] = unit(16,16,pool=True)
if 'b' in node :
if node in list(G.successors('A')) :
G.node[node]['layer'] = unit(16,32) # start of component
elif node in list(G.predecessors('B')) :
G.node[node]['layer'] = unit(32,16) # end of component
else :
G.node[node]['layer'] = unit(32,32) # continuation of component
if node == 'B' :
G.node[node]['layer'] = unit(16,8,pool=True)
if 'ou' in node :
if node in list(G.successors('B')) :
G.node[node]['layer'] = unit(8,8) # start of component
elif node in list(G.predecessors('Ou')) :
G.node[node]['layer'] = unit(8,8) # end of component
else :
G.node[node]['layer'] = unit(8,8) # continuation of component
if node == 'Ou' :
G.node[node]['layer'] = unit(8,8) # final out will be like (batch,8,x,y)
# list(G_dir.successors(n))
def attachLayerDependingUponNode(G, order):
# dict of (k,v) k=node from networkx, v is actual layer like conv etc..
# For MNIST
# giveLayer = giveLayerMNIST
for node in order :
giveLayer(G, node)
return G
# --------------------------------- SAMPLE RUN-------------------------------------------------------------
# G = getFullArch(3, 300)
# plotDirected(G)
# graphOrder = list(topsort(G))
# # The order is by design is such that all 'a' component come first then 'b' so on
# G = attachLayerDependingUponNode(G,graphOrder)
# print G.nodes.data()
# ---------------------------------DYNAMIC NEURAL NETWORK GEN FROM NETWORKX GRAPH-----------------------------
'''
Main NN module which takes in the attachedLayer networkx Graph and creates the ModuleList Pytorch Network
'''
class Net(nn.Module):
def __init__(self, G):
super(Net, self).__init__()
self.G = G # this is graph with layers attached
        self.graphOrder = list(topsort(G))  # cache the topsort instead of recomputing it every time <- DO NOT CHANGE THIS ORDER!!! nodesInNN is order-dependent
self.nodesInNN = nn.ModuleList()
for nod in self.graphOrder :
# print nod
self.nodesInNN.append(G.node[nod]['layer'])
self.fc = nn.Linear(8*7*7, 10) # 3 maxpools cause the final image to be 1,8,7,7
def forward(self, x):
result = {}
for ix, node in enumerate(self.graphOrder) :
# print node
# find pred and get results from pred
# then add those pred
# then supply in the curr node
pred = list(self.G.predecessors(node))
if len(pred) == 0 : # when node == 'In'
result[node] = self.nodesInNN[ix](x)
else :
# get results for each pred and add
# tmp = result[pred[0]]
# for pNode in pred[1:] :
# tmp += result[pNode]
result[node] = self.nodesInNN[ix](*[result[pNode] for pNode in pred])
x = torch.flatten(result['Ou'],1)
output = self.fc(x)
output = F.log_softmax(output, dim=1)
return output
def testMNIST(Net,G):
'''
To test whether the created Net is fine (dimension wise) or not on MNIST input dimen
'''
x = torch.zeros((1,1,28,28))
model = Net(G)
print model(x).shape
# ---------------------------------RANDOM HIT/MISS CODE-------------------------------------------------------------
# nx.readwrite.nx_yaml.write_yaml(G,"model.yaml")
# runNetwork(model)
# nnModelDict = attachLayerDependingUponNode(G, graphOrder)
# making graphOrder as list rather than the generator object is the only useful thing I could find to do with topsort
# Working with networkx graphs sample <- assigning data to nodes
# print graphOrder
# print graphOrder[0]
# G.nodes[graphOrder[0]]['layer'] = 1
# print G.nodes[graphOrder[0]]['layer']
| [
"[email protected]"
] | |
e2fd657eab66f4cff6903e8c631365e830e32956 | f4fbd41b0272c6161e9a2ffd793fb96631c3f20d | /aries_cloudagent/config/injector.py | 03fbe9195388cd861602f0b2e8e9012fd0eb92b9 | [
"Apache-2.0",
"LicenseRef-scancode-dco-1.1"
] | permissive | The-Insight-Token/aries-cloudagent-python | 946d8b7a2b0aa7a50be1a5a93c8c9caecadf6280 | c84c2615d6513a7ce30e71ae31f632ba112a2b1f | refs/heads/main | 2023-03-19T11:54:51.837163 | 2021-03-10T02:07:07 | 2021-03-10T02:07:07 | 346,390,951 | 1 | 0 | Apache-2.0 | 2021-03-10T14:53:52 | 2021-03-10T14:53:51 | null | UTF-8 | Python | false | false | 3,658 | py | """Standard Injector implementation."""
from typing import Mapping, Optional, Type
from .base import BaseProvider, BaseInjector, InjectionError, InjectType
from .provider import InstanceProvider, CachedProvider
from .settings import Settings
class Injector(BaseInjector):
"""Injector implementation with static and dynamic bindings."""
def __init__(
self, settings: Mapping[str, object] = None, *, enforce_typing: bool = True
):
"""Initialize an `Injector`."""
self.enforce_typing = enforce_typing
self._providers = {}
self._settings = Settings(settings)
@property
def settings(self) -> Settings:
"""Accessor for scope-specific settings."""
return self._settings
@settings.setter
def settings(self, settings: Settings):
"""Setter for scope-specific settings."""
self._settings = settings
def bind_instance(self, base_cls: Type[InjectType], instance: InjectType):
"""Add a static instance as a class binding."""
self._providers[base_cls] = InstanceProvider(instance)
def bind_provider(
self, base_cls: Type[InjectType], provider: BaseProvider, *, cache: bool = False
):
"""Add a dynamic instance resolver as a class binding."""
if not provider:
raise ValueError("Class provider binding must be non-empty")
if cache and not isinstance(provider, CachedProvider):
provider = CachedProvider(provider)
self._providers[base_cls] = provider
def clear_binding(self, base_cls: Type[InjectType]):
"""Remove a previously-added binding."""
if base_cls in self._providers:
del self._providers[base_cls]
def get_provider(self, base_cls: Type[InjectType]):
"""Find the provider associated with a class binding."""
return self._providers.get(base_cls)
def inject(
self,
base_cls: Type[InjectType],
settings: Mapping[str, object] = None,
*,
required: bool = True,
) -> Optional[InjectType]:
"""
Get the provided instance of a given class identifier.
Args:
            base_cls: The base class to retrieve an instance of
            settings: An optional mapping providing configuration to the provider
            required: If True, raise InjectionError when no instance can be provided
Returns:
An instance of the base class, or None
"""
if not base_cls:
raise InjectionError("No base class provided for lookup")
provider = self._providers.get(base_cls)
if settings:
ext_settings = self.settings.extend(settings)
else:
ext_settings = self.settings
if provider:
result = provider.provide(ext_settings, self)
else:
result = None
if result is None:
if required:
raise InjectionError(
"No instance provided for class: {}".format(base_cls.__name__)
)
elif not isinstance(result, base_cls) and self.enforce_typing:
raise InjectionError(
"Provided instance does not implement the base class: {}".format(
base_cls.__name__
)
)
return result
def copy(self) -> BaseInjector:
"""Produce a copy of the injector instance."""
result = Injector(self.settings)
result.enforce_typing = self.enforce_typing
result._providers = self._providers.copy()
return result
def __repr__(self) -> str:
"""Provide a human readable representation of this object."""
return f"<{self.__class__.__name__}>"
| [
"[email protected]"
] | |
a2c60ae4eba6bb1bd7bc7d9d5bb25bc5a6ea9707 | 4f875744ccae8fa9225318ce16fc483b7bf2735e | /google/thief.py | 784a8691a8ab6fa23fd45c46215f40a55bbe01b8 | [] | no_license | nguyenngochuy91/companyQuestions | 62c0821174bb3cb33c7af2c5a1e83a60e4a29977 | c937fe19be665ba7ac345e1729ff531f370f30e8 | refs/heads/master | 2020-07-27T05:58:36.794033 | 2020-04-10T20:57:15 | 2020-04-10T20:57:15 | 208,893,527 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 854 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Dec 11 02:40:47 2019
@author: huyn
"""
# House thief (House Robber): maximize loot without robbing two adjacent houses.
def findMax(array):
    # Brute force: at each house, either skip it or take it and jump two ahead.
    def dfs(index, currentSum):
if index>=len(array):
return currentSum
else:
val = array[index]
first = dfs(index+1,currentSum)
second = dfs(index+2,currentSum+val)
return max(first,second)
return dfs(0,0)
#print(findMax([2, 5, 1, 3, 6, 2, 4]))
#print(findMax([2, 10, 14, 8, 1]))
def findMaxDP(array):
    # Memoized (top-down DP) version: dp[i] caches the best loot from house i onward.
    dp = [0]*len(array)
    def dfs(index):
        if index < len(array):
            if dp[index] == 0:  # not cached yet (works because house values are positive)
                dp[index] = max(array[index]+dfs(index+2), dfs(index+1))
return dp[index]
else:
return 0
dfs(0)
return dp[0]
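# A bottom-up, O(1)-space variant of the same recurrence, added purely for
# illustration (not part of the original script):
def findMaxIterative(array):
    take, skip = 0, 0  # best sums when the previous house was taken / skipped
    for val in array:
        take, skip = skip + val, max(take, skip)
    return max(take, skip)  # e.g. 15 for [2, 5, 1, 3, 6, 2, 4], matching findMaxDP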
print(findMaxDP([2, 5, 1, 3, 6, 2, 4]))
print(findMaxDP([2, 10, 14, 8, 1])) | [
"[email protected]"
] | |
6425948003272e8b7845b8b2a02bb4d2ab44b0b5 | e9de2e778bebc8c9d9da4826a6372a462831fb62 | /fcmscriptdb.py | 0a17591b4da1fe06e935cdf1ee6939b98d8a75f6 | [] | no_license | rahulgoyal911/FCMScript | 2c698bb41012fce3e015598c5ded7f7de8033114 | 2f8c21823e4849f0c5f1844b58c48ae8b9b9e7f2 | refs/heads/master | 2020-04-21T23:41:18.961515 | 2019-02-10T14:22:55 | 2019-02-10T14:22:55 | 169,954,334 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 967 | py | # Send to single device.
from pyfcm import FCMNotification
import psycopg2
conn = psycopg2.connect(database = "testdb2", user = "postgresql", password = "namespace1", host = "sample-database.czgprnseypbr.us-east-1.rds.amazonaws.com", port = "5432")
print ('Opened database successfully')
cur = conn.cursor()
cur.execute("SELECT name from COMPANY")
rows = cur.fetchall()
for row in rows:
print ("NAME = ", row[0])
name = row[0]
print ("fetched successfully");
push_service = FCMNotification(api_key="AAAALZRFb04:APA91bEjxns-acpzgQwQK93ePXeb0LfQ6oES0dW7PSTuSE00qzsWhmVqFu4M0O-D6XVH1Cb_XC2miS0AitRImEcRjSEzRKKXJAAbOJg876mOwIY04VdOiZgoi0VL5MoTWmcr1RTpN5ht")
registration_id = "dyWTx-v3YtQ:APA91bHVf4yLwu2HpflWNW9yjVX8G3mZmamMgZjqBV-pPMvQCwAydPuQUrRjxz_OZOgrO_IJr5nq2TMLZtI2fgnAu2oDV1dFvu2RC4hmyiFK2WgdZcdQYPATcbMW3Q_tHXU9D9VrEaWz"
message = name
result = push_service.notify_single_device(registration_id=registration_id, message_body=message)
print (result)
| [
"[email protected]"
] | |
ceadd39f58e3cdd2956e37c2b347fd9cdd1e0a75 | cdc91518212d84f3f9a8cd3516a9a7d6a1ef8268 | /python/eve_number_sum.py | 02fbfe2554068c956fce71f67dc342dbab849094 | [] | no_license | paulfranco/code | 1a1a316fdbe697107396b98f4dfe8250b74b3d25 | 10a5b60c44934d5d2788d9898f46886b99bd32eb | refs/heads/master | 2021-09-20T14:00:35.213810 | 2018-08-10T06:38:40 | 2018-08-10T06:38:40 | 112,060,914 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | # write a function that adds all of the even numbers from 0 - 26
def my_func():
my_sum = 0
    for x in range(0, 27):  # 27 so the range actually includes 26
if x % 2 == 0:
my_sum = my_sum + x
print(my_sum)
my_func() | [
"[email protected]"
] | |
671c07d1bae3bbeba7c5b48667c2e1e16124ad39 | 600df3590cce1fe49b9a96e9ca5b5242884a2a70 | /third_party/icu/icu.gyp | 4e3c0063727072f7e4a288d3da375b851154b2a4 | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unicode",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"NAIST-2003",
"ICU"
] | permissive | metux/chromium-suckless | efd087ba4f4070a6caac5bfbfb0f7a4e2f3c438a | 72a05af97787001756bae2511b7985e61498c965 | refs/heads/orig | 2022-12-04T23:53:58.681218 | 2017-04-30T10:59:06 | 2017-04-30T23:35:58 | 89,884,931 | 5 | 3 | BSD-3-Clause | 2022-11-23T20:52:53 | 2017-05-01T00:09:08 | null | UTF-8 | Python | false | false | 23,500 | gyp | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'includes': [
'icu.gypi',
],
'variables': {
'use_system_icu%': 0,
'icu_use_data_file_flag%': 0,
'want_separate_host_toolset%': 1,
},
'target_defaults': {
'direct_dependent_settings': {
'defines': [
# Tell ICU to not insert |using namespace icu;| into its headers,
# so that chrome's source explicitly has to use |icu::|.
'U_USING_ICU_NAMESPACE=0',
# We don't use ICU plugins and dyload is only necessary for them.
# NaCl-related builds also fail looking for dlfcn.h when it's enabled.
'U_ENABLE_DYLOAD=0',
# With exception disabled, MSVC emits C4577 warning on coming across
# 'noexcept'. See http://bugs.icu-project.org/trac/ticket/12406
# TODO(jshin): Remove this when updating to a newer version with this
# fixed.
'U_NOEXCEPT=',
],
},
'defines': [
'U_USING_ICU_NAMESPACE=0',
'HAVE_DLOPEN=0',
      # Only build encoding converters and detectors necessary for HTML5.
'UCONFIG_ONLY_HTML_CONVERSION=1',
# No dependency on the default platform encoding.
# Will cut down the code size.
'U_CHARSET_IS_UTF8=1',
],
'conditions': [
['component=="static_library"', {
'defines': [
'U_STATIC_IMPLEMENTATION',
],
}],
['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris" \
or OS=="netbsd" or OS=="mac" or OS=="android" or OS=="qnx") and \
(target_arch=="arm" or target_arch=="ia32" or \
target_arch=="mipsel" or target_arch=="mips")', {
'target_conditions': [
['_toolset=="host"', {
'cflags': [ '-m32' ],
'ldflags': [ '-m32' ],
'asflags': [ '-32' ],
'xcode_settings': {
'ARCHS': [ 'i386' ],
},
}],
],
}],
['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris" \
or OS=="netbsd" or OS=="mac" or OS=="android" or OS=="qnx") and \
(target_arch=="arm64" or target_arch=="x64" or \
target_arch=="mips64el" or target_arch=="mips64")', {
'target_conditions': [
['_toolset=="host"', {
'cflags': [ '-m64' ],
'ldflags': [ '-m64' ],
'asflags': [ '-64' ],
'xcode_settings': {
'ARCHS': [ 'x86_64' ],
},
}],
],
}],
],
'include_dirs': [
'source/common',
'source/i18n',
],
'msvs_disabled_warnings': [4005, 4068, 4355, 4996, 4267],
},
'conditions': [
['use_system_icu==0 or want_separate_host_toolset==1', {
'targets': [
{
'target_name': 'copy_icudt_dat',
'type': 'none',
# icudtl.dat is the same for both host/target, so this only supports a
# single toolset. If a target requires that the .dat file be copied
# to the output directory, it should explicitly depend on this target
# with the host toolset (like copy_icudt_dat#host).
'toolsets': [ 'host' ],
'copies': [{
'destination': '<(PRODUCT_DIR)',
'conditions': [
['OS == "android"', {
'files': [
'android/icudtl.dat',
],
} , { # else: OS != android
'conditions': [
# Big Endian
[ 'target_arch=="mips" or target_arch=="mips64"', {
'files': [
'common/icudtb.dat',
],
} , { # else: ! Big Endian = Little Endian
'files': [
'common/icudtl.dat',
],
}],
],
}],
],
}],
},
{
'target_name': 'data_assembly',
'type': 'none',
'conditions': [
[ 'target_arch=="mips" or target_arch=="mips64"', { # Big Endian
'data_assembly_inputs': [
'common/icudtb.dat',
],
'data_assembly_outputs': [
'<(SHARED_INTERMEDIATE_DIR)/third_party/icu/icudtb_dat.S',
],
}, { # Little Endian
'data_assembly_outputs': [
'<(SHARED_INTERMEDIATE_DIR)/third_party/icu/icudtl_dat.S',
],
'conditions': [
['OS == "android"', {
'data_assembly_inputs': [
'android/icudtl.dat',
],
} , { # else: OS!="android"
'data_assembly_inputs': [
'common/icudtl.dat',
],
}], # OS==android
],
}],
],
'sources': [
'<@(_data_assembly_inputs)',
],
'actions': [
{
'action_name': 'make_data_assembly',
'inputs': [
'scripts/make_data_assembly.py',
'<@(_data_assembly_inputs)',
],
'outputs': [
'<@(_data_assembly_outputs)',
],
'target_conditions': [
[ 'OS == "mac" or OS == "ios" or '
'((OS == "android" or OS == "qnx") and '
'_toolset == "host" and host_os == "mac")', {
'action': ['python', '<@(_inputs)', '<@(_outputs)', '--mac'],
} , {
'action': ['python', '<@(_inputs)', '<@(_outputs)'],
}],
],
},
],
},
{
'target_name': 'icudata',
'type': 'static_library',
'defines': [
'U_HIDE_DATA_SYMBOL',
],
'dependencies': [
'data_assembly#target',
],
'sources': [
'<(SHARED_INTERMEDIATE_DIR)/third_party/icu/icudtl_dat.S',
'<(SHARED_INTERMEDIATE_DIR)/third_party/icu/icudtb_dat.S',
],
'conditions': [
[ 'target_arch=="mips" or target_arch=="mips64"', {
'sources!': ['<(SHARED_INTERMEDIATE_DIR)/third_party/icu/icudtl_dat.S'],
}, {
'sources!': ['<(SHARED_INTERMEDIATE_DIR)/third_party/icu/icudtb_dat.S'],
}],
[ 'use_system_icu==1 and want_separate_host_toolset==1', {
'toolsets': ['host'],
}],
[ 'use_system_icu==0 and want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}],
[ 'use_system_icu==0 and want_separate_host_toolset==0', {
'toolsets': ['target'],
}],
[ 'OS == "win" and icu_use_data_file_flag==0', {
'type': 'none',
'dependencies!': [
'data_assembly#target',
],
'copies': [
{
'destination': '<(PRODUCT_DIR)',
'files': [
'windows/icudt.dll',
],
},
],
}],
[ 'icu_use_data_file_flag==1', {
'type': 'none',
'dependencies!': [
'data_assembly#target',
],
# Remove any assembly data file.
'sources/': [['exclude', 'icudt[lb]_dat']],
# Make sure any binary depending on this gets the data file.
'conditions': [
['OS != "ios"', {
'dependencies': [
'copy_icudt_dat#host',
],
} , { # else: OS=="ios"
'link_settings': {
'mac_bundle_resources': [
'common/icudtl.dat',
],
},
}], # OS!=ios
], # conditions
}], # icu_use_data_file_flag
], # conditions
'target_conditions': [
[ 'OS == "win"', {
'sources!': [
'<(SHARED_INTERMEDIATE_DIR)/third_party/icu/icudtl_dat.S',
'<(SHARED_INTERMEDIATE_DIR)/third_party/icu/icudtb_dat.S'
],
}],
], # target_conditions
},
{
'target_name': 'icui18n',
'type': '<(component)',
'sources': [
'<@(icui18n_sources)',
],
'defines': [
'U_I18N_IMPLEMENTATION',
],
'dependencies': [
'icuuc',
],
'direct_dependent_settings': {
'include_dirs': [
'source/i18n',
],
},
'variables': {
'clang_warning_flags': [
# ICU uses its own deprecated functions.
'-Wno-deprecated-declarations',
# ICU prefers `a && b || c` over `(a && b) || c`.
'-Wno-logical-op-parentheses',
# ICU has some `unsigned < 0` checks.
'-Wno-tautological-compare',
# ICU has some code with the pattern:
# if (found = uprv_getWindowsTimeZoneInfo(...))
'-Wno-parentheses',
],
},
# Since ICU wants to internally use its own deprecated APIs, don't
# complain about it.
'cflags': [
'-Wno-deprecated-declarations',
],
'cflags_cc': [
'-frtti',
],
'cflags_cc!': [
'-fno-rtti',
],
'xcode_settings': {
'GCC_ENABLE_CPP_RTTI': 'YES', # -frtti
},
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeTypeInfo': 'true',
},
},
'conditions': [
[ 'use_system_icu==1 and want_separate_host_toolset==1', {
'toolsets': ['host'],
}],
[ 'use_system_icu==0 and want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}],
[ 'use_system_icu==0 and want_separate_host_toolset==0', {
'toolsets': ['target'],
}],
['OS == "android" and clang==0', {
# Disable sincos() optimization to avoid a linker error since
# Android's math library doesn't have sincos(). Either
# -fno-builtin-sin or -fno-builtin-cos works.
'cflags': [
'-fno-builtin-sin',
],
}],
[ 'OS == "win" and clang==1', {
# Note: General clang warnings should go in the
# clang_warning_flags block above.
'msvs_settings': {
'VCCLCompilerTool': {
'AdditionalOptions': [
# See http://bugs.icu-project.org/trac/ticket/11122
'-Wno-inline-new-delete',
'-Wno-implicit-exception-spec-mismatch',
],
},
},
}],
], # conditions
},
{
'target_name': 'icuuc',
'type': '<(component)',
'sources': [
'<@(icuuc_sources)',
],
'defines': [
'U_COMMON_IMPLEMENTATION',
],
'dependencies': [
'icudata',
],
'direct_dependent_settings': {
'include_dirs': [
'source/common',
],
'conditions': [
[ 'component=="static_library"', {
'defines': [
'U_STATIC_IMPLEMENTATION',
],
}],
],
},
'variables': {
'clang_warning_flags': [
# ICU uses its own deprecated functions.
'-Wno-deprecated-declarations',
# ICU prefers `a && b || c` over `(a && b) || c`.
'-Wno-logical-op-parentheses',
# ICU has some `unsigned < 0` checks.
'-Wno-tautological-compare',
# uresdata.c has switch(RES_GET_TYPE(x)) code. The
# RES_GET_TYPE macro returns an UResType enum, but some switch
# statement contains case values that aren't part of that
# enum (e.g. URES_TABLE32 which is in UResInternalType). This
# is on purpose.
'-Wno-switch',
# ICU has some code with the pattern:
# if (found = uprv_getWindowsTimeZoneInfo(...))
'-Wno-parentheses',
# ICU generally has no unused variables, but there are a few
# places where this warning triggers.
# See https://codereview.chromium.org/1222643002/ and
# http://www.icu-project.org/trac/ticket/11759.
'-Wno-unused-const-variable',
# ucnv2022.cpp contains three functions that are only used when
# certain preprocessor defines are set.
'-Wno-unused-function',
],
},
'cflags': [
# Since ICU wants to internally use its own deprecated APIs,
# don't complain about it.
'-Wno-deprecated-declarations',
'-Wno-unused-function',
],
'cflags_cc': [
'-frtti',
],
'cflags_cc!': [
'-fno-rtti',
],
'xcode_settings': {
'GCC_ENABLE_CPP_RTTI': 'YES', # -frtti
},
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeTypeInfo': 'true',
},
},
'all_dependent_settings': {
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [
'advapi32.lib',
],
},
},
},
'conditions': [
[ 'use_system_icu==1 and want_separate_host_toolset==1', {
'toolsets': ['host'],
}],
[ 'use_system_icu==0 and want_separate_host_toolset==1', {
'toolsets': ['host', 'target'],
}],
[ 'use_system_icu==0 and want_separate_host_toolset==0', {
'toolsets': ['target'],
}],
[ 'OS == "win" or icu_use_data_file_flag==1', {
'sources': [
'source/stubdata/stubdata.c',
],
'defines': [
'U_ICUDATAENTRY_IN_COMMON',
],
}],
[ 'OS == "win" and clang==1', {
# Note: General clang warnings should go in the
# clang_warning_flags block above.
'msvs_settings': {
'VCCLCompilerTool': {
'AdditionalOptions': [
# See http://bugs.icu-project.org/trac/ticket/11122
'-Wno-inline-new-delete',
'-Wno-implicit-exception-spec-mismatch',
],
},
},
}],
], # conditions
},
], # targets
}],
['use_system_icu==1', {
'targets': [
{
'target_name': 'system_icu',
'type': 'none',
'conditions': [
['OS=="qnx"', {
'link_settings': {
'libraries': [
'-licui18n',
'-licuuc',
],
},
}],
['OS!="qnx"', {
'link_settings': {
'ldflags': [
'<!@(icu-config --ldflags)',
],
'libraries': [
'<!@(icu-config --ldflags-libsonly)',
],
},
}],
],
},
{
'target_name': 'icudata',
'type': 'none',
'dependencies': ['system_icu'],
'export_dependent_settings': ['system_icu'],
'toolsets': ['target'],
},
{
'target_name': 'icui18n',
'type': 'none',
'dependencies': ['system_icu'],
'export_dependent_settings': ['system_icu'],
'variables': {
'headers_root_path': 'source/i18n',
'header_filenames': [
# This list can easily be updated using the command below:
# find source/i18n/unicode -iname '*.h' \
# -printf " '%p',\n" | \
# sed -e 's|source/i18n/||' | sort -u
'unicode/alphaindex.h',
'unicode/basictz.h',
'unicode/calendar.h',
'unicode/choicfmt.h',
'unicode/coleitr.h',
'unicode/coll.h',
'unicode/compactdecimalformat.h',
'unicode/curramt.h',
'unicode/currpinf.h',
'unicode/currunit.h',
'unicode/datefmt.h',
'unicode/dcfmtsym.h',
'unicode/decimfmt.h',
'unicode/dtfmtsym.h',
'unicode/dtitvfmt.h',
'unicode/dtitvinf.h',
'unicode/dtptngen.h',
'unicode/dtrule.h',
'unicode/fieldpos.h',
'unicode/filteredbrk.h',
'unicode/fmtable.h',
'unicode/format.h',
'unicode/fpositer.h',
'unicode/gender.h',
'unicode/gregocal.h',
'unicode/locdspnm.h',
'unicode/measfmt.h',
'unicode/measunit.h',
'unicode/measure.h',
'unicode/msgfmt.h',
'unicode/numfmt.h',
'unicode/numsys.h',
'unicode/plurfmt.h',
'unicode/plurrule.h',
'unicode/rbnf.h',
'unicode/rbtz.h',
'unicode/regex.h',
'unicode/region.h',
'unicode/reldatefmt.h',
'unicode/scientificformathelper.h',
'unicode/search.h',
'unicode/selfmt.h',
'unicode/simpletz.h',
'unicode/smpdtfmt.h',
'unicode/sortkey.h',
'unicode/stsearch.h',
'unicode/tblcoll.h',
'unicode/timezone.h',
'unicode/tmunit.h',
'unicode/tmutamt.h',
'unicode/tmutfmt.h',
'unicode/translit.h',
'unicode/tzfmt.h',
'unicode/tznames.h',
'unicode/tzrule.h',
'unicode/tztrans.h',
'unicode/ucal.h',
'unicode/ucoleitr.h',
'unicode/ucol.h',
'unicode/ucsdet.h',
'unicode/ucurr.h',
'unicode/udateintervalformat.h',
'unicode/udat.h',
'unicode/udatpg.h',
'unicode/udisplaycontext.h',
'unicode/uformattable.h',
'unicode/ugender.h',
'unicode/uldnames.h',
'unicode/ulocdata.h',
'unicode/umsg.h',
'unicode/unirepl.h',
'unicode/unum.h',
'unicode/unumsys.h',
'unicode/upluralrules.h',
'unicode/uregex.h',
'unicode/uregion.h',
'unicode/usearch.h',
'unicode/uspoof.h',
'unicode/utmscale.h',
'unicode/utrans.h',
'unicode/vtzone.h',
],
},
'includes': [
'shim_headers.gypi',
],
'toolsets': ['target'],
},
{
'target_name': 'icuuc',
'type': 'none',
'dependencies': ['system_icu'],
'export_dependent_settings': ['system_icu'],
'variables': {
'headers_root_path': 'source/common',
'header_filenames': [
# This list can easily be updated using the command below:
# find source/common/unicode -iname '*.h' \
# -printf " '%p',\n" | \
# sed -e 's|source/common/||' | sort -u
'unicode/appendable.h',
'unicode/brkiter.h',
'unicode/bytestream.h',
'unicode/bytestriebuilder.h',
'unicode/bytestrie.h',
'unicode/caniter.h',
'unicode/chariter.h',
'unicode/dbbi.h',
'unicode/docmain.h',
'unicode/dtintrv.h',
'unicode/enumset.h',
'unicode/errorcode.h',
'unicode/icudataver.h',
'unicode/icuplug.h',
'unicode/idna.h',
'unicode/listformatter.h',
'unicode/localpointer.h',
'unicode/locid.h',
'unicode/messagepattern.h',
'unicode/normalizer2.h',
'unicode/normlzr.h',
'unicode/parseerr.h',
'unicode/parsepos.h',
'unicode/platform.h',
'unicode/ptypes.h',
'unicode/putil.h',
'unicode/rbbi.h',
'unicode/rep.h',
'unicode/resbund.h',
'unicode/schriter.h',
'unicode/std_string.h',
'unicode/strenum.h',
'unicode/stringpiece.h',
'unicode/stringtriebuilder.h',
'unicode/symtable.h',
'unicode/ubidi.h',
'unicode/ubrk.h',
'unicode/ucasemap.h',
'unicode/ucat.h',
'unicode/uchar.h',
'unicode/ucharstriebuilder.h',
'unicode/ucharstrie.h',
'unicode/uchriter.h',
'unicode/uclean.h',
'unicode/ucnv_cb.h',
'unicode/ucnv_err.h',
'unicode/ucnv.h',
'unicode/ucnvsel.h',
'unicode/uconfig.h',
'unicode/udata.h',
'unicode/uenum.h',
'unicode/uidna.h',
'unicode/uiter.h',
'unicode/uloc.h',
'unicode/umachine.h',
'unicode/umisc.h',
'unicode/unifilt.h',
'unicode/unifunct.h',
'unicode/unimatch.h',
'unicode/uniset.h',
'unicode/unistr.h',
'unicode/unorm2.h',
'unicode/unorm.h',
'unicode/uobject.h',
'unicode/urename.h',
'unicode/urep.h',
'unicode/ures.h',
'unicode/uscript.h',
'unicode/uset.h',
'unicode/usetiter.h',
'unicode/ushape.h',
'unicode/usprep.h',
'unicode/ustring.h',
'unicode/ustringtrie.h',
'unicode/utext.h',
'unicode/utf16.h',
'unicode/utf32.h',
'unicode/utf8.h',
'unicode/utf.h',
'unicode/utf_old.h',
'unicode/utrace.h',
'unicode/utypes.h',
'unicode/uvernum.h',
'unicode/uversion.h',
],
},
'includes': [
'shim_headers.gypi',
],
'toolsets': ['target'],
},
], # targets
}],
], # conditions
}
| [
"[email protected]"
] | |
856646a13abfa675fe8af4f6c9cf65e07f64f447 | 6d5a5c731f89933c7086ecd7d26999b79bc7217a | /Inflearn/stringPrac.py | 33b9bd610fc6fd0e93387a7b9f24ecaa77075782 | [] | no_license | minhyeonlee/python-basic | 7fbb9ff3816ac72c19d2cb2192c324a379082b16 | 007d1fc455927e83188e345bf3fc5cd8d5753b49 | refs/heads/master | 2022-04-13T09:57:39.270863 | 2020-03-28T07:25:14 | 2020-03-28T07:25:14 | 247,428,424 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,306 | py | '''
Inflearn, free Python course (basics) - "Become a developer in 6 hours"
Section 3. String handling
'''
# Lesson 1. Strings
# Both '' and "" create strings.
sentence = 'I am a boy.'
print(sentence)
sentence2 = "Python is easy"
print(sentence2)
# Multiple lines can be stored and printed.
sentence3 = '''
I am a boy,
and Python is easy
'''
print(sentence3)
# Lesson 2. Slicing
idnumber = "990120-1234567"
print("Gender: " + idnumber[7]) # 1
print("Year: " + idnumber[0:2]) # from index 0 up to (but not including) 2 -> chars at 0 and 1
print("Month: " + idnumber[2:4]) # 01
print("Day: " + idnumber[4:6]) # 20
print("Birth date: " + idnumber[:6]) # from the start up to (not including) index 6
print("Last 7 digits: " + idnumber[7:]) # from index 7 to the end
print("Last 7 digits (from the back): " + idnumber[-7:]) # from the 7th character from the end to the end
# Lesson 3. String methods
python = "Python is Amazing"
print(python.lower()) # print in lowercase
print(python.upper()) # print in uppercase
print(python[0].isupper()) # check whether python[0] is uppercase, returns True/False
print(len(python)) # return the length of the string
print(python.replace("Python", "Java")) # find a substring and replace it with another
index = python.index("n") # find the position of the given substring
print(index)
index = python.index("n", index+1) # search again starting after the n found above (at position 5)
print(index)
print(python.find("n")) # searches just like index
print(python.find("Java")) # returns -1 when the substring is not found
# print(python.index("Java")) would raise an error instead
print(python.count("n")) # count how many times the substring occurs
# Lesson 4. String formatting
print("a" + "b")
print("a", "b")
# Method 1
print("I am %d years old." % 20) # %d: integer value
print("I like %s." % "Python") # %s: string value; it can print integers too
print("Apple starts with %c." % "A") # %c: a single character
print("I am %s years old." % 20)
print("I like the colors %s and %s." % ("blue", "red"))
# Method 2
print("I am {} years old.".format(20))
print("I like the colors {} and {}.".format("blue", "red"))
print("I like the colors {0} and {1}.".format("blue", "red"))
print("I like the colors {1} and {0}.".format("blue", "red"))
# Method 3
print("I am {age} years old and like the color {color}.".format(age=30, color="red"))
print("I am {age} years old and like the color {color}.".format(color="red", age=30))
# Method 4 (available from v3.6)
age = "20"
color = "red"
print(f"I am {age} years old and like the color {color}.")
# Lesson 5. Escape characters
# \n: newline
print("Seeing once beats hearing a hundred times\nTyping once beats seeing a hundred times")
# \" \': quotation marks inside a sentence
# I am "NadoCoding".
print("I am 'NadoCoding'.")
print('I am "NadoCoding".')
print("I am \"NadoCoding\".")
print("I am \'NadoCoding\'.")
# \\: a backslash inside a sentence (used when printing paths, etc.)
print("C:\\User\\Desktop")
# \r: move the cursor to the start of the line
print("Red Apple\rPine")
# \b: backspace (deletes one character)
print("Redd\bApple")
# \t: tab
print("Red\tApple")
| [
"[email protected]"
] | |
c5938159509a69c4d911d0b67d9fe2ccb67844f4 | 70b339d0b2638a7914d0d56c5edf8a2637c9f4b0 | /countUnivalSubtrees.py | debb8570ebae08b08bd35f2a07e56136d4acbf9a | [] | no_license | pflun/advancedAlgorithms | 9991da7514024e18ba08de8688966b9220e12571 | 5520dbcd26999b98e1229bf03c2f62dd690a2ddc | refs/heads/master | 2023-02-19T12:05:26.902535 | 2023-02-14T06:08:54 | 2023-02-14T06:08:54 | 189,055,701 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,234 | py | # -*- coding: utf-8 -*-
# Post-order traversal: check whether each subtree's returned value matches the root, case by case
# https://mnmunknown.gitbooks.io/algorithm-notes/content/61_tree.html
# Definition for a binary tree node.
class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
class Solution(object):
    def countUnivalSubtrees(self, root):
        self.res = 0
        # Sentinel meaning "subtree is not unival". A bare False would be
        # ambiguous here because False == 0 in Python, which breaks trees
        # that contain the value 0 (like the test tree below).
        FAILED = object()
        def postorder(node):
            if node is None:
                return None
            # A leaf also counts as a unival subtree.
            if node.left is None and node.right is None:
                self.res += 1
                return node.val
            left = postorder(node.left) if node.left else None
            right = postorder(node.right) if node.right else None
            # Both subtrees exist: each must be unival and equal the root value.
            if node.left and node.right:
                if left == node.val and right == node.val:
                    self.res += 1
                else:
                    return FAILED
            # Only the left subtree exists: it must be unival and equal the root.
            elif node.left:
                if left == node.val:
                    self.res += 1
                else:
                    return FAILED
            # Only the right subtree exists.
            else:
                if right == node.val:
                    self.res += 1
                else:
                    return FAILED
            return node.val
        postorder(root)
        return self.res
head_node = TreeNode(0)
n1 = TreeNode(1)
n2 = TreeNode(0)
n3 = TreeNode(5)
n4 = TreeNode(4)
n5 = TreeNode(5)
n6 = TreeNode(5)
n7 = TreeNode(5)
head_node.left = n1
head_node.right = n2
n1.left = n3
n1.right = n4
n3.left = n6
n6.left = n5
n6.right = n7
test1 = Solution()
print test1.countUnivalSubtrees(head_node)
# Tree used above (expected output: 6):
#          0
#        /   \
#       1     0
#      / \
#     5   4
#    /
#   5
#  / \
# 5   5 | [
"[email protected]"
] | |
9b4de1d3e5726b763267418ceb084d36565e00af | e6a8793b1b12d47e57f00485350d122946618245 | /parents/admin.py | 6a80e0c0a7836d80d23fab02e3781a4109d89613 | [] | no_license | Fabricourt/school | 70b2eba2c0b8ff9b9290eb0f68d730698a6d3a63 | dad80c36be34b432dfadef195eb9e867f82cafff | refs/heads/main | 2023-01-01T15:48:43.760288 | 2020-10-26T11:15:32 | 2020-10-26T11:15:32 | 305,829,630 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 266 | py | from django.contrib import admin
from .models import Parent
class ParentAdmin(admin.ModelAdmin):
    list_display = ('name', 'account_date')
    list_display_links = ('name',)
search_fields = ('name',)
list_per_page = 25
admin.site.register(Parent, ParentAdmin) | [
"[email protected]"
] | |
25069dd9e77118a997038dcb2d699948baacf6b6 | d38d988114f8487e4c0d1674191b6f2865eac70d | /gru.py | 7b20014606ce44db1d77d34a341bc6b2b10aa40b | [
"MIT"
] | permissive | dizcza/ujipen | 71cc1612fcc8247a7cae1a2da9ea13cb2fca38e8 | 4e7d2ff1bd6d659743fdf68e49894236cd559b84 | refs/heads/master | 2021-07-05T19:03:00.701898 | 2020-09-11T18:48:57 | 2020-09-11T18:48:57 | 171,858,288 | 1 | 1 | MIT | 2019-10-30T09:28:42 | 2019-02-21T11:19:50 | Python | UTF-8 | Python | false | false | 2,371 | py | from typing import List, Dict
import numpy as np
from keras import layers, models
from constants import *
from helper import check_unique_patterns
from preprocess import equally_spaced_points_patterns, is_inside_box
from ujipen.ujipen_class import UJIPen
def concat_samples(samples: Dict[str, List[List[np.ndarray]]]):
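    # Flatten {letter: [list of stroke arrays]} into (data, labels): the strokes
    # of each sample are vstacked into one (n_points, 2) array and each letter
    # is mapped to a 0-based integer label.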
labels = []
data = []
for letter in samples.keys():
letter_ord = ord(letter) - ord('a')
labels.extend([letter_ord] * len(samples[letter]))
for word_sample in samples[letter]:
word_sample = np.vstack(word_sample)
data.append(word_sample)
data = np.stack(data, axis=0)
assert is_inside_box(data, box=((-1, -1), (1, 1)))
labels = np.array(labels)
print(f"Data: {data.shape}, labels: {labels.shape}")
return data, labels
def train(ujipen: UJIPen, n_input=PATTERN_SIZE, n_hidden=50):
patterns = ujipen.get_samples(fold='train')
patterns = equally_spaced_points_patterns(patterns, total_points=n_input)
train_data, train_labels = concat_samples(patterns)
test_samples = equally_spaced_points_patterns(ujipen.get_samples(fold='test'), total_points=n_input)
test_data, test_labels = concat_samples(test_samples)
assert check_unique_patterns(patterns, n_points=n_input)
gru = models.Sequential()
gru.add(layers.GRU(units=n_hidden, activation='tanh', recurrent_activation='hard_sigmoid',
return_sequences=False, implementation=1,
input_shape=(n_input, 2)))
gru.add(layers.Dense(units=np.unique(train_labels).size, activation='softmax'))
print(gru.summary())
gru.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
history = gru.fit(train_data, train_labels, epochs=100, batch_size=32, validation_data=(test_data, test_labels),
verbose=0)
history = history.history
accuracy_train = history['acc'][-1]
print(f"Loss: {history['loss'][-1]:.5f}, accuracy: train={accuracy_train:.5f}, val={history['val_acc'][-1]:.5f}")
MODELS_DIR.mkdir(exist_ok=True)
model_path = str(MODELS_DIR / f'GRU_input-{n_input}_hidden-{n_hidden}_acc-{accuracy_train:.4f}.h5')
gru.save(model_path)
print(f"Saved trained model to {model_path}")
if __name__ == '__main__':
train(ujipen=UJIPen(), n_input=30, n_hidden=100)
| [
"[email protected]"
] | |
7b731c6f011fa87393d4ce9b59e7a664722cbc56 | 30150c7f6ed7a10ac50eee3f40101bc3165ebf9e | /src/coghq/FactoryEntityCreatorAI.py | f46ac38d6fdd0fa9403d61345de5892119f286e3 | [] | no_license | toontown-restoration-project/toontown | c2ad0d552cb9d5d3232ae6941e28f00c11ca3aa8 | 9bef6d9f823b2c12a176b33518eaa51ddbe3fd2f | refs/heads/master | 2022-12-23T19:46:16.697036 | 2020-10-02T20:17:09 | 2020-10-02T20:17:09 | 300,672,330 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,100 | py | """FactoryEntityCreatorAI module: contains the FactoryEntityCreatorAI class"""
from otp.level import EntityCreatorAI
from direct.showbase.PythonUtil import Functor
from . import DistributedBeanBarrelAI
from . import DistributedButtonAI
from . import DistributedCrateAI
from . import DistributedLiftAI
from . import DistributedDoorEntityAI
from . import DistributedGagBarrelAI
from . import DistributedGridAI
from toontown.suit import DistributedGridGoonAI
from toontown.suit import DistributedGoonAI
from . import DistributedHealBarrelAI
from . import DistributedStomperPairAI
from . import DistributedTriggerAI
from . import DistributedStomperAI
from . import DistributedLaserFieldAI
from . import DistributedSecurityCameraAI
from . import DistributedMoverAI
from . import DistributedElevatorMarkerAI
from . import DistributedSinkingPlatformAI
from . import ActiveCellAI
from . import CrusherCellAI
from . import DirectionalCellAI
from . import FactoryLevelMgrAI
from . import BattleBlockerAI
from . import DistributedGolfGreenGameAI
from toontown.coghq import DistributedMoleFieldAI
from toontown.coghq import DistributedMazeAI
class FactoryEntityCreatorAI(EntityCreatorAI.EntityCreatorAI):
def __init__(self, level):
EntityCreatorAI.EntityCreatorAI.__init__(self, level)
# create short aliases for EntityCreatorAI create funcs
cDE = EntityCreatorAI.createDistributedEntity
cLE = EntityCreatorAI.createLocalEntity
nothing = EntityCreatorAI.nothing
self.privRegisterTypes({
'activeCell' : Functor(cDE, ActiveCellAI.ActiveCellAI),
'crusherCell' : Functor(cDE, CrusherCellAI.CrusherCellAI),
'battleBlocker' : Functor(cDE, BattleBlockerAI.BattleBlockerAI),
'beanBarrel': Functor(cDE, DistributedBeanBarrelAI.DistributedBeanBarrelAI),
'button': DistributedButtonAI.DistributedButtonAI,
'conveyorBelt' : nothing,
'crate': Functor(cDE, DistributedCrateAI.DistributedCrateAI),
'directionalCell' : Functor(cDE, DirectionalCellAI.DirectionalCellAI),
'door': DistributedDoorEntityAI.DistributedDoorEntityAI,
'gagBarrel': Functor(cDE, DistributedGagBarrelAI.DistributedGagBarrelAI),
'gear': nothing,
'goon': Functor(cDE, DistributedGoonAI.DistributedGoonAI),
'gridGoon': Functor(cDE, DistributedGridGoonAI.DistributedGridGoonAI),
'golfGreenGame': Functor(cDE, DistributedGolfGreenGameAI.DistributedGolfGreenGameAI),
'goonClipPlane' : nothing,
'grid': Functor(cDE, DistributedGridAI.DistributedGridAI),
'healBarrel': Functor(cDE, DistributedHealBarrelAI.DistributedHealBarrelAI),
'levelMgr': Functor(cLE, FactoryLevelMgrAI.FactoryLevelMgrAI),
'lift': Functor(cDE, DistributedLiftAI.DistributedLiftAI),
'mintProduct': nothing,
'mintProductPallet': nothing,
'mintShelf': nothing,
'mover': Functor(cDE, DistributedMoverAI.DistributedMoverAI),
'paintMixer': nothing,
'pathMaster': nothing,
'rendering': nothing,
'platform': nothing,
'sinkingPlatform': Functor(cDE, DistributedSinkingPlatformAI.DistributedSinkingPlatformAI),
'stomper': Functor(cDE, DistributedStomperAI.DistributedStomperAI),
'stomperPair': Functor(cDE, DistributedStomperPairAI.DistributedStomperPairAI),
'laserField': Functor(cDE, DistributedLaserFieldAI.DistributedLaserFieldAI),
'securityCamera': Functor(cDE, DistributedSecurityCameraAI.DistributedSecurityCameraAI),
'elevatorMarker': Functor(cDE, DistributedElevatorMarkerAI.DistributedElevatorMarkerAI),
#'laserField': Functor(cDE, DistributedStomperAI.DistributedStomperAI),
'trigger': DistributedTriggerAI.DistributedTriggerAI,
'moleField': Functor(cDE, DistributedMoleFieldAI.DistributedMoleFieldAI),
'maze': Functor(cDE, DistributedMazeAI.DistributedMazeAI),
})
| [
"[email protected]"
] | |
50d9bcb586a1faed7b58e48723a78679a98837d8 | 279ed7207ac2c407487416b595e12f573049dd72 | /pybvk/apps/bvkdos.py | 13cba733f4b05f59814d552d8b8aa8f9f4c231a3 | [] | no_license | danse-inelastic/pybvk | 30388455e211fec69130930f2925fe16abe455bd | 922c8c0a8c50a9fabd619fa06e005cacc2d13a15 | refs/heads/master | 2016-09-15T22:21:13.131688 | 2014-06-25T17:12:34 | 2014-06-25T17:12:34 | 34,995,254 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,211 | py | #!/usr/bin/env python
# Given a python module that creates the "system" file, calculate the DOS.
# The python module is optional; if it is not given, a "system" file must already exist.
import os
def run(systempy, system, df, N, Vecs):
# if neither systempy nor system is specified, it is assumed that we have a "system" file
if not systempy and not system:
system = 'system'
# create temporary work directory
import tempfile
workdir = tempfile.mkdtemp()
# create the system file in the temporary work directory
from bvk.applications.executionharness import createSystem, execute
system = createSystem(workdir, systempy=systempy, system=system)
#
# build the command to run
Vecs = int(Vecs)
cmds = [
'bvkrandomQs %s' % N,
'bvkdisps %s' % Vecs,
'bvkpartialdos %s %s' % (Vecs, df),
]
return execute(cmds, workdir=workdir, outputfiles=['DOS'])
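# Illustrative command line (option values are hypothetical):
#   bvkdos.py -N 10 -d 0.1 -P mysystem.py
# builds "system" from mysystem.py in a temporary work directory, samples
# random q-points, and collects the resulting DOS output file.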
from optparse import OptionParser
def main():
usage = "usage: %prog [options] [system]"
parser = OptionParser(usage)
parser.add_option(
"-N", "--N-kpts-1D", dest="N",
default = 10,
help="Number of k points in 1D for sampling reciprocal space",
)
parser.add_option(
"-d", "--df", dest="df",
default = 0.1,
help="frequency axis bin size(THz)",
)
parser.add_option(
"-E", "--compute-eigen-vectors",
default = False,
        help='compute eigen vectors or not?',
dest="Vecs",
)
parser.add_option(
'-P', '--system-python-file',
default = '',
help = 'python file that generates the "system" file when executed. when this option is supplied, please do not specify the "system" file path as the argument',
dest = 'systempy',
)
(options, args) = parser.parse_args()
if len(args) > 1:
parser.error("incorrect number of arguments")
if len(args) == 1:
system = args[0]
else:
system = None
N = int(options.N)
df = float(options.df)
    Vecs = bool(options.Vecs)
systempy = options.systempy
return run(systempy, system, df, N, Vecs)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
e95450b4b2a062095da6f2a52983a8128ebe702a | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02640/s043458506.py | aa5a66ce9487ea4e0b7b83b41044d3742b278eb9 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | py | # Crane and Turtle
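# With X animals in total, of which t are turtles, the leg count is
# 2*(X - t) + 4*t = 2*(X + t), so the loop below just checks whether
# Y == 2*(X + t) for some 0 <= t <= X.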
X, Y = [int(i) for i in input().split()]
for t in range(0, X + 1):
legs = 2 * (X + t)
if Y == legs:
a = 'Yes'
break
if Y < legs:
a = 'No'
break
else:
a = 'No'
print(a)
| [
"[email protected]"
] | |
c770e3b327455e13849eeee61191a2598e34255f | e1a56ac7e85030de9ed440db0d276612fc8ad02e | /wsperf.py | ac4c5131cd39821c4f0630ba1f46a55189edb2fd | [] | no_license | hoangtrucit/wsperf | cfeb9ee794475ecffcf96e9b1929ca69ed2a8942 | 3d9dd986b1fb7dd0af38540191cc9ea73f119770 | refs/heads/master | 2021-10-20T19:30:06.236857 | 2019-03-01T13:52:34 | 2019-03-01T13:52:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,838 | py | import os, sys, argparse
from twisted.internet import reactor
from twisted.internet.utils import getProcessOutput, getProcessValue
from twisted.internet.defer import DeferredList
import analyze
if __name__ == '__main__':
default_wsperf = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'wsperf')
parser = argparse.ArgumentParser(description = 'wsperf test driver')
parser.add_argument('--wsuri', dest = 'wsuri', type = str, default = 'ws://127.0.0.1:9000', help = 'The WebSocket URI the testee is listening on, e.g. ws://127.0.0.1:9000.')
parser.add_argument('--workers', dest = 'workers', type = int, default = 4, help = 'Number of wsperf worker processes to spawn.')
parser.add_argument('--threads', dest = 'threads', type = int, default = 0, help = 'Number of wsperf worker threads to spawn at each worker [0: run on main thread, >0: spawn that many background worker threads].')
parser.add_argument('--conns', dest = 'conns', type = int, default = 50000, help = 'Number of WebSocket connections to open from each worker.')
parser.add_argument('--lowmark', dest = 'lowmark', type = int, default = 250, help = 'Low watermark for each worker.')
parser.add_argument('--highmark', dest = 'highmark', type = int, default = 500, help = 'High watermark for each worker.')
parser.add_argument('--resultfile', dest = 'resultfile', type = str, default = r'result_%d.json', help = 'Result file pattern.')
parser.add_argument('--wsperf', dest = 'wsperf', type = str, default = default_wsperf, help = 'Full path to wsperf executable.')
parser.add_argument('--skiprun', dest = 'skiprun', action = "store_true", default = False, help = 'Skip test run.')
parser.add_argument('--skipanalyze', dest = 'skipanalyze', action = "store_true", default = False, help = 'Skip analyze results.')
options = parser.parse_args()
    resultfiles = [(options.resultfile % i) for i in range(options.workers)]
if options.skiprun:
## here we don't start a reactor.
if not options.skipanalyze:
analyze.printResults(resultfiles)
else:
df = []
for i in range(options.workers):
args = [options.wsuri,
str(options.threads),
str(options.conns),
str(options.lowmark),
str(options.highmark),
options.resultfile % i]
## run wsperf executable
d = getProcessOutput(options.wsperf, args, os.environ)
## accumulate any output
df.append(d)
d = DeferredList(df, consumeErrors = True)
def onok(res):
if not options.skipanalyze:
analyze.printResults(resultfiles)
reactor.stop()
def onerr(err):
            print(err)
reactor.stop()
d.addCallbacks(onok, onerr)
reactor.run()
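        # Illustrative invocation (values are hypothetical):
        #   python wsperf.py --wsuri ws://127.0.0.1:9000 --workers 2 --conns 1000
        # spawns two wsperf worker processes, waits for both to finish, then
        # prints the combined analysis of result_0.json and result_1.json.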
| [
"[email protected]"
] | |
195ac95f63e61157f163bece66445bf2cac32366 | e58ecbf6af1cafbff42e2cc33abcbbf6e4ee7475 | /tests/accounting/test_call_fee_scalar.py | a6ed4f4b831b346ef58636e8757486598b762f01 | [
"MIT"
] | permissive | celeduc/ethereum-alarm-clock | 1edbbe207e0f9a7ea34a792728a2b6dceda455dd | fd202f5e96b753e6ce6bcee9a67363c468c10c7b | refs/heads/master | 2020-02-26T17:23:54.054416 | 2015-11-09T06:11:28 | 2015-11-09T06:11:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 802 | py | test_values = (
(20, (
(4, 145),
(8, 138),
(12, 129),
(16, 117),
(20, 100),
(24, 83),
(28, 71),
(32, 62),
(36, 55),
)),
(500, (
(50, 148),
(125, 143),
(275, 132),
(400, 117),
(475, 105),
(500, 100),
(525, 95),
(600, 83),
(700, 71),
(900, 55),
(1200, 41),
)),
)
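# The fixture data above pins the curve's shape: the scalar is exactly 100 when
# the gas price equals the base gas price, rises toward ~150 for cheaper gas,
# and falls toward ~40 as gas becomes several times more expensive.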
deploy_contracts = [
"CallLib",
]
def test_call_fee_scalar_values(CallLib):
for base_gas_price, values in test_values:
actual_values = [
(CallLib.getCallFeeScalar(base_gas_price, gas_price), expected)
for gas_price, expected in values
]
assert all(actual == expected for actual, expected in actual_values)
| [
"[email protected]"
] | |
090878f19ffe408b52f9598216f4a2f609c8d58e | e9685369da45e5c502ce5540891e6018eadba252 | /backend/server/apps/tasks/api/serializers.py | 8f557e1bb12bfa5ff5230105c2b8d8284b099ec9 | [
"MIT"
] | permissive | Turi-fly/simple-tasks | 9703a2dd405081b129222cf6a325a5b591709d8c | ae759a8100f6604b6d8fc00f19cf3aedbd945f3d | refs/heads/master | 2022-04-10T15:26:01.590888 | 2018-11-14T08:45:46 | 2018-11-14T08:45:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 774 | py | from rest_framework import serializers
import tasks.models as models
import cerberus
class TaskSerializer(serializers.ModelSerializer):
class Meta:
model = models.Task
read_only_fields = ('id', 'state', 'result', 'task_id',)
fields = ('id', 'state', 'params', 'result', 'task_id')
def validate_params(self, params):
if params is None or params == '':
raise serializers.ValidationError("Params cannot be empty")
schema = {'arg1': {'type': 'integer', 'required': True},
'arg2': {'type': 'integer', 'required': True}}
validator = cerberus.Validator(schema)
if not validator.validate(params):
raise serializers.ValidationError(validator.errors)
return params
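    # Illustrative params payloads for the validator above (hypothetical values):
    #   {"arg1": 1, "arg2": 2}  -> accepted
    #   {"arg1": "x"}           -> ValidationError (wrong type, arg2 missing)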
| [
"[email protected]"
] | |
510b351cc1af18f3ed0180c70ef1242ca5bac1d8 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2171/48117/297146.py | bda92c1ed48fa43c5286605774a8b0ab0e50019c | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,525 | py | class Node():
def __init__(self, item):
self.item = item
self.next = None
class LinkList():
def __init__(self, node = None):
self.head = node
def isEmpty(self):
return self.head == None
def append(self, newItem):
newNode = Node(newItem)
if self.isEmpty():
self.head = newNode
newNode.next = self.head
else:
nowNode = self.head
while nowNode.next != self.head:
nowNode = nowNode.next
nowNode.next = newNode
newNode.next = self.head
def add(self, newItem):
newNode = Node(newItem)
if self.isEmpty():
self.head = newNode
else:
nowNode = self.head
while nowNode.next != None:
nowNode = nowNode.next
nowNode.next = newNode
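# The driver below reads questNum test cases; each case splits a list of n
# integers into evens and odds (insertion order preserved) and prints the
# evens first, then the odds, on a single line.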
questNum = int(input())
for quest in range(questNum):
n = int(input())
s = input().split(' ')
for i in range(n):
s[i] = int(s[i])
p = LinkList()
for i in range(n):
p.add(s[i])
p1 = p.head
odd = LinkList()
ou = LinkList()
    while p1 is not None:  # walk the whole list, including the last node
if p1.item % 2 == 0:
ou.add(p1.item)
else:
odd.add(p1.item)
p1 = p1.next
ou1 = ou.head
odd1 = odd.head
    while ou1 is not None:
        print(ou1.item, end=' ')
        ou1 = ou1.next
    while odd1 is not None:
        print(odd1.item, end=' ')
        odd1 = odd1.next
print() | [
"[email protected]"
] | |
c1395406230bb6f5616f9eabc0e1b9a4999b8e2a | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/9/wo-.py | bace8c7b0b9cb45c71200a51bd1870340df7d916 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
            print(' '.join(lineRemaining))
        else:
            print()
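# Illustrative input line (hypothetical file contents):
#   wO- " hello world "
# main() dispatches it to printFunction, which strips the standalone quote
# tokens and prints: hello world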
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'wO-':
printFunction(data[1:])
else:
                print('ERROR')
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
45e87ed9a82e88d8e774f45921ed3227fd68165e | 4dbd12da17cc45a5482afc8cea02051e798731a9 | /courses_project/apps/courses/urls.py | ab576aa8a6f94c45e5f11e2186a1af9f96e0ddaa | [] | no_license | tsicroxe/django_projects | 71b9bec6d834f53fde892606799b4bc96ba45a91 | c11036c78d120e5ffa51055e2999dbe05b0d36eb | refs/heads/master | 2021-01-11T07:03:53.045558 | 2016-12-07T20:46:05 | 2016-12-07T20:46:05 | 71,937,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 346 | py | from django.conf.urls import url
from . import views
#from django.contrib import admin
urlpatterns = [
#url(r'^admin/', admin.site.urls),
url(r'^$', views.index, name='index'),
url(r'^create$', views.create, name='create'),
url(r'^(?P<id>\d+)/destroy$', views.destroy, name='destroy'),
]
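# Illustrative routing (hypothetical request paths):
#   ""          -> views.index
#   "create"    -> views.create
#   "7/destroy" -> views.destroy(request, id='7')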
| [
"[email protected]"
] | |
21e8e5573f1c6037a1404e7518ad11fd5494c097 | b2319c5e14c94edfb5a39e4c490c1ae6183651ed | /deepgoweb/apps/deepgo/migrations/0013_auto_20190902_0904.py | edb4c8e1436fbb064813d7d04f2b93874adbe234 | [] | no_license | coolmaksat/deepgoweb | 6d67f45059d7bdb4548d50c182a038c6f9c70a31 | fd4904b6b18dd2af06e000679f406b7353a3534f | refs/heads/master | 2021-06-12T14:42:14.513686 | 2021-04-17T10:23:39 | 2021-04-17T10:23:39 | 161,017,035 | 0 | 0 | null | 2018-12-09T07:49:26 | 2018-12-09T07:49:26 | null | UTF-8 | Python | false | false | 2,974 | py | # Generated by Django 2.2.4 on 2019-09-02 09:04
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('deepgo', '0012_auto_20190505_0848'),
]
operations = [
migrations.CreateModel(
name='Taxonomy',
fields=[
('id', models.PositiveIntegerField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=127)),
],
),
migrations.RemoveField(
model_name='protein',
name='ppi_embedding',
),
migrations.RemoveField(
model_name='protein',
name='sequence',
),
migrations.RemoveField(
model_name='protein',
name='sequence_md5',
),
migrations.RemoveField(
model_name='protein',
name='uni_accession',
),
migrations.RemoveField(
model_name='protein',
name='uni_entry_id',
),
migrations.AddField(
model_name='protein',
name='acc_id',
field=models.CharField(default='PROTEIN', max_length=15, unique=True),
preserve_default=False,
),
migrations.AddField(
model_name='protein',
name='gene',
field=models.CharField(blank=True, max_length=31, null=True),
),
migrations.AddField(
model_name='protein',
name='name',
field=models.CharField(default='name', max_length=127),
preserve_default=False,
),
migrations.AddField(
model_name='protein',
name='pro_id',
field=models.CharField(db_index=True, default='PROTEIN', max_length=31),
preserve_default=False,
),
migrations.AddField(
model_name='protein',
name='reviewed',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='protein',
name='id',
field=models.PositiveIntegerField(primary_key=True, serialize=False),
),
migrations.CreateModel(
name='Annotation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('go_id', models.PositiveIntegerField(db_index=True)),
('score', models.PositiveIntegerField()),
('protein', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='annotations', to='deepgo.Protein')),
],
),
migrations.AddField(
model_name='protein',
name='taxon',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='proteins', to='deepgo.Taxonomy'),
),
]
| [
"[email protected]"
] | |
b1e7bc2ea6a672534d6f1fe70f55d35439a84b1f | cd40b7cc395f36740000ed4a4144b1c0666ab0fd | /tests/test_hstrat/test_stratum_retention_strategy/test_stratum_retention_algorithms/test_recency_proportional_resolution_algo/test_IterRetainedRanks.py | e25d30f8fbb8105935530e3c749ac1f26bb0365f | [
"MIT"
] | permissive | mmore500/hstrat | 94fd22c86a87a5707590b9398ef679444ed82d6d | b2d2caded1db5e2dc681d9f171d7c74b322c55c3 | refs/heads/master | 2023-08-31T03:36:44.457576 | 2023-08-25T14:39:29 | 2023-08-25T14:39:29 | 464,531,144 | 5 | 2 | NOASSERTION | 2023-08-25T13:07:52 | 2022-02-28T15:11:45 | Python | UTF-8 | Python | false | false | 7,230 | py | import itertools as it
import numbers
from iterpop import iterpop as ip
import numpy as np
import pytest
from hstrat._auxiliary_lib import all_same, pairwise
from hstrat.hstrat import recency_proportional_resolution_algo
@pytest.mark.parametrize(
"recency_proportional_resolution",
[
0,
1,
2,
3,
7,
],
)
@pytest.mark.parametrize(
"time_sequence",
[
range(10**3),
(i for i in range(10**2) for __ in range(2)),
np.random.default_rng(1).integers(
low=0,
high=2**32,
size=10,
),
(2**32,),
],
)
def test_impl_consistency(recency_proportional_resolution, time_sequence):
policy = recency_proportional_resolution_algo.Policy(
recency_proportional_resolution
)
spec = policy.GetSpec()
impls = [
*recency_proportional_resolution_algo._scry._IterRetainedRanks_.impls
]
instances = [impl(spec) for impl in impls] + [
lambda __, num_strata_deposited: policy.IterRetainedRanks(
num_strata_deposited
)
]
for num_strata_deposited in time_sequence:
assert all_same(
it.chain(
(
list(
impl(spec)(
policy,
num_strata_deposited,
)
)
for impl in impls
),
(
list(
instance(
policy,
num_strata_deposited,
)
)
for instance in instances
),
)
)
@pytest.mark.parametrize(
"impl",
recency_proportional_resolution_algo._scry._IterRetainedRanks_.impls,
)
@pytest.mark.parametrize(
"recency_proportional_resolution",
[
0,
1,
2,
3,
7,
42,
97,
100,
],
)
@pytest.mark.parametrize(
"time_sequence",
[
range(10**3),
(i for i in range(10**2) for __ in range(2)),
np.random.default_rng(1).integers(
low=0,
high=2**32,
size=10,
),
(2**32,),
],
)
def test_only_dwindling_over_time(
impl, recency_proportional_resolution, time_sequence
):
policy = recency_proportional_resolution_algo.Policy(
recency_proportional_resolution
)
spec = policy.GetSpec()
instance = impl(spec)
for num_strata_deposited in time_sequence:
for which in (instance, impl(spec)):
cur_set = {
*which(
policy,
num_strata_deposited,
)
}
next_set = {
*which(
policy,
num_strata_deposited + 1,
)
}
assert cur_set.issuperset(next_set - {num_strata_deposited})
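                # The retained set may only dwindle: every rank kept after the
                # next deposit (other than the newly deposited rank itself)
                # must already have been kept at the current step.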
@pytest.mark.parametrize(
"impl",
recency_proportional_resolution_algo._scry._IterRetainedRanks_.impls,
)
@pytest.mark.parametrize(
"recency_proportional_resolution",
[
0,
1,
2,
3,
7,
42,
97,
100,
],
)
@pytest.mark.parametrize(
"time_sequence",
[
range(10**3),
(i for i in range(10**2) for __ in range(2)),
np.random.default_rng(1).integers(
low=0,
high=2**32,
size=10,
),
(2**32,),
],
)
def test_ranks_sorted_and_unique(
impl, recency_proportional_resolution, time_sequence
):
policy = recency_proportional_resolution_algo.Policy(
recency_proportional_resolution
)
spec = policy.GetSpec()
instance = impl(spec)
for num_strata_deposited in time_sequence:
for which in (instance, impl(spec)):
assert all(
i < j
for i, j in pairwise(
which(
policy,
num_strata_deposited,
)
)
)
@pytest.mark.parametrize(
"impl",
recency_proportional_resolution_algo._scry._IterRetainedRanks_.impls,
)
@pytest.mark.parametrize(
"recency_proportional_resolution",
[
0,
1,
2,
3,
7,
42,
97,
100,
],
)
@pytest.mark.parametrize(
"time_sequence",
[
range(10**3),
(i for i in range(10**2) for __ in range(2)),
np.random.default_rng(1).integers(
low=0,
high=2**32,
size=10,
),
(2**32,),
],
)
def test_zero_and_last_ranks_retained(
impl, recency_proportional_resolution, time_sequence
):
policy = recency_proportional_resolution_algo.Policy(
recency_proportional_resolution
)
spec = policy.GetSpec()
instance = impl(spec)
for num_strata_deposited in time_sequence:
for which in instance, impl(spec):
res = which(
policy,
num_strata_deposited,
)
if num_strata_deposited > 1:
first, *middle, last = res
assert first == 0
assert last == num_strata_deposited - 1
elif num_strata_deposited == 1:
assert ip.popsingleton(res) == 0
else:
assert next(res, None) is None
@pytest.mark.parametrize(
"impl",
recency_proportional_resolution_algo._scry._IterRetainedRanks_.impls,
)
@pytest.mark.parametrize(
"recency_proportional_resolution",
[
0,
1,
2,
3,
7,
42,
97,
100,
],
)
@pytest.mark.parametrize(
"time_sequence",
[
range(10**3),
(i for i in range(10**2) for __ in range(2)),
np.random.default_rng(1).integers(
low=0,
high=2**32,
size=10,
),
(2**32,),
],
)
def test_ranks_valid(impl, recency_proportional_resolution, time_sequence):
policy = recency_proportional_resolution_algo.Policy(
recency_proportional_resolution
)
spec = policy.GetSpec()
instance = impl(spec)
for num_strata_deposited in time_sequence:
for which in (instance, impl(spec)):
assert all(
isinstance(r, numbers.Integral)
and 0 <= r < num_strata_deposited
for r in which(policy, num_strata_deposited)
)
@pytest.mark.parametrize(
"impl",
recency_proportional_resolution_algo._scry._IterRetainedRanks_.impls,
)
@pytest.mark.parametrize(
"recency_proportional_resolution",
[
0,
1,
2,
3,
7,
42,
97,
100,
],
)
def test_eq(impl, recency_proportional_resolution):
policy = recency_proportional_resolution_algo.Policy(
recency_proportional_resolution
)
spec = policy.GetSpec()
instance = impl(spec)
assert instance == instance
assert instance == impl(spec)
assert instance is not None
| [
"[email protected]"
] | |
abfe6cbaac9ddeffce0019053b066e6517c9ec1f | 4bf3aaf77c309a489100b98a8c03532632df152c | /Python/BOJ/13460.py | b39eb5f4d88753e6b925be54efe84dd74b2b14ff | [] | no_license | murane/PS | 7fbfc54d962231949efc67f1a35c4b0119de0780 | e938c6c503aeac08bf65e1e66709172b0e5da6ef | refs/heads/master | 2023-05-06T22:51:54.105811 | 2021-05-30T03:34:53 | 2021-05-30T03:34:53 | 293,699,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,785 | py | import sys
from collections import deque
r=sys.stdin.readline
N,M=map(int,r().split())
board=[]
D=[(1,0),(-1,0),(0,1),(0,-1)]
for _ in range(N):
board.append(list(r().strip()))
for i in range(N):
for j in range(M):
if board[i][j]=="R":
R=[i,j]
board[i][j]="."
elif board[i][j]=="B":
B=[i,j]
board[i][j]="."
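# move() rolls one ball from (x, y) in direction d until it hits a wall or
# falls into the hole, returning (fell_in_hole, distance_rolled, final_position).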
def move(x,y,d):
dist=0
while True:
nextPos=board[x+d[0]][y+d[1]]
if nextPos=='.':
x,y=x+d[0],y+d[1]
elif nextPos=='O':
return True,0,[-1,-1]
elif nextPos=='#':
return False,dist,[x,y]
dist+=1
def bfs():
q=deque()
q.append([R,B,0])
visit=set()
visit.add((tuple(R),tuple(B)))
while q:
red,blue,cnt=q.popleft()
tmpRed,tmpBlue=red,blue
        if cnt == 10:  # a solution must use at most 10 tilts
            return -1
        for i in range(4): # try all four tilt directions
            flgR,distR,red=move(tmpRed[0],tmpRed[1],D[i]) # move both balls first, then inspect the result
flgB,distB,blue=move(tmpBlue[0],tmpBlue[1],D[i])
if flgR and not flgB:
                return cnt+1 # red fell into the hole and blue did not: success
            elif flgB: continue # if blue fell in, this tilt is invalid
            elif not flgR and not flgB: # neither ball fell into the hole
                if red==blue: # both balls ended up on the same cell
if distR>distB:
red=red[0]-D[i][0],red[1]-D[i][1]
else:
blue=blue[0]-D[i][0],blue[1]-D[i][1]
if (tuple(red),tuple(blue)) not in visit:
                    q.append([red,blue,cnt+1]) # push the new state onto the queue
visit.add((tuple(red),tuple(blue)))
return -1
print(bfs())
| [
"[email protected]"
] | |
c8b3a20fa81bc2a10ac839ee93aa3622a97f9a82 | de070f933453e2d15651af1ccc697acf25507bd7 | /deid/version.py | 785a6ee84d0483f6912ea07c5584e25f6da00280 | [
"MIT"
] | permissive | liu3xing3long/deid | cd968b1b5d8e678ad2c41f2b9f1c4572f5f88013 | 491a8ea301d9d47cd4e62eaab31584c26afcc534 | refs/heads/master | 2021-05-14T11:33:12.193255 | 2017-12-22T21:28:32 | 2017-12-22T21:28:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,871 | py | '''
Copyright (c) 2017 Vanessa Sochat
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
__version__ = "0.1.1"
AUTHOR = 'Vanessa Sochat'
AUTHOR_EMAIL = '[email protected]'
NAME = 'deid'
PACKAGE_URL = "https://github.com/pydicom/deid"
KEYWORDS = 'open source, stanford, python, deidentify, dicom'
DESCRIPTION = "deidentify dicom and other images with python and pydicom"
LICENSE = "LICENSE"
INSTALL_REQUIRES = (
('matplotlib', {'min_version': None}),
('requests', {'min_version': '2.12.4'}),
('retrying', {'min_version': '1.3.3'}),
('simplejson', {'min_version': '3.10.0'}),
('six', {'min_version': '1.10'}),
('pygments', {'min_version': '2.1.3'}),
('python-dateutil',{'min_version': None }),
('urllib3',{'min_version': "1.15" }),
('validator.py',{'min_version': None })
)
DEPENDENCY_LINKS = ['https://github.com/pydicom/pydicom/tarball/master']
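# Illustrative only (hypothetical helper): a setup.py could flatten the tuples
# above into pip-style requirement strings, e.g.
#   deps = ["%s>=%s" % (name, opts["min_version"]) if opts["min_version"]
#           else name for name, opts in INSTALL_REQUIRES]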
| [
"[email protected]"
] | |
16d8e89e918f02b740fb31f6d8b1d19b9d2dfda4 | e6e57bf7d4eda37f1188ab72ff249675f40029ee | /cs61a/projects/ants/ants.py | a8a120ed09ea374651365b3c40741aaa2b2431a7 | [] | no_license | juanpedrovel/bomboclap | 4e186331ef1c26c8522e44c21d6a33358471786b | 99db02266c31dd14357ef6a575d35fcf55718617 | refs/heads/master | 2020-04-19T21:16:38.141830 | 2019-01-31T00:31:24 | 2019-01-31T00:31:24 | 168,436,881 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 30,739 | py | """CS 61A presents Ants Vs. SomeBees."""
import random
from ucb import main, interact, trace
from collections import OrderedDict
################
# Core Classes #
################
class Place(object):
"""A Place holds insects and has an exit to another Place."""
def __init__(self, name, exit=None):
"""Create a Place with the given NAME and EXIT.
name -- A string; the name of this Place.
exit -- The Place reached by exiting this Place (may be None).
"""
self.name = name
self.exit = exit
self.bees = [] # A list of Bees
self.ant = None # An Ant
self.entrance = None # A Place
# Phase 1: Add an entrance to the exit
# BEGIN Problem 2
if self.exit:
self.exit.entrance = self
# END Problem 2
def add_insect(self, insect):
"""Add an Insect to this Place.
There can be at most one Ant in a Place, unless exactly one of them is
a container ant (Problem 9), in which case there can be two. If add_insect
tries to add more Ants than is allowed, an assertion error is raised.
There can be any number of Bees in a Place.
"""
if insect.is_ant:
if self.ant is None:
self.ant = insect
else:
# BEGIN Problem 9
if self.ant.can_contain(insect):
self.ant.contain_ant(insect)
elif insect.can_contain(self.ant):
insect.contain_ant(self.ant)
self.ant = insect
else:
assert self.ant is None, 'Two ants in {0}'.format(self)
# END Problem 9
else:
self.bees.append(insect)
insect.place = self
def remove_insect(self, insect):
"""Remove an INSECT from this Place.
A target Ant may either be directly in the Place, or be contained by a
container Ant at this place. The true QueenAnt may not be removed. If
remove_insect tries to remove an Ant that is not anywhere in this
Place, an AssertionError is raised.
A Bee is just removed from the list of Bees.
"""
if insect.is_ant:
# Special handling for QueenAnt
# BEGIN Problem 13
if isinstance(insect, QueenAnt) and insect.TrueQueen:
return
# END Problem 13
# Special handling for container ants
if self.ant is insect:
# Bodyguard was removed. Contained ant should remain in the game
if hasattr(self.ant, 'is_container') and self.ant.is_container:
self.ant = self.ant.contained_ant
else:
self.ant = None
else:
# Contained ant was removed. Bodyguard should remain
if hasattr(self.ant, 'is_container') and self.ant.is_container \
and self.ant.contained_ant is insect:
self.ant.contained_ant = None
else:
assert False, '{0} is not in {1}'.format(insect, self)
else:
self.bees.remove(insect)
insect.place = None
def __str__(self):
return self.name
class Insect(object):
"""An Insect, the base class of Ant and Bee, has armor and a Place."""
is_ant = False
damage = 0
is_watersafe = False
# ADD CLASS ATTRIBUTES HERE
def __init__(self, armor, place=None):
"""Create an Insect with an ARMOR amount and a starting PLACE."""
self.armor = armor
self.place = place # set by Place.add_insect and Place.remove_insect
def reduce_armor(self, amount):
"""Reduce armor by AMOUNT, and remove the insect from its place if it
has no armor remaining.
>>> test_insect = Insect(5)
>>> test_insect.reduce_armor(2)
>>> test_insect.armor
3
"""
self.armor -= amount
if self.armor <= 0:
self.place.remove_insect(self)
def action(self, colony):
"""The action performed each turn.
colony -- The AntColony, used to access game state information.
"""
def __repr__(self):
cname = type(self).__name__
return '{0}({1}, {2})'.format(cname, self.armor, self.place)
class Bee(Insect):
"""A Bee moves from place to place, following exits and stinging ants."""
name = 'Bee'
damage = 1
is_watersafe = True
# OVERRIDE CLASS ATTRIBUTES HERE
def sting(self, ant):
"""Attack an ANT, reducing its armor by 1."""
ant.reduce_armor(self.damage)
def move_to(self, place):
"""Move from the Bee's current Place to a new PLACE."""
self.place.remove_insect(self)
place.add_insect(self)
def blocked(self):
"""Return True if this Bee cannot advance to the next Place."""
# Phase 4: Special handling for NinjaAnt
# BEGIN Problem 7
        ant = self.place.ant
        return ant is not None and ant.blocks_path
# END Problem 7
def action(self, colony):
"""A Bee's action stings the Ant that blocks its exit if it is blocked,
or moves to the exit of its current place otherwise.
colony -- The AntColony, used to access game state information.
"""
destination = self.place.exit
# Extra credit: Special handling for bee direction
# BEGIN EC
"*** YOUR CODE HERE ***"
# END EC
if self.blocked():
self.sting(self.place.ant)
elif self.armor > 0 and destination is not None:
self.move_to(destination)
class Ant(Insect):
"""An Ant occupies a place and does work for the colony."""
is_ant = True
implemented = False # Only implemented Ant classes should be instantiated
food_cost = 0
# ADD CLASS ATTRIBUTES HERE
blocks_path = True
is_container = False
def __init__(self, armor=1):
"""Create an Ant with an ARMOR quantity."""
Insect.__init__(self, armor)
def can_contain(self, other):
return False
class HarvesterAnt(Ant):
"""HarvesterAnt produces 1 additional food per turn for the colony."""
name = 'Harvester'
implemented = True
food_cost = 2
armor = 1
def action(self, colony):
"""Produce 1 additional food for the COLONY.
colony -- The AntColony, used to access game state information.
"""
# BEGIN Problem 1
colony.food += 1
# END Problem 1
class ThrowerAnt(Ant):
"""ThrowerAnt throws a leaf each turn at the nearest Bee in its range."""
name = 'Thrower'
implemented = True
damage = 1
food_cost = 3
armor = 1
def nearest_bee(self, hive, min_range=0, max_range=float('inf')):
"""Return the nearest Bee in a Place that is not the HIVE, connected to
the ThrowerAnt's Place by following entrances.
This method returns None if there is no such Bee (or none in range).
"""
# BEGIN Problem 3 and 4
place = self.place
current_range = 0
while place is not hive and current_range <= max_range:
if not place.bees or current_range < min_range:
place = place.entrance
current_range += 1
else:
return random_or_none(place.bees)
return None
# END Problem 3 and 4
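        # For example, LongThrower calls this with min_range=5, so bees in the
        # first few places ahead are skipped, while ShortThrower caps
        # max_range at 3.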
def throw_at(self, target):
"""Throw a leaf at the TARGET Bee, reducing its armor."""
if target is not None:
target.reduce_armor(self.damage)
def action(self, colony):
"""Throw a leaf at the nearest Bee in range."""
self.throw_at(self.nearest_bee(colony.hive))
def random_or_none(s):
"""Return a random element of sequence S, or return None if S is empty."""
if s:
return random.choice(s)
##############
# Extensions #
##############
class ShortThrower(ThrowerAnt):
"""A ThrowerAnt that only throws leaves at Bees at most 3 places away."""
name = 'Short'
# OVERRIDE CLASS ATTRIBUTES HERE
# BEGIN Problem 4
implemented = True # Change to True to view in the GUI
max_range = 3
food_cost = 2
def nearest_bee(self, hive):
return ThrowerAnt.nearest_bee(self, hive, max_range=self.max_range)
# END Problem 4
class LongThrower(ThrowerAnt):
"""A ThrowerAnt that only throws leaves at Bees at least 5 places away."""
name = 'Long'
# OVERRIDE CLASS ATTRIBUTES HERE
# BEGIN Problem 4
implemented = True # Change to True to view in the GUI
min_range = 5
food_cost = 2
def nearest_bee(self, hive):
return ThrowerAnt.nearest_bee(self, hive, min_range=self.min_range)
# END Problem 4
class FireAnt(Ant):
"""FireAnt cooks any Bee in its Place when it expires."""
name = 'Fire'
damage = 3
# OVERRIDE CLASS ATTRIBUTES HERE
# BEGIN Problem 5
implemented = True # Change to True to view in the GUI
food_cost = 5
armor = 1
# END Problem 5
def reduce_armor(self, amount):
"""Reduce armor by AMOUNT, and remove the FireAnt from its place if it
has no armor remaining. If the FireAnt dies, damage each of the bees in
the current place.
"""
# BEGIN Problem 5
if self.armor <= amount:
bees_copy = list(self.place.bees)
for bee in bees_copy:
bee.reduce_armor(self.damage)
Ant.reduce_armor(self, amount)
# END Problem 5
class HungryAnt(Ant):
"""HungryAnt will take three turns to digest a Bee in its place.
While digesting, the HungryAnt can't eat another Bee.
"""
name = 'Hungry'
# OVERRIDE CLASS ATTRIBUTES HERE
# BEGIN Problem 6
implemented = True # Change to True to view in the GUI
time_to_digest = 3
food_cost = 4
armor = 1
# END Problem 6
def __init__(self, armor=1):
# BEGIN Problem 6
        Ant.__init__(self, armor)
        self.digesting = 0
# END Problem 6
def eat_bee(self, bee):
# BEGIN Problem 6
bee.reduce_armor(bee.armor)
self.digesting = self.time_to_digest
# END Problem 6
def action(self, colony):
# BEGIN Problem 6
if self.digesting:
self.digesting -= 1
elif self.place.bees:
self.eat_bee(random_or_none(self.place.bees))
# END Problem 6
class NinjaAnt(Ant):
"""NinjaAnt does not block the path and damages all bees in its place."""
name = 'Ninja'
damage = 1
# OVERRIDE CLASS ATTRIBUTES HERE
# BEGIN Problem 7
implemented = True # Change to True to view in the GUI
food_cost = 5
armor = 1
blocks_path = False
# END Problem 7
def action(self, colony):
# BEGIN Problem 7
bees_copy = list(self.place.bees)
for bee in bees_copy:
bee.reduce_armor(self.damage)
# END Problem 7
# BEGIN Problem 8
# The WallAnt class
# END Problem 8
class WallAnt(Ant):
"""WallAnt blocks bees with large armor"""
name = "Wall"
implemented = True
food_cost = 4
def __init__(self, armor=4):
Ant.__init__(self, armor)
class BodyguardAnt(Ant):
"""BodyguardAnt provides protection to other Ants."""
name = 'Bodyguard'
# OVERRIDE CLASS ATTRIBUTES HERE
# BEGIN Problem 9
implemented = True # Change to True to view in the GUI
is_container = True
food_cost = 4
# END Problem 9
def __init__(self, armor=2):
Ant.__init__(self, armor)
self.contained_ant = None # The Ant hidden in this bodyguard
def can_contain(self, other):
# BEGIN Problem 9
        return self.contained_ant is None and not other.is_container
# END Problem 9
def contain_ant(self, ant):
# BEGIN Problem 9
self.contained_ant = ant
# END Problem 9
def action(self, colony):
# BEGIN Problem 9
if self.contained_ant:
self.contained_ant.action(colony)
# END Problem 9
class TankAnt(BodyguardAnt):
"""TankAnt provides both offensive and defensive capabilities."""
name = 'Tank'
damage = 1
# OVERRIDE CLASS ATTRIBUTES HERE
# BEGIN Problem 10
implemented = True # Change to True to view in the GUI
food_cost = 6
is_container = True
# END Problem 10
def action(self, colony):
# BEGIN Problem 10
bees_copy = list(self.place.bees)
for bee in bees_copy:
bee.reduce_armor(self.damage)
BodyguardAnt.action(self, colony)
# END Problem 10
class Water(Place):
"""Water is a place that can only hold watersafe insects."""
def add_insect(self, insect):
"""Add an Insect to this place. If the insect is not watersafe, reduce
its armor to 0."""
# BEGIN Problem 11
Place.add_insect(self, insect)
if not insect.is_watersafe:
insect.reduce_armor(insect.armor)
# END Problem 11
# BEGIN Problem 12
class ScubaThrower(ThrowerAnt):
name = 'Scuba'
implemented = True
is_watersafe = True
food_cost = 6
# END Problem 12
# BEGIN Problem 13
class QueenAnt(ScubaThrower): # You should change this line
# END Problem 13
"""The Queen of the colony. The game is over if a bee enters her place."""
name = 'Queen'
# OVERRIDE CLASS ATTRIBUTES HERE
# BEGIN Problem 13
implemented = True # Change to True to view in the GUI
food_cost = 7
TrueQueen = True
# END Problem 13
def __init__(self, armor=1):
# BEGIN Problem 13
        ScubaThrower.__init__(self, armor)  # initialize armor and place like any other ant
        if self.TrueQueen:  # class attribute still True: this is the first queen created
            QueenAnt.TrueQueen = False
            self.TrueQueen = True
self.ants_behind = []
# END Problem 13
def action(self, colony):
"""A queen ant throws a leaf, but also doubles the damage of ants
in her tunnel.
Impostor queens do only one thing: reduce their own armor to 0.
"""
# BEGIN Problem 13
if not self.TrueQueen:
Insect.reduce_armor(self, self.armor)
else:
current_place = self.place.exit
while current_place != None:
place_ant = current_place.ant
if place_ant:
if place_ant not in self.ants_behind:
place_ant.damage *= 2
self.ants_behind.append(place_ant)
if place_ant.is_container and place_ant.contained_ant:
if place_ant.contained_ant not in self.ants_behind:
place_ant.contained_ant.damage *= 2
self.ants_behind.append(place_ant.contained_ant)
current_place = current_place.exit
ThrowerAnt.action(self, colony)
# END Problem 13
def reduce_armor(self, amount):
"""Reduce armor by AMOUNT, and if the True QueenAnt has no armor
remaining, signal the end of the game.
"""
# BEGIN Problem 13
if self.armor <= amount and self.TrueQueen:
bees_win()
else:
Insect.reduce_armor(self, amount)
# END Problem 13
class AntRemover(Ant):
"""Allows the player to remove ants from the board in the GUI."""
name = 'Remover'
implemented = False
def __init__(self):
Ant.__init__(self, 0)
##################
# Status Effects #
##################
def make_slow(action, bee):
"""Return a new action method that calls ACTION every other turn.
action -- An action method of some Bee
"""
# BEGIN Problem EC
"*** YOUR CODE HERE ***"
# END Problem EC
def make_scare(action, bee):
"""Return a new action method that makes the bee go backwards.
action -- An action method of some Bee
"""
# BEGIN Problem EC
"*** YOUR CODE HERE ***"
# END Problem EC
def apply_effect(effect, bee, duration):
"""Apply a status effect to a BEE that lasts for DURATION turns."""
# BEGIN Problem EC
"*** YOUR CODE HERE ***"
# END Problem EC
class SlowThrower(ThrowerAnt):
"""ThrowerAnt that causes Slow on Bees."""
name = 'Slow'
# BEGIN Problem EC
implemented = False # Change to True to view in the GUI
# END Problem EC
def throw_at(self, target):
if target:
apply_effect(make_slow, target, 3)
class ScaryThrower(ThrowerAnt):
"""ThrowerAnt that intimidates Bees, making them back away instead of advancing."""
name = 'Scary'
# BEGIN Problem EC
implemented = False # Change to True to view in the GUI
# END Problem EC
def throw_at(self, target):
# BEGIN Problem EC
"*** YOUR CODE HERE ***"
# END Problem EC
class LaserAnt(ThrowerAnt):
# This class is optional. Only one test is provided for this class.
name = 'Laser'
# OVERRIDE CLASS ATTRIBUTES HERE
# BEGIN Problem OPTIONAL
implemented = False # Change to True to view in the GUI
# END Problem OPTIONAL
def __init__(self, armor=1):
ThrowerAnt.__init__(self, armor)
self.insects_shot = 0
def insects_in_front(self, hive):
# BEGIN Problem OPTIONAL
return {}
# END Problem OPTIONAL
def calculate_damage(self, distance):
# BEGIN Problem OPTIONAL
return 0
# END Problem OPTIONAL
def action(self, colony):
insects_and_distances = self.insects_in_front(colony.hive)
for insect, distance in insects_and_distances.items():
damage = self.calculate_damage(distance)
insect.reduce_armor(damage)
if damage:
self.insects_shot += 1
##################
# Bees Extension #
##################
class Wasp(Bee):
"""Class of Bee that has higher damage."""
name = 'Wasp'
damage = 2
class Hornet(Bee):
"""Class of bee that is capable of taking two actions per turn, although
its overall damage output is lower. Immune to status effects.
"""
name = 'Hornet'
damage = 0.25
def action(self, colony):
for i in range(2):
if self.armor > 0:
super().action(colony)
def __setattr__(self, name, value):
if name != 'action':
object.__setattr__(self, name, value)
class NinjaBee(Bee):
"""A Bee that cannot be blocked. Is capable of moving past all defenses to
assassinate the Queen.
"""
name = 'NinjaBee'
def blocked(self):
return False
class Boss(Wasp, Hornet):
"""The leader of the bees. Combines the high damage of the Wasp along with
status effect immunity of Hornets. Damage to the boss is capped up to 8
damage by a single attack.
"""
name = 'Boss'
damage_cap = 8
action = Wasp.action
def reduce_armor(self, amount):
super().reduce_armor(self.damage_modifier(amount))
def damage_modifier(self, amount):
return amount * self.damage_cap/(self.damage_cap + amount)
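        # Worked example: a raw 8-damage hit becomes 8 * 8 / (8 + 8) = 4, and
        # the modified damage only approaches (never reaches) the cap of 8 as
        # the raw amount grows.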
class Hive(Place):
"""The Place from which the Bees launch their assault.
assault_plan -- An AssaultPlan; when & where bees enter the colony.
"""
def __init__(self, assault_plan):
self.name = 'Hive'
self.assault_plan = assault_plan
self.bees = []
for bee in assault_plan.all_bees:
self.add_insect(bee)
# The following attributes are always None for a Hive
self.entrance = None
self.ant = None
self.exit = None
def strategy(self, colony):
exits = [p for p in colony.places.values() if p.entrance is self]
for bee in self.assault_plan.get(colony.time, []):
bee.move_to(random.choice(exits))
colony.active_bees.append(bee)
class AntColony(object):
"""An ant collective that manages global game state and simulates time.
Attributes:
time -- elapsed time
food -- the colony's available food total
queen -- the place where the queen resides
places -- A list of all places in the colony (including a Hive)
bee_entrances -- A list of places that bees can enter
"""
def __init__(self, strategy, hive, ant_types, create_places, dimensions, food=2):
"""Create an AntColony for simulating a game.
Arguments:
strategy -- a function to deploy ants to places
hive -- a Hive full of bees
ant_types -- a list of ant constructors
create_places -- a function that creates the set of places
dimensions -- a pair containing the dimensions of the game layout
"""
self.time = 0
self.food = food
self.strategy = strategy
self.hive = hive
self.ant_types = OrderedDict((a.name, a) for a in ant_types)
self.dimensions = dimensions
self.active_bees = []
self.configure(hive, create_places)
def configure(self, hive, create_places):
"""Configure the places in the colony."""
self.queen = QueenPlace('AntQueen')
self.places = OrderedDict()
self.bee_entrances = []
def register_place(place, is_bee_entrance):
self.places[place.name] = place
if is_bee_entrance:
place.entrance = hive
self.bee_entrances.append(place)
register_place(self.hive, False)
create_places(self.queen, register_place, self.dimensions[0], self.dimensions[1])
def simulate(self):
"""Simulate an attack on the ant colony (i.e., play the game)."""
num_bees = len(self.bees)
try:
while True:
self.hive.strategy(self) # Bees invade
self.strategy(self) # Ants deploy
for ant in self.ants: # Ants take actions
if ant.armor > 0:
ant.action(self)
for bee in self.active_bees[:]: # Bees take actions
if bee.armor > 0:
bee.action(self)
if bee.armor <= 0:
num_bees -= 1
self.active_bees.remove(bee)
if num_bees == 0:
raise AntsWinException()
self.time += 1
except AntsWinException:
print('All bees are vanquished. You win!')
return True
except BeesWinException:
print('The ant queen has perished. Please try again.')
return False
def deploy_ant(self, place_name, ant_type_name):
"""Place an ant if enough food is available.
This method is called by the current strategy to deploy ants.
"""
constructor = self.ant_types[ant_type_name]
if self.food < constructor.food_cost:
print('Not enough food remains to place ' + ant_type_name)
else:
ant = constructor()
self.places[place_name].add_insect(ant)
self.food -= constructor.food_cost
return ant
def remove_ant(self, place_name):
"""Remove an Ant from the Colony."""
place = self.places[place_name]
if place.ant is not None:
place.remove_insect(place.ant)
@property
def ants(self):
return [p.ant for p in self.places.values() if p.ant is not None]
@property
def bees(self):
return [b for p in self.places.values() for b in p.bees]
@property
def insects(self):
return self.ants + self.bees
def __str__(self):
status = ' (Food: {0}, Time: {1})'.format(self.food, self.time)
return str([str(i) for i in self.ants + self.bees]) + status
class QueenPlace(Place):
"""QueenPlace at the end of the tunnel, where the queen resides."""
def add_insect(self, insect):
"""Add an Insect to this Place.
Can't actually add Ants to a QueenPlace. However, if a Bee attempts to
enter the QueenPlace, a BeesWinException is raised, signaling the end
of a game.
"""
assert not insect.is_ant, 'Cannot add {0} to QueenPlace'
raise BeesWinException()
def ants_win():
"""Signal that Ants win."""
raise AntsWinException()
def bees_win():
"""Signal that Bees win."""
raise BeesWinException()
def ant_types():
"""Return a list of all implemented Ant classes."""
all_ant_types = []
new_types = [Ant]
while new_types:
new_types = [t for c in new_types for t in c.__subclasses__()]
all_ant_types.extend(new_types)
return [t for t in all_ant_types if t.implemented]
class GameOverException(Exception):
"""Base game over Exception."""
pass
class AntsWinException(GameOverException):
"""Exception to signal that the ants win."""
pass
class BeesWinException(GameOverException):
"""Exception to signal that the bees win."""
pass
def interactive_strategy(colony):
"""A strategy that starts an interactive session and lets the user make
changes to the colony.
For example, one might deploy a ThrowerAnt to the first tunnel by invoking
colony.deploy_ant('tunnel_0_0', 'Thrower')
"""
print('colony: ' + str(colony))
msg = '<Control>-D (<Control>-Z <Enter> on Windows) completes a turn.\n'
interact(msg)
def start_with_strategy(args, strategy):
"""Reads command-line arguments and starts a game with those options."""
import argparse
parser = argparse.ArgumentParser(description="Play Ants vs. SomeBees")
parser.add_argument('-d', type=str, metavar='DIFFICULTY',
help='sets difficulty of game (test/easy/medium/hard/extra-hard)')
parser.add_argument('-w', '--water', action='store_true',
help='loads a full layout with water')
parser.add_argument('--food', type=int,
help='number of food to start with when testing', default=2)
args = parser.parse_args()
assault_plan = make_normal_assault_plan()
layout = dry_layout
tunnel_length = 9
num_tunnels = 3
food = args.food
if args.water:
layout = wet_layout
if args.d in ['t', 'test']:
assault_plan = make_test_assault_plan()
num_tunnels = 1
elif args.d in ['e', 'easy']:
assault_plan = make_easy_assault_plan()
num_tunnels = 2
elif args.d in ['n', 'normal']:
assault_plan = make_normal_assault_plan()
num_tunnels = 3
elif args.d in ['h', 'hard']:
assault_plan = make_hard_assault_plan()
num_tunnels = 4
elif args.d in ['i', 'extra-hard']:
assault_plan = make_extra_hard_assault_plan()
num_tunnels = 4
hive = Hive(assault_plan)
dimensions = (num_tunnels, tunnel_length)
return AntColony(strategy, hive, ant_types(), layout, dimensions, food).simulate()
###########
# Layouts #
###########
def wet_layout(queen, register_place, tunnels=3, length=9, moat_frequency=3):
"""Register a mix of wet and and dry places."""
for tunnel in range(tunnels):
exit = queen
for step in range(length):
if moat_frequency != 0 and (step + 1) % moat_frequency == 0:
exit = Water('water_{0}_{1}'.format(tunnel, step), exit)
else:
exit = Place('tunnel_{0}_{1}'.format(tunnel, step), exit)
register_place(exit, step == length - 1)
def dry_layout(queen, register_place, tunnels=3, length=9):
"""Register dry tunnels."""
wet_layout(queen, register_place, tunnels, length, 0)
#################
# Assault Plans #
#################
class AssaultPlan(dict):
"""The Bees' plan of attack for the Colony. Attacks come in timed waves.
An AssaultPlan is a dictionary from times (int) to waves (list of Bees).
>>> AssaultPlan().add_wave(4, 2)
{4: [Bee(3, None), Bee(3, None)]}
"""
def add_wave(self, bee_type, bee_armor, time, count):
"""Add a wave at time with count Bees that have the specified armor."""
bees = [bee_type(bee_armor) for _ in range(count)]
self.setdefault(time, []).extend(bees)
return self
@property
def all_bees(self):
"""Place all Bees in the hive and return the list of Bees."""
return [bee for wave in self.values() for bee in wave]
def make_test_assault_plan():
return AssaultPlan().add_wave(Bee, 3, 2, 1).add_wave(Bee, 3, 3, 1)
def make_easy_assault_plan():
plan = AssaultPlan()
for time in range(3, 16, 2):
plan.add_wave(Bee, 3, time, 1)
plan.add_wave(Wasp, 3, 4, 1)
plan.add_wave(NinjaBee, 3, 8, 1)
plan.add_wave(Hornet, 3, 12, 1)
plan.add_wave(Boss, 15, 16, 1)
return plan
def make_normal_assault_plan():
plan = AssaultPlan()
for time in range(3, 16, 2):
plan.add_wave(Bee, 3, time, 2)
plan.add_wave(Wasp, 3, 4, 1)
plan.add_wave(NinjaBee, 3, 8, 1)
plan.add_wave(Hornet, 3, 12, 1)
plan.add_wave(Wasp, 3, 16, 1)
#Boss Stage
for time in range(21, 30, 2):
plan.add_wave(Bee, 3, time, 2)
plan.add_wave(Wasp, 3, 22, 2)
plan.add_wave(Hornet, 3, 24, 2)
plan.add_wave(NinjaBee, 3, 26, 2)
plan.add_wave(Hornet, 3, 28, 2)
plan.add_wave(Boss, 20, 30, 1)
return plan
def make_hard_assault_plan():
plan = AssaultPlan()
for time in range(3, 16, 2):
plan.add_wave(Bee, 4, time, 2)
plan.add_wave(Hornet, 4, 4, 2)
plan.add_wave(Wasp, 4, 8, 2)
plan.add_wave(NinjaBee, 4, 12, 2)
plan.add_wave(Wasp, 4, 16, 2)
#Boss Stage
for time in range(21, 30, 2):
plan.add_wave(Bee, 4, time, 3)
plan.add_wave(Wasp, 4, 22, 2)
plan.add_wave(Hornet, 4, 24, 2)
plan.add_wave(NinjaBee, 4, 26, 2)
plan.add_wave(Hornet, 4, 28, 2)
plan.add_wave(Boss, 30, 30, 1)
return plan
def make_extra_hard_assault_plan():
plan = AssaultPlan()
plan.add_wave(Hornet, 5, 2, 2)
for time in range(3, 16, 2):
plan.add_wave(Bee, 5, time, 2)
plan.add_wave(Hornet, 5, 4, 2)
plan.add_wave(Wasp, 5, 8, 2)
plan.add_wave(NinjaBee, 5, 12, 2)
plan.add_wave(Wasp, 5, 16, 2)
#Boss Stage
for time in range(21, 30, 2):
plan.add_wave(Bee, 5, time, 3)
plan.add_wave(Wasp, 5, 22, 2)
plan.add_wave(Hornet, 5, 24, 2)
plan.add_wave(NinjaBee, 5, 26, 2)
plan.add_wave(Hornet, 5, 28, 2)
plan.add_wave(Boss, 30, 30, 2)
return plan
from utils import *
@main
def run(*args):
Insect.reduce_armor = class_method_wrapper(Insect.reduce_armor,
pre=print_expired_insects)
start_with_strategy(args, interactive_strategy) | [
"[email protected]"
] | |
0a4e3dfacfddb5d405649e73397541348816d65c | b72f9d9f0769265cdea2b8caff145af9c532ea09 | /practice/abc058_b.py | 284a2391e2511feb8baa6dffec908c7c9b7fbf63 | [] | no_license | ritzcr/AtCoder | 3335fefa8fb1989a0f9da80fe6d0902b46aa2d1f | 15097b0c2568ace653e5080d789047531e50edde | refs/heads/master | 2021-02-12T19:16:41.757421 | 2020-07-05T06:30:57 | 2020-07-05T06:30:57 | 244,620,726 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 123 | py | O = input()
E = input()
out = ""
for x in range(len(O)):
out += O[x]
if len(E) > x:
out += E[x]
print(out)
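# Example (illustrative): O = "xyz" and E = "ab" interleave to "xaybz".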
| [
"[email protected]"
] | |
52b1e429db9ff264272850ea168eeb1c2de376d2 | a3e926f8547f04184c79bdd28b0f886a77778700 | /Lib/fontbakery/reporters/ghmarkdown.py | b7376a00473362e6d22d640af646c8bc5277277e | [
"Apache-2.0"
] | permissive | m4rc1e/fontbakery | 0150a17547b53d6dc79e81407b0374950f90cd16 | da4c4b69abdd41314f9bdb58d9e47722e0680816 | refs/heads/master | 2023-08-02T14:18:00.077821 | 2018-10-17T01:47:51 | 2018-10-17T03:53:06 | 67,598,331 | 0 | 0 | Apache-2.0 | 2018-10-18T09:34:10 | 2016-09-07T10:52:14 | C | UTF-8 | Python | false | false | 3,976 | py | import os
from fontbakery.reporters.serialize import SerializeReporter
from fontbakery.checkrunner import Status
LOGLEVELS=["ERROR","FAIL","WARN","SKIP","INFO","PASS"]
class GHMarkdownReporter(SerializeReporter):
def __init__(self, loglevels, **kwd):
super(GHMarkdownReporter, self).__init__(**kwd)
self.loglevels = loglevels
def emoticon(self, name):
return {
'ERROR': ':broken_heart:',
'FAIL': ':fire:',
'WARN': ':warning:',
'INFO': ':information_source:',
'SKIP': ':zzz:',
'PASS': ':bread:',
}[name]
def html5_collapsible(self, summary, details):
return ("<details>\n"
"<summary>{}</summary>\n"
"{}\n"
"</details>\n").format(summary, details)
def log_md(self, log):
if not self.omit_loglevel(log["status"]):
return "* {} **{}** {}\n".format(self.emoticon(log["status"]),
log["status"],
log["message"])
else:
return ""
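        # e.g. log_md({"status": "FAIL", "message": "m"}) yields
        # "* :fire: **FAIL** m\n", and html5_collapsible("s", "d") wraps its
        # arguments as "<details>\n<summary>s</summary>\nd\n</details>\n".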
def check_md(self, check):
checkid = check["key"][1].split(":")[1].split(">")[0]
check["logs"].sort(key=lambda c: c["status"])
logs = "".join(map(self.log_md, check["logs"]))
github_search_url = ("[{}](https://github.com/googlefonts/fontbakery/"
"search?q={})").format(checkid, checkid)
return self.html5_collapsible("{} <b>{}:</b> {}".format(self.emoticon(check["result"]),
check["result"],
check["description"]),
f"\n* {github_search_url}\n{logs}")
def omit_loglevel(self, msg):
return self.loglevels and (self.loglevels[0] > Status(msg))
def get_markdown(self):
checks = {}
family_checks = []
data = self.getdoc()
num_checks = 0
for section in data["sections"]:
for cluster in section["checks"]:
if not isinstance(cluster, list):
cluster = [cluster]
num_checks += len(cluster)
for check in cluster:
if self.omit_loglevel(check["result"]):
continue
if "filename" not in check.keys():
# That's a family check!
family_checks.append(check)
else:
key = os.path.basename(check["filename"])
if key not in checks:
checks[key] = []
checks[key].append(check)
md = "## Fontbakery report\n\n"
if family_checks:
family_checks.sort(key=lambda c: c["result"])
md += self.html5_collapsible("<b>[{}] Family checks</b>".format(len(family_checks)),
"".join(map(self.check_md, family_checks)) + "<br>")
for filename in checks.keys():
checks[filename].sort(key=lambda c: LOGLEVELS.index(c["result"]))
md += self.html5_collapsible("<b>[{}] {}</b>".format(len(checks[filename]),
filename),
"".join(map(self.check_md, checks[filename])) + "<br>")
if num_checks != 0:
summary_table = "### Summary\n\n" + \
("| {} " + " | {} ".join(LOGLEVELS) + " |\n").format(*[self.emoticon(k) for k in LOGLEVELS]) + \
("|:-----:|:----:|:----:|:----:|:----:|:----:|\n"
"| {} | {} | {} | {} | {} | {} |\n"
"").format(*[data["result"][k] for k in LOGLEVELS]) +\
("| {:.0f}% | {:.0f}% | {:.0f}% | {:.0f}% | {:.0f}% | {:.0f}% |\n"
"").format(*[100*data["result"][k]/num_checks for k in LOGLEVELS])
md += "\n" + summary_table
omitted = [l for l in LOGLEVELS if self.omit_loglevel(l)]
if omitted:
md += "\n" + \
"**Note:** The following loglevels were omitted in this report:\n" + \
"".join(map("* **{}**\n".format, omitted))
return md
| [
"[email protected]"
] | |
0d9c8f3dbbc299c369c4ac837ee49b743180106e | 084db5e25626908a5352339900f12f0000a25a4a | /crediteuropebank/items.py | 9770e32bfb15c3f15bbc7ea5982eda1f5486b696 | [] | no_license | hristo-grudev/crediteuropebank | f60a4c444b9aca06b2e44b699c2ce84703a3382d | 82646cef961dfb318f33ef6a9dd44801a945494a | refs/heads/main | 2023-03-10T08:29:04.156974 | 2021-02-25T09:30:57 | 2021-02-25T09:30:57 | 342,191,469 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | import scrapy
class CrediteuropebankItem(scrapy.Item):
title = scrapy.Field()
description = scrapy.Field()
date = scrapy.Field()
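# Illustrative usage in a spider callback (hypothetical values):
#   item = CrediteuropebankItem(title="...", description="...", date="2021-02-25")
#   yield item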
| [
"[email protected]"
] | |
1b8fb8d7b10372b608afaa5f628de8f096425737 | f9c2f77fea6ffdf820867f02805c7a037627f235 | /PythonBasics/03_Volleyball.py | 0f1cb9aab9fd10836c1d9eb2eb0e9fc07e0f77e6 | [] | no_license | Nikolov-A/SoftUni | 6f253694757f195a5c0df8f24b12dbb4ad4d76c6 | 351b0b970da84e5d930a235fce76853c4dcaa365 | refs/heads/master | 2022-01-12T13:57:11.842394 | 2019-07-07T10:53:48 | 2019-07-07T10:53:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | from math import floor
year = input()
holiday = int(input())
weekend = int(input())
games_in_sofia = (48 - weekend) * (3 / 4)
games_in_home = weekend
games_in_holiday_sofia = holiday * (2 / 3)
total_games = games_in_sofia + games_in_home + games_in_holiday_sofia
if year == "leap":
additional_games = 0.15 * total_games
total_games = additional_games + total_games
print(f"{floor(total_games)}")
else:
print(f"{floor(total_games)}")
| [
"[email protected]"
] | |
4cdaf7a82e2c58ba6dd327460842b08cd2a84836 | fb63b9a6f0fb2a61718133b6c73cf88d6d86b473 | /tests/unit/test_conditions.py | 6e54ec94e09d80b3d8b1c82bb90e99a82b98fea8 | [
"MIT"
] | permissive | Sazpaimon/bloop | e3f15d55253b077e6bb4764e3a3cf614726f33e9 | e5eee6a1c5c46ecbb9a6a3517cca345d756ecc53 | refs/heads/master | 2021-07-12T12:16:59.748176 | 2017-09-23T01:33:21 | 2017-09-23T01:33:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 51,169 | py | import logging
import operator
import pytest
from bloop.conditions import (
AndCondition,
BaseCondition,
BeginsWithCondition,
BetweenCondition,
ComparisonCondition,
ComparisonMixin,
Condition,
ConditionRenderer,
ContainsCondition,
InCondition,
InvalidCondition,
NotCondition,
OrCondition,
Proxy,
Reference,
ReferenceTracker,
get_marked,
get_snapshot,
iter_columns,
iter_conditions,
printable_column_name,
render,
)
from bloop.models import BaseModel, Column
from bloop.signals import object_deleted, object_loaded, object_saved
from bloop.types import Binary, Boolean, Integer, List, Map, Set, String
from ..helpers.models import Document, User
class MockColumn(Column):
"""model, model_name, dynamo_name, __repr__"""
def __init__(self, name):
super().__init__(String(), name="d_" + name)
self.model_name = name
# Mock model so this can render as M.name
self.model = type("M", tuple(), {})
c = MockColumn("c")
d = MockColumn("d")
def condition_for(operation, column=None):
return conditions_for(operation, column=column)[0]
def conditions_for(*operations, column=None):
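    # Build one condition object per requested operation name, all against
    # the same column (a fresh MockColumn("c") unless one is provided).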
column = column or MockColumn("c")
value = 0
values = [1, 2]
conditions = []
if None in operations:
conditions.append(Condition())
if "and" in operations:
left = ComparisonCondition("==", column, value)
right = ComparisonCondition("!=", column, value)
conditions.append(AndCondition(left, right))
if "or" in operations:
left = ComparisonCondition("==", column, value)
right = ComparisonCondition("!=", column, value)
conditions.append(OrCondition(left, right))
if "not" in operations:
inner = ComparisonCondition("==", column, value)
conditions.append(NotCondition(inner))
if "begins_with" in operations:
conditions.append(BeginsWithCondition(column, value))
if "between" in operations:
conditions.append(BetweenCondition(column, *values))
if "contains" in operations:
conditions.append(ContainsCondition(column, value))
if "in" in operations:
conditions.append(InCondition(column, values))
for operation in ("<", "<=", ">", ">=", "!=", "=="):
if operation in operations:
conditions.append(ComparisonCondition(operation, column, value))
return conditions
def non_meta_conditions(column=None):
return conditions_for(
"begins_with", "between", "contains", "in",
">", "<", ">=", "<=", "==", "!=",
column=column
)
def meta_conditions(column=None):
return conditions_for("and", "or", "not", column=column)
def empty_conditions():
return [Condition(), AndCondition(), OrCondition(), NotCondition(Condition())]
@pytest.fixture
def reference_tracker(engine):
return ReferenceTracker(engine)
@pytest.fixture
def renderer(engine):
return ConditionRenderer(engine)
# TRACKING SIGNALS ================================================================================== TRACKING SIGNALS
# Columns are sorted by model name
empty_user_condition = (
User.age.is_(None) &
User.email.is_(None) &
User.id.is_(None) &
User.joined.is_(None) &
User.name.is_(None)
)
def test_on_deleted(engine):
"""When an object is deleted, the snapshot expects all columns to be empty"""
user = User(age=3, name="foo")
object_deleted.send(engine, engine=engine, obj=user)
assert get_snapshot(user) == empty_user_condition
# It doesn't matter if the object had non-empty values saved from a previous sync
object_saved.send(engine, engine=engine, obj=user)
assert get_snapshot(user) == (
User.age.is_({"N": "3"}) &
User.name.is_({"S": "foo"})
)
# The deleted signal still clears everything
object_deleted.send(engine, engine=engine, obj=user)
assert get_snapshot(user) == empty_user_condition
# But the current values aren't replaced
assert user.age == 3
assert user.name == "foo"
def test_on_loaded_partial(engine):
"""When an object is loaded, the state after loading is snapshotted for future atomic calls"""
# Creating an instance doesn't snapshot anything
user = User(age=3, name="foo")
assert get_snapshot(user) == empty_user_condition
# Pretend the user was just loaded. Because only
# age and name are marked, they will be the only
# columns included in the snapshot. A normal load
# would set the other values to None, and the
# snapshot would expect those.
object_loaded.send(engine, engine=engine, obj=user)
# Values are stored dumped. Since the dumped flag isn't checked as
# part of equality testing, we can simply construct the dumped
# representations to compare.
assert get_snapshot(user) == (
User.age.is_({"N": "3"}) &
User.name.is_({"S": "foo"})
)
def test_on_loaded_full(engine):
"""Same as the partial test, but with explicit Nones to simulate a real engine.load"""
user = User(age=3, email=None, id=None, joined=None, name="foo")
object_loaded.send(engine, engine=engine, obj=user)
assert get_snapshot(user) == (
User.age.is_({"N": "3"}) &
User.email.is_(None) &
User.id.is_(None) &
User.joined.is_(None) &
User.name.is_({"S": "foo"})
)
def test_on_modified():
"""When an object's values are set or deleted, those columns are marked for tracking"""
# Creating an instance doesn't mark anything
user = User()
assert get_marked(user) == set()
user.id = "foo"
assert get_marked(user) == {User.id}
# Deleting the value does not clear it from the set of marked columns
del user.id
assert get_marked(user) == {User.id}
# Even when the delete fails, the column is marked.
# We're tracking intention, not state change.
with pytest.raises(AttributeError):
del user.age
assert get_marked(user) == {User.id, User.age}
def test_on_saved(engine):
"""Saving is equivalent to loading w.r.t. tracking.
The state after saving is snapshotted for future atomic operations."""
user = User(name="foo", age=3)
object_saved.send(engine, engine=engine, obj=user)
# Since "name" and "age" were the only marked columns saved to DynamoDB,
# they are the only columns that must match for an atomic save. The
# state of the other columns wasn't specified, so it's not safe to
# assume the intended value (missing vs empty)
assert get_snapshot(user) == (
User.age.is_({"N": "3"}) &
User.name.is_({"S": "foo"})
)
# END TRACKING SIGNALS ========================================================================== END TRACKING SIGNALS
# REFERENCE TRACKER ================================================================================ REFERENCE TRACKER
def test_ref_index_always_increments(reference_tracker):
"""Don't risk forgetting to increment it - ALWAYS increment after getting."""
assert reference_tracker.next_index == 0
assert reference_tracker.next_index == 1
def test_ref_same_name(reference_tracker):
"""Don't create two references for the same name string"""
name = "foo"
expected_ref = "#n0"
ref = reference_tracker._name_ref(name)
same_ref = reference_tracker._name_ref(name)
assert ref == same_ref == expected_ref
assert reference_tracker.attr_names[ref] == name
assert reference_tracker.name_attr_index[name] == ref
assert reference_tracker.counts[ref] == 2
def test_ref_path_empty(reference_tracker):
"""Path reference without a path (column only) is just a name ref"""
column = MockColumn("column")
expected_name = "d_column"
expected_ref = "#n0"
ref = reference_tracker._path_ref(column)
assert ref == expected_ref
assert reference_tracker.attr_names[ref] == expected_name
assert reference_tracker.name_attr_index[expected_name] == ref
assert reference_tracker.counts[ref] == 1
def test_ref_path_complex(reference_tracker):
"""Path reference with integer and string indexes. Strings include duplicates and literal periods."""
column = MockColumn("column")["foo"][3][4]["repeat"]["has.period"]["repeat"]
expected_ref = "#n0.#n1[3][4].#n2.#n3.#n2"
expected_names = {
"#n0": "d_column",
"#n1": "foo",
"#n2": "repeat",
"#n3": "has.period"
}
ref = reference_tracker._path_ref(column)
assert ref == expected_ref
assert reference_tracker.attr_names == expected_names
def test_ref_path_reuse(reference_tracker):
"""paths are re-used, even across columns"""
first = MockColumn("first")[3]["foo"]
second = MockColumn("second")[3]["foo"]
expected_first = "#n0[3].#n1"
expected_second = "#n2[3].#n1"
expected_names = {
"#n0": "d_first",
"#n1": "foo",
"#n2": "d_second"
}
first_ref = reference_tracker._path_ref(first)
second_ref = reference_tracker._path_ref(second)
assert first_ref == expected_first
assert second_ref == expected_second
assert reference_tracker.attr_names == expected_names
def test_ref_path_periods(reference_tracker):
"""Path segments with periods aren't de-duped with each individual segment"""
column = MockColumn("column")["foo"]["foo.bar"]["bar"]
expected_ref = "#n0.#n1.#n2.#n3"
expected_names = {
"#n0": "d_column",
"#n1": "foo",
"#n2": "foo.bar",
"#n3": "bar",
}
ref = reference_tracker._path_ref(column)
assert ref == expected_ref
assert reference_tracker.attr_names == expected_names
def test_ref_value(reference_tracker):
"""no path, value not dumped"""
column = User.age
value = 3
expected_ref = ":v0"
expected_value = {"N": "3"}
expected_values = {":v0": expected_value}
ref, value = reference_tracker._value_ref(column, value)
assert ref == expected_ref
assert value == expected_value
assert reference_tracker.attr_values == expected_values
def test_ref_value_path(reference_tracker):
"""has path, value not dumped"""
column = Document.data["Description"]["Body"]
value = "value"
expected_ref = ":v0"
expected_value = {"S": value}
expected_values = {":v0": expected_value}
ref, value = reference_tracker._value_ref(column, value)
assert ref == expected_ref
assert value == expected_value
assert reference_tracker.attr_values == expected_values
def test_ref_value_dumped(reference_tracker):
"""no path, value already dumped"""
column = Document.id
# This shouldn't be dumped, so we use an impossible value for the type
dumped_value = object()
expected_ref = ":v0"
expected_values = {":v0": dumped_value}
ref, value = reference_tracker._value_ref(column, dumped_value, dumped=True)
assert ref == expected_ref
assert value == dumped_value
assert reference_tracker.attr_values == expected_values
def test_ref_value_dumped_path(reference_tracker):
"""has path, value already dumped"""
column = Document.data["Description"]
    # Description's typedef is Map, which can't dump an object
# This shouldn't be dumped, so we use an impossible value for the type
dumped_value = object()
expected_ref = ":v0"
expected_values = {":v0": dumped_value}
ref, value = reference_tracker._value_ref(column, dumped_value, dumped=True)
assert ref == expected_ref
assert value == dumped_value
assert reference_tracker.attr_values == expected_values
def test_ref_any_column_name(reference_tracker):
"""Render a reference to the column name (and path) when there's no value"""
column = Document.data["Description"]["Body"]
expected_ref = Reference(name="#n0.#n1.#n2", type="name", value=None)
expected_names = {
"#n0": "data",
"#n1": "Description",
"#n2": "Body"
}
ref = reference_tracker.any_ref(column=column)
assert ref == expected_ref
assert reference_tracker.attr_names == expected_names
def test_ref_any_value_is_column(reference_tracker):
"""Render a reference to a value that is also a column"""
column = Document.id["Description"]["Rating"]
# value has its own path
value = Document.data["Description"]["Body"]
expected_ref = Reference(name="#n0.#n1.#n2", type="name", value=None)
expected_names = {
"#n0": "data",
"#n1": "Description",
"#n2": "Body"
}
ref = reference_tracker.any_ref(column=column, value=value)
assert ref == expected_ref
assert reference_tracker.attr_names == expected_names
def test_ref_any_value_not_column(reference_tracker):
"""Render a reference to a regular value"""
column = Document.id
value = 3
expected_value = {"N": "3"}
expected_ref = Reference(name=":v0", type="value", value=expected_value)
expected_values = {":v0": expected_value}
ref = reference_tracker.any_ref(column=column, value=value)
assert ref == expected_ref
assert reference_tracker.attr_values == expected_values
def test_ref_pop_none(reference_tracker):
"""pop_refs without args doesn't pop any refs"""
# Add a name and value ref so we can make sure nothing is cleared
name = reference_tracker.any_ref(column=Document.id).name
value = reference_tracker.any_ref(column=Document.id, value=3).name
reference_tracker.pop_refs()
assert name in reference_tracker.attr_names
assert value in reference_tracker.attr_values
def test_ref_pop_unknown(reference_tracker):
"""Popping an unknown ref doesn't do anything"""
# Add a name and value ref so we can make sure nothing is cleared
name = reference_tracker.any_ref(column=Document.id).name
value = reference_tracker.any_ref(column=Document.id, value=3).name
unknown_name_ref = Reference(name="foo", type="value", value=None)
unknown_value_ref = Reference(name="bar", type="name", value=None)
reference_tracker.pop_refs(unknown_name_ref, unknown_value_ref)
assert name in reference_tracker.attr_names
assert value in reference_tracker.attr_values
def test_ref_pop_name(reference_tracker):
"""References aren't removed until they're popped as many times as they're used"""
name_ref = reference_tracker.any_ref(column=Document.id)
same_name_ref = reference_tracker.any_ref(column=Document.id)
assert reference_tracker.counts[name_ref.name] == 2
# Still in attr_names, name_attr_index
reference_tracker.pop_refs(same_name_ref)
assert reference_tracker.counts[name_ref.name] == 1
assert reference_tracker.attr_names[name_ref.name] == "id"
assert reference_tracker.name_attr_index["id"] == name_ref.name
# Not in attr_names, name_attr_index
reference_tracker.pop_refs(same_name_ref)
assert reference_tracker.counts[name_ref.name] == 0
assert name_ref.name not in reference_tracker.attr_names
assert "id" not in reference_tracker.name_attr_index
# Count doesn't go below 0
reference_tracker.pop_refs(name_ref)
assert reference_tracker.counts[name_ref.name] == 0
def test_ref_pop_value(reference_tracker):
"""Same pop test, for values"""
value_ref = reference_tracker.any_ref(column=Document.id, value=3)
# Have to fake this out a bit, because there's no de-duping for values
    # This test exists to guard against incorrect pop behavior, in case values are
# ever de-duped.
reference_tracker.counts[value_ref.name] += 1
assert reference_tracker.counts[value_ref.name] == 2
# Still in attr_names, name_attr_index
reference_tracker.pop_refs(value_ref)
assert reference_tracker.counts[value_ref.name] == 1
assert reference_tracker.attr_values[value_ref.name] == {"N": "3"}
# Not in attr_names, name_attr_index
reference_tracker.pop_refs(value_ref)
assert reference_tracker.counts[value_ref.name] == 0
assert value_ref.name not in reference_tracker.attr_values
# Count doesn't go below 0
reference_tracker.pop_refs(value_ref)
assert reference_tracker.counts[value_ref.name] == 0
# END REFERENCE TRACKER ======================================================================== END REFERENCE TRACKER
# RENDERER ================================================================================================== RENDERER
def test_render_missing_object(engine):
"""Can't render atomic or update without an object"""
with pytest.raises(InvalidCondition):
render(engine, update=True)
with pytest.raises(InvalidCondition):
render(engine, atomic=True)
@pytest.mark.parametrize("kwarg_name, expression_key", [
("filter", "FilterExpression"),
("key", "KeyConditionExpression"),
("condition", "ConditionExpression"),
])
def test_render_condition_only(kwarg_name, expression_key, engine, caplog):
"""Only renders the given condition"""
condition = (User.email == "@") & (User.name.is_(None))
rendered = render(engine, **{kwarg_name: condition})
assert rendered == {
"ExpressionAttributeNames": {"#n0": "email", "#n2": "name"},
"ExpressionAttributeValues": {":v1": {"S": "@"}},
expression_key: "((#n0 = :v1) AND (attribute_not_exists(#n2)))"
}
assert caplog.record_tuples == [
("bloop.conditions", logging.DEBUG, "popping last usage of Reference(name=':v3', type='value', value=None)"),
("bloop.conditions", logging.DEBUG, "rendering \"==\" as attribute_not_exists"),
]
def test_render_projection_only(engine):
columns = [User.id, User.email, User.id, User.age]
rendered = render(engine, projection=columns)
assert rendered == {
"ExpressionAttributeNames": {"#n0": "id", "#n1": "email", "#n2": "age"},
"ProjectionExpression": "#n0, #n1, #n2",
}
def test_render_atomic_only_new(engine):
"""Atomic condition on a new object only -> all attribute_not_exists"""
rendered = render(engine, obj=User(), atomic=True)
assert rendered == {
"ExpressionAttributeNames": {"#n0": "age", "#n2": "email", "#n4": "id", "#n6": "j", "#n8": "name"},
"ConditionExpression": (
"((attribute_not_exists(#n0)) AND (attribute_not_exists(#n2)) AND"
" (attribute_not_exists(#n4)) AND (attribute_not_exists(#n6)) AND"
" (attribute_not_exists(#n8)))"
)
}
def test_render_atomic_only_partial(engine):
"""Atomic condition on an object already partially synced"""
user = User(id="user_id", age=3, email=None)
# Sync gives us an atomic condition
object_saved.send(engine, engine=engine, obj=user)
# Unlike a new save, this one has no expectation about the values of "joined" or "name"
rendered = render(engine, obj=user, atomic=True)
assert rendered == {
"ExpressionAttributeNames": {"#n0": "age", "#n2": "email", "#n4": "id"},
"ExpressionAttributeValues": {":v1": {"N": "3"}, ":v5": {"S": "user_id"}},
"ConditionExpression": "((#n0 = :v1) AND (attribute_not_exists(#n2)) AND (#n4 = :v5))"
}
def test_render_atomic_and_condition(engine):
"""Atomic condition and condition are ANDed together (condition first)"""
user = User(id="user_id", age=3, email=None)
# Sync gives us an atomic condition
object_saved.send(engine, engine=engine, obj=user)
# Value ref isn't re-used
condition = User.email.contains("@")
rendered = render(engine, obj=user, condition=condition, atomic=True)
assert rendered == {
"ExpressionAttributeNames": {"#n0": "email", "#n2": "age", "#n5": "id"},
"ExpressionAttributeValues": {":v1": {"S": "@"}, ":v3": {"N": "3"}, ":v6": {"S": "user_id"}},
"ConditionExpression": "((contains(#n0, :v1)) AND (#n2 = :v3) AND (attribute_not_exists(#n0)) AND (#n5 = :v6))"
}
def test_render_update_only(engine):
user = User(email="@", age=3)
rendered = render(engine, obj=user, update=True)
assert rendered == {
"ExpressionAttributeNames": {"#n0": "age", "#n2": "email"},
"ExpressionAttributeValues": {":v1": {"N": "3"}, ":v3": {"S": "@"}},
"UpdateExpression": "SET #n0=:v1, #n2=:v3",
}
def test_render_complex(engine):
"""Render a filter condition, key condition, projection, condition, atomic and update"""
user = User(id="uid", age=3, email=None)
# Sync gives us an atomic condition on id, age, email (sorted)
object_saved.send(engine, engine=engine, obj=user)
filter_condition = User.email.contains("@")
key_condition = User.age == 4
# projection isn't sorted by name
projection = [User.name, User.id]
condition = User.age <= User.id
# SET name, REMOVE age
# (in addition to REMOVE email, from email=None)
user.name = "bill"
del user.age
rendered = render(engine, obj=user,
filter=filter_condition, projection=projection, key=key_condition,
atomic=True, condition=condition, update=True)
# Render order: filter, projection, key, (condition & atomic), update
assert rendered == {
"ExpressionAttributeNames": {"#n0": "email", "#n2": "name", "#n3": "id", "#n4": "age"},
"ExpressionAttributeValues": {
":v1": {"S": "@"},
":v5": {"N": "4"},
":v6": {"N": "3"},
":v8": {"S": "uid"},
":v11": {"S": "bill"}
},
"FilterExpression": "(contains(#n0, :v1))",
"ProjectionExpression": "#n2, #n3",
"KeyConditionExpression": "(#n4 = :v5)",
"ConditionExpression": "((#n4 <= #n3) AND (#n4 = :v6) AND (attribute_not_exists(#n0)) AND (#n3 = :v8))",
"UpdateExpression": "SET #n2=:v11 REMOVE #n4, #n0",
}
@pytest.mark.parametrize("func_name, expression_key", [
("render_condition_expression", "ConditionExpression"),
("render_filter_expression", "FilterExpression"),
("render_key_expression", "KeyConditionExpression"),
])
def test_render_simple_conditions(func_name, expression_key, renderer):
"""condition, filter, key expression rendering simply defers to the condition"""
condition = User.name.between("foo", User.age)
render = getattr(renderer, func_name)
render(condition)
assert renderer.rendered == {
"ExpressionAttributeNames": {"#n0": "name", "#n2": "age"},
"ExpressionAttributeValues": {":v1": {"S": "foo"}},
expression_key: "(#n0 BETWEEN :v1 AND #n2)"
}
def test_render_projection_dedupes_names(renderer):
"""Duplicate columns are filtered when rendering the projection expression"""
columns = [User.id, User.email, User.id, User.age]
renderer.render_projection_expression(columns)
assert renderer.rendered == {
"ExpressionAttributeNames": {"#n0": "id", "#n1": "email", "#n2": "age"},
"ProjectionExpression": "#n0, #n1, #n2",
}
def test_render_update_no_changes(renderer):
"""When there aren't any marked *non-key* columns on an object, there's no update expression"""
user = User(id="user_id")
renderer.render_update_expression(user)
assert not renderer.rendered
def test_render_update_set_only(renderer):
"""Only updates are where values were set (none of the values were None or rendered as None)"""
user = User(email="@", age=3)
renderer.render_update_expression(user)
assert renderer.rendered == {
"ExpressionAttributeNames": {"#n0": "age", "#n2": "email"},
"ExpressionAttributeValues": {":v1": {"N": "3"}, ":v3": {"S": "@"}},
"UpdateExpression": "SET #n0=:v1, #n2=:v3",
}
def test_render_update_remove_only(renderer):
"""Only updates were del'd values, values set to None, or values that render as None"""
document = Document()
# Renders as None
document.data = dict()
# Deleted, even though it wasn't set
with pytest.raises(AttributeError):
del document.numbers
# Explicit None
document.value = None
renderer.render_update_expression(document)
assert renderer.rendered == {
"ExpressionAttributeNames": {"#n0": "data", "#n2": "numbers", "#n4": "value"},
"UpdateExpression": "REMOVE #n0, #n2, #n4",
}
def test_render_update_set_and_remove(renderer):
"""Some values set, some values removed"""
document = Document()
# Renders as None -> removed
document.data = dict()
# Deleted, even though it wasn't set
with pytest.raises(AttributeError):
del document.numbers
# Both set
document.value = 3
document.another_value = 4
renderer.render_update_expression(document)
# Ordering is alphabetical by model name: another_value, data, numbers, value
# REMOVE statements will cause a skip in index (because value renders empty and pops the ref)
assert renderer.rendered == {
"ExpressionAttributeNames": {"#n0": "another_value", "#n2": "data", "#n4": "numbers", "#n6": "value"},
"ExpressionAttributeValues": {":v1": {"N": "4"}, ":v7": {"N": "3"}},
"UpdateExpression": "SET #n0=:v1, #n6=:v7 REMOVE #n2, #n4",
}
# END RENDERER ========================================================================================== END RENDERER
# CONDITIONS ============================================================================================== CONDITIONS
def test_abstract_base(renderer):
"""BaseCondition requires 4 methods for subclasses"""
condition = BaseCondition(None)
with pytest.raises(NotImplementedError):
len(condition)
with pytest.raises(NotImplementedError):
repr(condition)
with pytest.raises(NotImplementedError):
condition.render(renderer)
def test_empty_condition():
assert Condition().operation is None
@pytest.mark.parametrize("condition", empty_conditions())
def test_len_empty(condition):
assert len(condition) == 0
def test_iter_empty():
condition = Condition()
assert set(iter_conditions(condition)) == {condition}
assert next(iter_columns(condition), None) is None
def test_render_empty(renderer):
condition = Condition()
condition.render(renderer)
assert not renderer.rendered
@pytest.mark.parametrize("condition", non_meta_conditions())
def test_len_non_empty(condition):
assert len(condition) == 1
@pytest.mark.parametrize("condition", non_meta_conditions())
def test_len_non_meta(condition):
"""Non-meta conditions *must* have exactly 1 condition"""
assert len(condition) == 1
@pytest.mark.parametrize("condition", meta_conditions())
def test_len_meta(condition):
if condition.operation == "not":
assert len(condition) == 1
else:
assert len(condition) == 2
def test_len_cyclic():
"""Cyclic conditions count the cyclic reference"""
# Here's the structure to create:
# root
# / \
# a b
# / \
# c root
root = AndCondition()
a = ComparisonCondition("<", MockColumn("a"), 3)
b = OrCondition()
c = ComparisonCondition(">", MockColumn("c"), 3)
root.values.extend([a, b])
b.values.extend([c, root])
assert len(root) == 4
def test_len_unpack_not():
"""Even though not(not(x)) -> x shouldn't exist, its length should be the inner length"""
lt, gt = conditions_for("<", ">")
outer = NotCondition(lt)
condition = NotCondition(outer)
assert len(condition) == len(outer) == 1
# Swap inner for an AND with length 2
and_ = AndCondition(lt, gt)
outer.values[0] = and_
assert len(condition) == len(outer) == len(and_) == 2
@pytest.mark.parametrize("condition", conditions_for(
"begins_with", "between", "contains", "in",
">", "<", ">=", "<=", "==", "!=",
"and", "or"))
def test_invert_wraps(condition):
"""everything but not and () are wrapped in a not"""
wrapped = ~condition
assert wrapped.operation == "not"
assert wrapped.values[0] is condition
def test_invert_empty():
"""~() -> ()"""
empty = Condition()
assert (~empty) is empty
def test_invert_simplifies():
"""~~x -> x"""
condition = ComparisonCondition(">", MockColumn("c"), 3)
assert (~~condition) is condition
def test_invert_empty_not():
"""~not() -> ()"""
condition = condition_for("not")
assert (~condition).operation == condition.values[0].operation
# CONDITIONS AND/IAND ============================================================================ CONDITIONS AND/IAND
@pytest.mark.parametrize("empty", empty_conditions())
def test_and_empty_conditions(empty):
"""When conditions are falsey (literal empty or meta with no inner value), simplify instead of nesting:
()_1 & ()_2 -> ()_1
x & () -> x
() & x -> x
"""
also_empty = Condition()
not_empty = condition_for(">")
assert (empty & not_empty) is not_empty
assert (not_empty & empty) is not_empty
assert (empty & also_empty) is empty
assert (also_empty & empty) is also_empty
def test_and_both_and():
"""(a & b) & (c & d) -> (a & b & c & d)"""
a, b, c, d = [condition_for(">") for _ in range(4)]
left = AndCondition(a, b)
right = AndCondition(c, d)
assert (left & right).operation == "and"
assert (left & right).values == [a, b, c, d]
assert (right & left).values == [c, d, a, b]
@pytest.mark.parametrize("other", non_meta_conditions())
def test_and_simplifies(other):
"""When only one condition is an and, the other is put in a new and, in the correct place
(a & b) & (c > 2) -> (a & b & (c > 2))
(a > 2) & (b & c) -> ((a > 2) & b & c)
"""
a, b, = [condition_for(">"), condition_for("<")]
and_condition = AndCondition(a, b)
assert (and_condition & other).operation == "and"
assert (and_condition & other).values == [a, b, other]
assert (other & and_condition).values == [other, a, b]
def test_and_basic():
a = condition_for(">")
b = condition_for("<")
assert (a & b).operation == "and"
assert (a & b).values == [a, b]
assert (b & a).values == [b, a]
@pytest.mark.parametrize("empty", empty_conditions())
def test_iand_empty_conditions(empty):
"""Similar to and, empty values don't change the non-empty values. LHS always wins if both empty."""
also_empty = Condition()
not_empty = condition_for(">")
# None of the following modify the object
original_empty = empty
empty &= also_empty
assert empty is original_empty
original_also_empty = also_empty
also_empty &= empty
assert also_empty is original_also_empty
original_not_empty = not_empty
not_empty &= empty
assert not_empty is original_not_empty
# The only modifying __iand__
empty &= not_empty
assert empty is not_empty
def test_iand_both_and():
"""other's conditions are appended to self's conditions"""
a, b, c, d = [condition_for(">") for _ in range(4)]
left = AndCondition(a, b)
right = AndCondition(c, d)
original_left = left
left &= right
assert left is original_left
assert left.values == [a, b, c, d]
assert right.values == [c, d]
@pytest.mark.parametrize("other", non_meta_conditions())
def test_iand_simplifies(other):
"""Similar to and, other value is pushed into the and (on LHS) or front of a new and (on RHS)"""
a, b, = [condition_for(">"), condition_for("<")]
and_condition = AndCondition(a, b)
original_other = other
other &= and_condition
assert other is not original_other
assert other.values == [original_other, a, b]
original_and_condition = and_condition
and_condition &= original_other
assert and_condition is original_and_condition
assert and_condition.values == [a, b, original_other]
def test_iand_basic():
a = condition_for(">")
b = condition_for("<")
original_a = a
original_b = b
a &= original_b
assert a is not original_a
assert a.operation == "and"
assert a.values == [original_a, original_b]
b &= original_a
assert b is not original_b
assert b.operation == "and"
assert b.values == [original_b, original_a]
# CONDITIONS OR/IOR ================================================================================ CONDITIONS OR/IOR
@pytest.mark.parametrize("empty", empty_conditions())
def test_or_empty_conditions(empty):
"""When conditions are falsey (literal empty or meta with no inner value), simplify instead of nesting:
()_1 | ()_2 -> ()_1
x | () -> x
() | x -> x
"""
also_empty = Condition()
not_empty = condition_for(">")
assert (empty | not_empty) is not_empty
assert (not_empty | empty) is not_empty
assert (empty | also_empty) is empty
assert (also_empty | empty) is also_empty
def test_or_both_or():
"""(a | b) | (c | d) -> (a | b | c | d)"""
a, b, c, d = [condition_for(">") for _ in range(4)]
left = OrCondition(a, b)
right = OrCondition(c, d)
assert (left | right).operation == "or"
assert (left | right).values == [a, b, c, d]
assert (right | left).values == [c, d, a, b]
@pytest.mark.parametrize("other", non_meta_conditions())
def test_or_simplifies(other):
"""When only one condition is an or, the other is put in a new or, in the correct place
(a | b) | (c > 2) -> (a | b | (c > 2))
(a > 2) | (b | c) -> ((a > 2) | b | c)
"""
a, b, = [condition_for(">"), condition_for("<")]
or_condition = OrCondition(a, b)
assert (or_condition | other).operation == "or"
assert (or_condition | other).values == [a, b, other]
assert (other | or_condition).values == [other, a, b]
def test_or_basic():
a = condition_for(">")
b = condition_for("<")
assert (a | b).operation == "or"
assert (a | b).values == [a, b]
assert (b | a).values == [b, a]
@pytest.mark.parametrize("empty", empty_conditions())
def test_ior_empty_conditions(empty):
"""Similar to or, empty values don't change the non-empty values. LHS always wins if both empty."""
also_empty = Condition()
not_empty = condition_for(">")
# None of the following modify the object
original_empty = empty
empty |= also_empty
assert empty is original_empty
original_also_empty = also_empty
also_empty |= empty
assert also_empty is original_also_empty
original_not_empty = not_empty
not_empty |= empty
assert not_empty is original_not_empty
# The only modifying __ior__
empty |= not_empty
assert empty is not_empty
def test_ior_both_or():
"""other's conditions are appended to self's conditions"""
a, b, c, d = [condition_for(">") for _ in range(4)]
left = OrCondition(a, b)
right = OrCondition(c, d)
original_left = left
left |= right
assert left is original_left
assert left.values == [a, b, c, d]
assert right.values == [c, d]
@pytest.mark.parametrize("other", non_meta_conditions())
def test_ior_simplifies(other):
"""Similar to or, other value is pushed into the or (on LHS) or front of a new or (on RHS)"""
a, b, = [condition_for(">"), condition_for("<")]
or_condition = OrCondition(a, b)
original_other = other
other |= or_condition
assert other is not original_other
assert other.values == [original_other, a, b]
original_or_condition = or_condition
or_condition |= original_other
assert or_condition is original_or_condition
assert or_condition.values == [a, b, original_other]
def test_ior_basic():
a = condition_for(">")
b = condition_for("<")
original_a = a
original_b = b
a |= original_b
assert a is not original_a
assert a.operation == "or"
assert a.values == [original_a, original_b]
b |= original_a
assert b is not original_b
assert b.operation == "or"
assert b.values == [original_b, original_a]
# CONDITIONS REPR ==================================================================================== CONDITIONS REPR
@pytest.mark.parametrize("condition, expected", [
# and
(AndCondition(), "( & )"),
(AndCondition("foo"), "('foo' &)"),
(AndCondition("a", "b", "c"), "('a' & 'b' & 'c')"),
# or
(OrCondition(), "( | )"),
(OrCondition("foo"), "('foo' |)"),
(OrCondition("a", "b", "c"), "('a' | 'b' | 'c')"),
# not
(NotCondition("a"), "(~'a')"),
# comparisons
(ComparisonCondition("<", column=c, value=3), "(M.c < 3)"),
(ComparisonCondition(">", column=c, value=3), "(M.c > 3)"),
(ComparisonCondition("<=", column=c, value=3), "(M.c <= 3)"),
(ComparisonCondition(">=", column=c, value=3), "(M.c >= 3)"),
(ComparisonCondition("==", column=c, value=3), "(M.c == 3)"),
(ComparisonCondition("!=", column=c, value=3), "(M.c != 3)"),
# begins_with, contains
(BeginsWithCondition(column=c, value=2), "begins_with(M.c, 2)"),
(ContainsCondition(column=c, value=2), "contains(M.c, 2)"),
# between
(BetweenCondition(column=c, lower=2, upper=3), "(M.c between [2, 3])"),
# in
(InCondition(column=c, values=[]), "(M.c in [])"),
(InCondition(column=c, values=[2, 3]), "(M.c in [2, 3])"),
(InCondition(column=c, values=[MockColumn("d"), 3]), "(M.c in [<Column[M.d]>, 3])"),
# empty
(Condition(), "()")
])
def test_repr(condition, expected):
assert repr(condition) == expected
# CONDITIONS EQUALITY ============================================================================ CONDITIONS EQUALITY
def test_eq_empty():
empty = Condition()
assert empty == empty
also_empty = Condition()
assert empty is not also_empty
assert empty == also_empty
def test_eq_wrong_type():
"""AttributeError returns False"""
assert not (Condition() == object())
@pytest.mark.parametrize("other", [
BaseCondition("op", values=list("xy"), column=c["wrong"]["path"]),
BaseCondition("??", values=list("xy"), column=c["foo"]["bar"]),
BaseCondition("op", values=list("xy"), column=None),
# Need to attach a path to the wrong proxy object
BaseCondition("op", values=list("xy"), column=Proxy(obj=None, path=["foo", "bar"])),
BaseCondition("op", values=list("xyz"), column=c["foo"]["bar"]),
BaseCondition("op", values=list("yx"), column=c["foo"]["bar"]),
])
def test_eq_one_wrong_field(other):
"""All four of operation, value, column, and path must match"""
self = BaseCondition("op", values=list("xy"), column=c["foo"]["bar"])
assert not (self == other)
@pytest.mark.parametrize("other", [
BaseCondition("op", values=[c]),
BaseCondition("op", values=["x"]),
BaseCondition("op", values=[c, c]),
BaseCondition("op", values=["x", "x"]),
BaseCondition("op", values=["x", c]),
BaseCondition("op", values=[d, "x"]),
])
def test_eq_values_mismatch(other):
condition = BaseCondition("op", values=[c, "x"])
assert not (condition == other)
# CONDITIONS RENDER ================================================================================ CONDITIONS RENDER
@pytest.mark.parametrize("condition, as_str, expected_names, expected_values", [
# Comparison - all operations
(User.age == 3, "(#n0 = :v1)", {"#n0": "age"}, {":v1": {"N": "3"}}),
(User.age != 3, "(#n0 <> :v1)", {"#n0": "age"}, {":v1": {"N": "3"}}),
(User.age < 3, "(#n0 < :v1)", {"#n0": "age"}, {":v1": {"N": "3"}}),
(User.age > 3, "(#n0 > :v1)", {"#n0": "age"}, {":v1": {"N": "3"}}),
(User.age <= 3, "(#n0 <= :v1)", {"#n0": "age"}, {":v1": {"N": "3"}}),
(User.age >= 3, "(#n0 >= :v1)", {"#n0": "age"}, {":v1": {"N": "3"}}),
# Comparison - against None -> attribute_* functions
(User.age.is_(None), "(attribute_not_exists(#n0))", {"#n0": "age"}, None),
(User.age.is_not(None), "(attribute_exists(#n0))", {"#n0": "age"}, None),
# Comparison - against things that become None -> attribute_* functions
(Document.data == dict(), "(attribute_not_exists(#n0))", {"#n0": "data"}, None),
(Document.data != dict(), "(attribute_exists(#n0))", {"#n0": "data"}, None),
# Comparison - against another Column
(User.name == User.email, "(#n0 = #n1)", {"#n0": "name", "#n1": "email"}, None),
# BeginsWith - against value, Column
(User.name.begins_with("foo"), "(begins_with(#n0, :v1))", {"#n0": "name"}, {":v1": {"S": "foo"}}),
(User.name.begins_with(User.email), "(begins_with(#n0, #n1))", {"#n0": "name", "#n1": "email"}, None),
# Between - against value, Column
(User.age.between(3, 4), "(#n0 BETWEEN :v1 AND :v2)", {"#n0": "age"}, {":v1": {"N": "3"}, ":v2": {"N": "4"}}),
(User.age.between(3, User.age), "(#n0 BETWEEN :v1 AND #n0)", {"#n0": "age"}, {":v1": {"N": "3"}}),
(User.age.between(User.age, 4), "(#n0 BETWEEN #n0 AND :v1)", {"#n0": "age"}, {":v1": {"N": "4"}}),
# Contains - against value, Column
(User.name.contains("foo"), "(contains(#n0, :v1))", {"#n0": "name"}, {":v1": {"S": "foo"}}),
(User.name.contains(User.email), "(contains(#n0, #n1))", {"#n0": "name", "#n1": "email"}, None),
# In - mixed values, Column
(User.age.in_(3, User.age, 4), "(#n1 IN (:v0, #n1, :v2))", {"#n1": "age"}, {":v0": {"N": "3"}, ":v2": {"N": "4"}})
])
def test_render_valid_condition(condition, as_str, expected_names, expected_values, renderer):
assert condition.render(renderer) == as_str
if expected_names:
assert renderer.rendered["ExpressionAttributeNames"] == expected_names
else:
assert "ExpressionAttributeNames" not in renderer.rendered
if expected_values:
assert renderer.rendered["ExpressionAttributeValues"] == expected_values
else:
assert "ExpressionAttributeValues" not in renderer.rendered
@pytest.mark.parametrize("condition", [
# Value is None
User.age < None,
User.age > None,
User.age <= None,
User.age >= None,
User.email.begins_with(None),
# At least one None
User.age.between(3, None),
User.age.between(None, 4),
User.age.between(None, None),
User.email.contains(None),
# No values
User.age.in_(),
# At least one None
User.age.in_(None, 4),
User.age.in_(3, None),
User.age.in_(None, None),
# Not literal None, but becomes None
Document.nested_numbers.contains([]),
# Empty meta conditions
AndCondition(),
OrCondition()
])
def test_render_invalid_condition(condition, renderer):
"""After a condition fails to render, all of its name and value refs should be popped."""
with pytest.raises(InvalidCondition):
condition.render(renderer)
assert not renderer.rendered
def test_render_nested_meta_condition(renderer):
"""Test meta conditions AND, OR, NOT"""
has_name = User.name.is_not(None)
is_foo = User.name == "foo"
is_3 = User.age != 3
is_email_address = User.email.contains("@")
# There's no ref with '1' because the first equality condition (is_not) renders a value ref, and then pops it.
expected = "(((attribute_exists(#n0)) AND (#n0 = :v2)) OR (NOT (#n3 <> :v4)) OR (contains(#n5, :v6)))"
expected_names = {"#n0": "name", "#n3": "age", "#n5": "email"}
expected_values = {":v2": {"S": "foo"}, ":v4": {"N": "3"}, ":v6": {"S": "@"}}
condition = (has_name & is_foo) | (~is_3) | is_email_address
assert condition.render(renderer) == expected
assert renderer.rendered == {
"ExpressionAttributeNames": expected_names,
"ExpressionAttributeValues": expected_values
}
@pytest.mark.parametrize("condition_cls", [AndCondition, OrCondition])
def test_render_and_or_simplify(condition_cls, renderer):
"""When AND/OR have exactly one condition, they only render that condition (without an AND/OR)"""
inner = User.age < 3
condition = condition_cls(inner)
expected = "(#n0 < :v1)"
assert condition.render(renderer) == expected
assert renderer.rendered == {
"ExpressionAttributeNames": {"#n0": "age"},
"ExpressionAttributeValues": {":v1": {"N": "3"}}
}
# END CONDITIONS ====================================================================================== END CONDITIONS
# COMPARISON MIXIN ================================================================================== COMPARISON MIXIN
def test_mixin_repr():
assert repr(ComparisonMixin()) == "<ComparisonMixin>"
def test_mixin_path():
mixin = ComparisonMixin()
proxy = mixin["some_attribute"][3]
assert isinstance(proxy, Proxy)
assert proxy._obj is mixin
assert proxy._path == ["some_attribute", 3]
@pytest.mark.parametrize("op, expected", [
(operator.eq, "=="),
(operator.ne, "!="),
(operator.lt, "<"),
(operator.gt, ">"),
(operator.le, "<="),
(operator.ge, ">="),
])
def test_mixin_magic_comparisons(op, expected):
"""==, !=, <, >, <=, >= create condition objects with the corresponding operation"""
condition = op(c, 3)
assert condition.operation == expected
assert condition.column is c
assert condition.values == [3]
def test_mixin_begins_with():
condition = c.begins_with(3)
assert condition.operation == "begins_with"
assert condition.column is c
assert condition.values == [3]
def test_mixin_between():
condition = c.between(3, 4)
assert condition.operation == "between"
assert condition.column is c
assert condition.values == [3, 4]
def test_mixin_contains():
condition = c.contains(3)
assert condition.operation == "contains"
assert condition.column is c
assert condition.values == [3]
def test_mixin_in_():
condition = c.in_(3, 4)
assert condition.operation == "in"
assert condition.column is c
assert condition.values == [3, 4]
def test_mixin_is_():
condition = c.is_(3)
assert condition.operation == "=="
assert condition.column is c
assert condition.values == [3]
condition = c.is_not(3)
assert condition.operation == "!="
assert condition.column is c
assert condition.values == [3]
@pytest.mark.parametrize("op, typedefs, args", [
(
"begins_with",
[
Integer(), List(String), Map(s=String), Boolean(),
Set(Integer), Set(Binary), Set(String)
],
("one-arg",)
),
(
"contains",
[
Integer(), Boolean(), Map(s=String)
],
("one-arg",)
),
(
"between",
[
Set(String), Set(Binary), Set(String),
List(String), Map(s=String), Boolean()
],
("first-arg", "second-arg")
)
])
def test_unsupported_mixin_function_conditions(op, typedefs, args):
class Model(BaseModel):
id = Column(Integer, hash_key=True)
for typedef in typedefs:
column = Column(typedef, name="d")
column.model = Model
column.model_name = "c"
with pytest.raises(InvalidCondition):
getattr(column, op)(*args)
    with pytest.raises(InvalidCondition):
        column.begins_with(object())
@pytest.mark.parametrize("typedef", [
Set(Integer), Set(Binary), Set(String),
List(String), Map(s=String), Boolean()
])
@pytest.mark.parametrize("op", [
operator.lt,
operator.gt,
operator.le,
operator.ge
])
def test_unsupported_mixin_comparison_conditions(op, typedef):
class Model(BaseModel):
id = Column(Integer, hash_key=True)
column = Column(typedef, name="d")
column.model = Model
column.model_name = "c"
with pytest.raises(InvalidCondition):
op(column, "value")
def test_printable_column_no_path():
"""Model.column"""
assert printable_column_name(User.email) == "email"
def test_printable_column_mixed_path():
"""Model.column[3].foo[1]"""
assert printable_column_name(User.id, path=[3, "foo", "bar", 0, 1]) == "id[3].foo.bar[0][1]"
def test_printable_column_included_path():
"""Path is part of the 'column' that's provided"""
assert printable_column_name(User.id[3]["foo"]["bar"][0][1]) == "id[3].foo.bar[0][1]"
def test_printable_column_both_paths():
"""When both paths are provided, the explicit path wins"""
assert printable_column_name(User.id["not used"], path=[3, "foo", "bar", 0, 1]) == "id[3].foo.bar[0][1]"
# END COMPARISON MIXIN ========================================================================== END COMPARISON MIXIN
# PROXY ======================================================================================================== PROXY
def test_proxy_delegates_getattr():
sentinel = object()
column = MockColumn("col")
column.attribute = sentinel
proxy = column["some"]["path"]
assert proxy.attribute is sentinel
def test_proxy_masks_protected_path_attr():
"""If a proxied object has a _path or _obj attribute, it's not returned through the proxy"""
sentinel = object()
column = MockColumn("col")
column._obj = sentinel
column._path = sentinel
proxy = column["some"]["path"]
assert proxy._obj is not column._obj
assert proxy._path is not column._path
def test_proxy_repr():
column = MockColumn("col")
proxy = column["some"][2]["path"]
assert repr(proxy) == "<Proxy[M.col.some[2].path]>"
# END PROXY ================================================================================================ END PROXY
# ITERATORS ================================================================================================ ITERATORS
@pytest.mark.parametrize("condition", non_meta_conditions())
def test_iter_conditions_non_meta(condition):
"""These conditions aren't and/or/not, so they can't yield any inner conditions"""
assert set(iter_conditions(condition)) == {condition}
@pytest.mark.parametrize("condition", meta_conditions())
def test_iter_conditions_non_cyclic_meta(condition):
"""Yield the inner conditions for each of these meta conditions"""
expected = condition.values
actual = list(iter_conditions(condition))
assert actual == expected
def test_iter_conditions_cyclic():
"""Cyclic conditions can be iterated safely"""
# Here's the structure to create:
# root
# / \
# a b
# / \
# c root
root = AndCondition()
a = ComparisonCondition("<", MockColumn("a"), 3)
b = OrCondition()
c = ComparisonCondition(">", MockColumn("c"), 3)
root.values.extend([a, b])
b.values.extend([c, root])
expected = {root, a, b, c}
actual = set(iter_conditions(root))
assert actual == expected
@pytest.mark.parametrize("condition", [*non_meta_conditions(column=User.age), *meta_conditions(column=User.age)])
def test_iter_columns_single(condition):
assert set(iter_columns(condition)) == {User.age}
def test_iter_columns_nested():
"""Nested AND, OR, NOT are unpacked"""
a = User.age == 3
b = User.name == "foo"
c = User.email.in_(User.age, User.id, "bar")
# Here's the structure to create:
# __root__
# / | \
# a branch \
# | leaf
# b / \
# c root
branch = ~b
root = a & branch
leaf = c | root
root.values.append(leaf)
assert set(iter_columns(root)) == {User.age, User.name, User.email, User.id}
# END ITERATORS ======================================================================================== END ITERATORS
| [
"[email protected]"
] | |
654bde5deddbb976c2e3fe5e7a9a4b33bd606463 | e780a5bd72f98ca2513c993d64a85b08578166a6 | /buildout-cache/eggs/Zope2-2.13.26-py2.7.egg/App/Permission.py | 26fc6c96cef75bd35a47508c6bf2a627db0822a3 | [] | no_license | vedantc98/Plone-test | 023246597ffe848e2a49b9f65742ff49127b190b | 9fd520fc78481e2c0b9b7ec427821e7f961c777e | refs/heads/master | 2021-03-30T22:14:33.368739 | 2018-03-11T19:22:58 | 2018-03-11T19:22:58 | 124,671,713 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,468 | py | ##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################
'''Zope registerable permissions
'''
from AccessControl.class_init import InitializeClass
from AccessControl.SecurityInfo import ClassSecurityInfo
from Acquisition import Implicit
from OFS.role import RoleManager
from OFS.SimpleItem import Item
from Persistence import Persistent
class Permission(RoleManager,
Persistent,
Implicit,
Item
):
"""Model Permission meta-data
"""
meta_type = 'Zope Permission'
icon = 'p_/Permission_icon'
index_html = None
security = ClassSecurityInfo()
manage_options=(
RoleManager.manage_options
+ Item.manage_options
)
def __init__(self, id, title, name):
self.id=id
self.title=title
self.name=name
InitializeClass(Permission)
| [
"[email protected]"
] | |
16174f6a0ceaacfd5739e6f757c7da92e64ce151 | ca8d183f5d6f1f260483a3555efd05870fe1d891 | /com_blacktensor/cop/cov/status/model/status_tf.py | 5e77659c4866db16ffcc2845eb4984f7d7c1aec3 | [
"MIT"
] | permissive | Jelly6489/Stock-Proj | b559304f10614122ddaa00e39c821a65faa9f91d | 3e7b1ad5cddc5b142f0069e024199fe969c7c7e8 | refs/heads/main | 2023-01-13T17:18:33.729747 | 2020-11-13T08:19:33 | 2020-11-13T08:19:33 | 312,512,688 | 0 | 0 | MIT | 2020-11-13T08:11:04 | 2020-11-13T08:11:04 | null | UTF-8 | Python | false | false | 37 | py |
class CovidStatusTf(object):
... | [
"[email protected]"
] | |
4a9cd2050ce1ad1ddda5ed230b8ca4bad878934d | 9183379a07d1d8936d8205d99ecd0e40269e667a | /sphinx/source/exercises/solution/05_encapsulation/printer.py | 414590fa8dc069be2a003ab1ed68e1baaddb3428 | [] | no_license | boegeskov/fall2020 | 477983eb97568e274d3cef9ee22706de172b6046 | 9e50030e3fa99cc5ddb95ff46f93c1a530d256b1 | refs/heads/master | 2023-01-23T18:30:19.893424 | 2020-12-09T07:16:20 | 2020-12-09T07:16:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,540 | py | # printer.py (solution)
"""
3. Machine -> printer
Create a Machine class that takes care of powering the machine on and off.
Create a Printer class that is a subclass of the Machine superclass.
The printer should be able to print to the console.
The printer should have a papertray, which should be in its own class. The papertray class should keep track of the paper; it should have the ability to use paper and to load new paper into the tray when it is empty.
"""
class Machine:
""" takes care of turning on and off """
def __init__(self):
self.__is_on = False
@property
def is_on(self):
return self.__is_on
def power(self):
self.__is_on = not self.__is_on
class Printer(Machine):
def __init__(self):
# 1.
super().__init__()
# 2.
# Machine.__init__(self)
self.__pt = Papertray()
def print(self, text):
if self.__pt.paper == 0:
print('Papertray is empty')
else:
if self.is_on:
print(text)
self.__pt.paper = self.__pt.paper - 1
else:
print('Printer is off')
@property
def load(self):
return self.__pt.paper
    @load.setter
def load(self, no):
self.__pt.paper = no
class Papertray:
def __init__(self):
self.paper = 2
@property
def paper(self):
return self.__paper
@paper.setter
def paper(self, paper):
self.__paper = paper
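# Minimal usage sketch (illustrative only; assumes the classes above):
#     p = Printer()
#     p.power()           # toggle power on (inherited from Machine)
#     p.print('hello')    # prints and consumes one sheet from the tray
#     p.load = 10         # refill the papertray through the property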
| [
"[email protected]"
] | |
52080a362e4c3ceb2822f229da8005edd6ef036e | 4a5f11b55e23999a82b62f5c72b44e9a36d24f63 | /simplemooc/forum/admin.py | 7c813d107c771cc9ce0f430c826d0736f3a53f31 | [] | no_license | diogo-alves/simplemooc | dca62bfcb2ea6357a551a5760778537f083b675c | cfec59f99888e4e23d41f020ff06bfdf39f70203 | refs/heads/master | 2022-05-10T10:32:18.686313 | 2019-06-04T19:30:43 | 2019-06-04T19:30:43 | 190,260,470 | 0 | 0 | null | 2022-04-22T21:34:44 | 2019-06-04T18:46:43 | Python | UTF-8 | Python | false | false | 585 | py | from django.contrib import admin
from .models import Thread, Reply
class ThreadAdmin(admin.ModelAdmin):
list_display = ['title', 'body', 'author', 'updated_at']
search_fields = ['title', 'body', 'author__username']
prepopulated_fields = {'slug': ('title',)}
class ReplyAdmin(admin.ModelAdmin):
list_display = ['thread', 'reply', 'author', 'correct', 'updated_at']
search_fields = ['thread', 'reply', 'author__username']
list_filter = ['thread__title', 'author__username']
admin.site.register(Thread, ThreadAdmin)
admin.site.register(Reply, ReplyAdmin)
| [
"[email protected]"
] | |
9a8e5ff5ac645a3cc48a2db51ef611314f4736f6 | 20a358db6e9e9872453a7fb36ef21268054b241d | /pyml/ditech/database/insert_traffic.py | 95f8193ac0e10728700c619c82578331c5c5dc3e | [] | no_license | fengkaicnic/pyml | ee654cdef2ba107e1c1e8d598691af3accb96b3c | a19865cdb9eb69517258416a2b08b86f9d43a023 | refs/heads/master | 2021-01-21T04:40:44.659607 | 2016-07-29T08:33:07 | 2016-07-29T08:33:07 | 44,159,061 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,001 | py | import utils
import traceback
import os
import time
import pdb
start = time.time()
try:
path = 'D:/ditech/citydata/season_2/test_set_2/traffic_data'
conn = utils.persist.connection()
cur = conn.cursor()
num = 0
for pl in os.listdir(path):
        if '.' not in pl:
with open(path + '/' + pl) as file:
lines = file.readlines()
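                # Each tab-separated line holds a district hash first, a
                # timestamp last, and congestion-level entries in between;
                # every middle entry becomes its own row in traffic_test2.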
for line in lines:
lst = line.split('\t')
lst = map(lambda x:x.strip(), lst)
for tline in lst[1:-1]:
sql = 'insert into traffic_test2(district_hash, tj_level, tj_time) \
values("%s", "%s", "%s")' % (lst[0], tline, lst[-1])
cur.execute(sql)
conn.commit()
conn.close()
except:
traceback.print_exc()
print sql
conn.commit()
conn.close()
end = time.time()
print end - start
| [
"[email protected]"
] | |
13304ad34c9181779d72a2811439ff96eabc20cf | f8201014d20832d4cc217b473500501cf16df8ba | /virtool/genbank.py | 7035b74b89e201906c6cfa858afebbf05f253176 | [
"MIT"
] | permissive | gitter-badger/virtool | abc996ef8dc160f1fe879a55d6eec4e9043c9840 | 628acc377fb0497c2bfe75e9fa0a61decc59e0e6 | refs/heads/master | 2020-04-23T04:47:02.186926 | 2019-02-15T03:01:12 | 2019-02-15T03:01:12 | 170,919,108 | 0 | 0 | null | 2019-02-15T19:42:26 | 2019-02-15T19:42:25 | null | UTF-8 | Python | false | false | 1,933 | py | import logging
import string
import virtool.http.proxy
logger = logging.getLogger(__name__)
EMAIL = "[email protected]"
TOOL = "virtool"
FETCH_URL = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi"
async def fetch(settings, session, accession):
"""
Fetch the Genbank record for the passed `accession`.
:param settings: the application settings object
:type settings: :class:`virtool.app_settings.Settings`
:param session: an aiohttp client session
:type session: :class:`aiohttp.ClientSession`
:param accession: the accession to fetch
:type accession: Union[int,str]
:return: parsed Genbank data
:rtype: dict
"""
params = {
"db": "nuccore",
"email": EMAIL,
"id": accession,
"retmode": "text",
"rettype": "gb",
"tool": TOOL
}
async with virtool.http.proxy.ProxyRequest(settings, session.get, FETCH_URL, params=params) as resp:
body = await resp.text()
if resp.status != 200:
if "Failed to retrieve sequence" not in body:
logger.warning("Unexpected Genbank error: {}".format(body))
return None
data = {
"host": ""
}
for line in body.split("\n"):
if line.startswith("VERSION"):
data["accession"] = line.replace("VERSION", "").lstrip(" ")
if line.startswith("DEFINITION"):
data["definition"] = line.replace("DEFINITION", "").lstrip(" ")
if "/host=" in line:
data["host"] = line.lstrip(" ").replace("/host=", "").replace('"', "")
# Extract sequence
sequence_field = body.split("ORIGIN")[1].lower()
for char in [" ", "/", "\n"] + list(string.digits):
sequence_field = sequence_field.replace(char, "")
data["sequence"] = sequence_field.upper()
return data
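# Minimal usage sketch (illustrative only; the settings object and the
# accession "NC_001477" are placeholders, and aiohttp is assumed):
#
#     async def example(settings):
#         async with aiohttp.ClientSession() as session:
#             record = await fetch(settings, session, "NC_001477")
#             if record:
#                 print(record["accession"], record["definition"])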
| [
"[email protected]"
] | |
d631c815c2c1ba0870f891182e8369ce24c3be49 | 278060c3e3fce8c2d78640ac748188e80758deac | /tax_app/migrations/0002_auto_20191020_1607.py | d78e86314c315ed836c08685fd62b3ca35a1e8d3 | [] | no_license | ajisaq/BusinessTaxApp | 33507bb64cfabc4a84a56826db3ae90d55539359 | 08031f03a7018c59b2e9b0095e80a5ff0b7b0b70 | refs/heads/master | 2022-05-03T17:29:47.635710 | 2019-12-02T09:25:14 | 2019-12-02T09:25:14 | 219,758,403 | 1 | 3 | null | 2022-04-22T22:50:39 | 2019-11-05T13:59:07 | Python | UTF-8 | Python | false | false | 1,131 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-10-20 15:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tax_app', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Business_Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150)),
],
),
migrations.CreateModel(
name='Location',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('code', models.CharField(max_length=15)),
('name', models.CharField(max_length=150)),
],
),
migrations.AddField(
model_name='profile',
name='contact',
field=models.CharField(default='9340-505', max_length=150),
preserve_default=False,
),
]
| [
"[email protected]"
] | |
371b70b5199d49ec2db85f7e1ccd506400ea44d0 | c2ae65792af1fab2e7843303ef90790819f872e8 | /testing/ds/bin/jupyter-troubleshoot | 7134b7d07b8dd90f5c6d6f159e2fc0a8167a0183 | [] | no_license | behappyyoung/PythonSampleCodes | 47c224ca76ce509a03c8b75ef6b4bf7f49ebdd7f | f7640467273fa8ea3c7e443e798737ca5bcea6f9 | refs/heads/master | 2023-03-15T00:53:21.034605 | 2023-02-13T17:12:32 | 2023-02-13T17:12:32 | 26,919,763 | 3 | 3 | null | 2023-03-07T12:45:21 | 2014-11-20T15:57:16 | Python | UTF-8 | Python | false | false | 274 | #!/Users/s0199669/github/PythonSampleCodes/testing/ds/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from jupyter_core.troubleshoot import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
c68d6ebbadb6d5ca9c872511c913b706c9693f5b | 6fb4419f219fcf2453becfd3fe2d31dca3401da6 | /get-influences.py | 6df1a95dda8a74b2d99570fca626c49ecff004b1 | [] | no_license | christopher-beckham/wiki-lang-influence | dccc04e3565a9df408353a247058a74a9c44f5bb | 9c2832cafc5d5c25f39aff739b0004af08a5234b | refs/heads/master | 2020-04-14T23:53:33.941193 | 2014-06-19T09:57:59 | 2014-06-19T09:57:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,123 | py | #!/usr/bin/python
from cz import cz
import sys
import re
import time
import urllib2
from sys import stdin
def get_langs(st):
st = "".join(cz.striphtml(st))
st = re.sub('\\[.*?\\]', '', st).replace('\n', '')
st = st.split(',')
st = [ st[0] ] + [ name[1::] for name in st[1::] ]
return st
def fe(arr):
print ",".join(arr)
for url in stdin.readlines():
try:
url = url.rstrip()
body = cz.geturl(url)
print url[ url.rfind('/')+1 :: ].replace("_(programming_language)","")
in_by = cz.getbetween2(body, '<th scope="row" style="text-align:left;">Influenced by</th>', '</tr>')
if len(in_by) > 0:
in_by = get_langs(in_by[0])
in_by = [ val.encode('ascii','ignore') for val in in_by ]
fe(in_by)
else:
print
in_to = cz.getbetween2(body, '<th scope="row" style="text-align:left;">Influenced</th>', '</tr>')
if len(in_to) > 0:
in_to = get_langs(in_to[0])
in_to = [ val.encode('ascii','ignore') for val in in_to ]
fe(in_to)
else:
print
except urllib2.HTTPError as e:
print "DONT_USE"
print
print
time.sleep(0.2) | [
"[email protected]"
] | |
3571c8cc983bb908e5fefc686b7dd1d85062152c | 530201d1bf8370a94ddf6ffcffd0c256389b42c9 | /mazeclass.py | 9d240b9505411691b0fd735472fb78dd60b9e784 | [] | no_license | chefakshito/cs520 | 1169a714c1e93bfb546df62b71662ff307a8de98 | 97b81f619e6f54f5125d14b58f04faa325227bd1 | refs/heads/master | 2021-01-21T06:39:35.828236 | 2017-02-27T04:22:37 | 2017-02-27T04:22:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,431 | py | from random import randint
from PIL import Image
imgx = 500; imgy = 500
image = Image.new("RGB", (imgx, imgy))
pixels = image.load()
color = [(0,0, 0), (255, 255, 255)]
sx=101
sy=101
nm=50
maze = [[[0 for x in range(sx)] for y in range(sy)] for z in range(nm)]
dx=[0,1,0,-1]
dy=[-1,0,1,0]
"""
cx=randint(0,mx-1)
cy=randint(0,my-1)
stack.append((cx,cy))
print(stack)
"""
sState=[]
gState=[]
class mazeClass:
def __init__(self):
global imgx; global imgy;
global image;
global pixels;
global color;
global sx
global sy
global maze
global dx
global dy
        global nm
for x in range(nm):
stack = [(randint(0, sx - 1),randint(0, sy - 1))]
sState.append(stack[-1]) #The start state is assigned.
while len(stack) > 0:
                (cx, cy) = stack[-1]
maze[x][cy][cx] = 1
# find a new cell to add
nlst = [] # list of available neighbors
for i in range(4):
ch = randint(0,11)
if ch<6:
choice=1
else:
choice=randint(0,11)
nx = cx + dx[i]; ny = cy + dy[i]
if nx >= 0 and nx < sx and ny >= 0 and ny < sy:
if maze[x][ny][nx] == 0:
# print(maze[x][ny][nx],'check1') #--CHECK--1--
if choice==1:
# print('Entered Choice 1') #--CHECK--3--
# of occupied neighbors must be 1
ctr = 0
for j in range(4):
ex = nx + dx[j]; ey = ny + dy[j]
if ex >= 0 and ex < sx and ey >= 0 and ey < sy:
if maze[x][ey][ex] == 1: ctr += 1
if ctr == 1: nlst.append(i)
if choice>1:
# print('Entered Choice 2') #--CHECK--4--
luck=randint(1,11)
# print(luck,"CHECK 5") #--CHECK--5--
if luck>choice:
nlst.append(i)
# if 1 or more neighbors available then randomly select one and move
# print(nlst,'check2') #--CHECK--2--
if len(nlst) > 0:
ir = nlst[randint(0, len(nlst) - 1)]
cx += dx[ir]; cy += dy[ir]
stack.append((cx, cy))
else: stack.pop()
#A random goal state is generated
while len(gState)!=x+1:
gx=randint(0,sx-1)
gy=randint(0,sy-1)
                if maze[x][gy][gx]==1:  # index as [y][x], matching maze[x][cy][cx] above
gState.append((gx,gy))
# # paint the maze
# for ky in range(imgy):
# for kx in range(imgx):
# pixels[kx, ky] = color[maze[x][sy * ky // imgy][sx * kx // imgx]]
# image.save("Maze_" + str(x) + ".png", "PNG")
def getMaze(self):
        c = randint(0, nm-1)  # valid maze indices are 0..nm-1 (randint is inclusive)
        return (maze[c], c, sState[c], gState[c])
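    # Illustrative use (names as defined above; the returned tuple is
    # (grid, maze index, start (x, y), goal (x, y))):
    #
    #   m = mazeClass()
    #   grid, idx, start, goal = m.getMaze()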
| [
"="
] | = |
a1fd5d1ba5523c8ee5579338e6ee4707b5c82688 | a89dfda3732eb73863b3e2fb1ebb46f1cb40973a | /txweb/lib/str_request.py | 0f90d8107e18a716cf7df54ddaea62f846c72d1f | [
"MIT"
] | permissive | devdave/txWeb | 543ccb7be0671a5e83959bb7cfc8e7804f04a74a | e447fbefd16134cb2f83323c04c20c41638d7da3 | refs/heads/master | 2022-12-15T18:11:50.880675 | 2021-03-24T18:48:16 | 2021-03-24T18:48:16 | 2,116,693 | 1 | 0 | MIT | 2022-12-08T04:28:41 | 2011-07-28T03:55:43 | Python | UTF-8 | Python | false | false | 12,567 | py | """
STATUS PENDING
Redo web Request to act as a str<->bytes proxy between our
application and twisted library.
Since Py3, all strings are unicode which is problematic for twisted as it
only works with bytes (and to some extent ascii). Instead of rewriting the entire library
and bedazzling it with flaky string encode/decode logic, the twisted maintainers
enforced bytes (or gtfo) only.
In this case, I am making a proxy request to catch str and convert to bytes before it moves upward
and into the twisted library. Unfortunately this is a doozy of a sub-project as its not just Request but also
headers logic.
"""
from __future__ import annotations
# import cgi
import json
from urllib.parse import parse_qs
import typing as T
from twisted.web.server import Request, NOT_DONE_YET
# from twisted.web.server import supportedMethods
from twisted.web.http import FOUND
from twisted.web import resource
from twisted.python.compat import intToBytes
from werkzeug.formparser import FormDataParser
from werkzeug.datastructures import MultiDict
from werkzeug.datastructures import FileStorage
from ..log import getLogger
from ..http_codes import HTTP500
log = getLogger(__name__)
class StrRequest(Request):
"""
Request is actually a merger of three different topics.
1. StrRequest contains all of the request data: headers & request body.
2. StrRequest holds the connection API.
3. StrRequest holds the response headers, http code, and response body until finalization.
"""
NOT_DONE_YET: T.Union[int, bool] = NOT_DONE_YET
def __init__(self, *args, **kwargs):
Request.__init__(self, *args, **kwargs)
# self.args = {} is already defined in Request's init
self.form = {} # type: T.Dict[str, str]
self.files = {} # type: T.Dict[str, FileStorage]
self._call_before_render = None
self._call_after_render = None
def getCookie(self, cookie_name: T.Union[str, bytes]) -> T.Union[str, bytes]:
"""
        Wrapper around Request's getCookie that converts between byte strings
        and unicode/str.
Parameters
----------
cookie_name: str
Returns
-------
If cookie_name argument is bytes, returns a byte string else returns str/unicode string
"""
expect_bytes = isinstance(cookie_name, bytes)
if expect_bytes:
return Request.getCookie(self, cookie_name)
else:
byte_name = cookie_name.encode("ascii")
retval = Request.getCookie(self, byte_name)
if retval is not None:
return retval.decode("utf-8")
else:
return None
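        # e.g. getCookie(b"session") -> bytes (or None); getCookie("session") -> str (or None)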
def add_before_render(self, func):
"""
Utility intended solely to make testing easier
"""
self._call_before_render = func
return func
def add_after_render(self, func):
"""
Utility intended solely to make testing easier
"""
self._call_after_render = func
return func
def write(self, data: T.Union[bytes, str]):
"""
Wrapper to prevent unicode/str's from going to Request's write method
"""
if isinstance(data, str):
data = data.encode("utf-8")
elif isinstance(data, bytes) is False:
raise ValueError(f"Attempting to write to transport {type(data)}-{data!r}"
" must be bytes or Str")
return Request.write(self, data)
def writeTotal(self, response_body: T.Union[bytes, str], code: T.Union[int, str, bytes] = None,
message: T.Union[bytes, str] = None) -> T.NoReturn:
"""
Utility to write and then close the connection in one go.
Especially useful for error handling events.
Parameters
----------
response_body:
Content intended to be sent to the client browser
code:
Optional HTTP Code to use
message:
Optional HTTP response message to use
"""
content_length = intToBytes(len(response_body))
self.setHeader("Content-Length", content_length)
if code is not None:
self.setResponseCode(code, message=message)
self.write(response_body)
self.ensureFinished()
def writeJSON(self, data: T.Dict):
"""
Utility to take a dictionary and convert it to a JSON string
"""
payload = json.dumps(data)
content_length = intToBytes(len(payload))
self.setHeader("Content-Type", "application/json")
self.setHeader("Content-Length", content_length)
return self.write(payload)
def setHeader(self, name: T.Union[str, bytes], value: T.Union[str, bytes]):
"""
A quick wrapper to convert unicode inputs to utf-8 bytes
        Sets a header for the RESPONSE.
Parameters
----------
name:
A valid HTTP header
value
Syntactically correct value for the provided header name
"""
if isinstance(name, str):
name = name.encode("utf-8")
if isinstance(value, str):
value = value.encode("utf-8")
return Request.setHeader(self, name, value)
def setResponseCode(self,
code: int = 500,
message: T.Optional[T.Union[str, bytes]] = b"Failure processing request") -> T.NoReturn:
"""
Str to unicode wrapper around twisted.web's Request class.
Parameters
----------
code
message
Returns
-------
"""
if message and not isinstance(message, bytes):
message = message.encode("utf-8")
Request.setResponseCode(self, code, message)
def ensureFinished(self) -> None:
"""
        Ensures the connection has been flushed and closed without throwing an error.
"""
if self.finished not in [1, True]:
self.finish()
def requestReceived(self, command, path, version):
"""
Looks for POST'd arguments in form format (eg multipart).
Allows for file uploads and adds them to .args
"""
self.content.seek(0, 0)
self.args = {}
self.form = {}
self.method, self.uri = command, path
self.clientproto = version
x = self.uri.split(b"?", 1)
if len(x) == 1:
self.path = self.uri
else:
self.path, arg_string = x
self.args = parse_qs(arg_string.decode())
ctype = self.requestHeaders.getRawHeaders(b'content-type')
clength = self.requestHeaders.getRawHeaders(b'content-length')
if ctype is not None:
ctype = ctype[0]
if clength is not None:
clength = clength[0]
if self.method == b"POST" and ctype and clength:
self._processFormData(ctype, clength)
self.content.seek(0, 0)
# Args are going to userland, switch bytes back to str
query_args = self.args.copy()
def query_iter(arguments):
for key, values in arguments.items():
key = key.decode("utf-8") if isinstance(key, bytes) else key
for val in values:
val = val.decode("utf-8") if isinstance(val, bytes) else val
yield key, val
self.args = MultiDict(list(query_iter(query_args)))
self.process()
@property
def methodIsPost(self) -> bool:
"""
Utility method
Returns
-------
bool - Is the current request a POST request
"""
return self.method == b"POST"
@property
def methodIsGet(self) -> bool:
"""
Utility method
Returns
-------
True if the current request is a HTTP GET request.
"""
return self.method == b"GET"
def render(self, resrc: resource.Resource) -> None:
"""
Ask a resource to render itself unless a prefilter returns a string/bytes
body which will be rendered instead.
Parameters
----------
resrc: Resource
The resource to be rendered.
Returns
-------
None, output is written directly to the underlying HTTP channel.
"""
body = None
if self._call_before_render is not None:
body = self._call_before_render(self)
if body is None:
body = resrc.render(self)
if self._call_after_render is not None:
self._call_after_render(self, body)
# TODO deal with HEAD requests or leave it to the Application developer to deal with?
if body is NOT_DONE_YET:
return
if not isinstance(body, bytes):
log.error(
f"<{type(resrc)}{resrc!r}>"
f"- uri={self.uri} returned {type(body)}:{len(body)} but MUST return a byte string")
raise HTTP500()
if self.method == b"HEAD":
if len(body) > 0:
# This is a Bad Thing (RFC 2616, 9.4)
self._log.info(
"Warning: HEAD request {slf} for resource {resrc} is"
" returning a message body. I think I'll eat it.",
slf=self,
resrc=resrc
)
self.setHeader(b'content-length',
intToBytes(len(body)))
self.write(b'')
else:
self.setHeader(b'content-length',
intToBytes(len(body)))
self.write(body)
self.finish()
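    # Sketch of the before-render hook contract implemented above (handler
    # name and path are illustrative): returning bytes from the hook
    # short-circuits the resource's own render method.
    #
    #   def maintenance_banner(request):
    #       if request.path == b"/status":
    #           return b"maintenance in progress"
    #   request.add_before_render(maintenance_banner)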
def _processFormData(self, content_type, content_length):
"""
Processes POST requests and puts POST'd arguments into args.
Thank you Cristina - http://www.cristinagreen.com/uploading-files-using-twisted-web.html
TODO this can be problematic if a large binary file is being uploaded
TODO verify Twisted HTTP channel/transport blows up if file upload size is "too big"
"""
options = {}
if isinstance(content_type, bytes):
content_type = content_type.decode("utf-8") # type: str
if ";" in content_type:
# TODO Possible need to replace some of the header processing logic as boundary part of content-type
# leaks through. eg "Content-type": "some/mime_type;boundary=----BLAH"
content_type, boundary = content_type.split(";", 1)
if "=" in boundary:
_, boundary = boundary.split("=", 1)
options['boundary'] = boundary
content_length = int(content_length)
self.content.seek(0, 0)
parser = FormDataParser()
_, self.form, self.files = parser.parse(self.content, content_type, content_length, options=options)
self.content.seek(0, 0)
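        # Standalone sketch of the werkzeug call used above (the boundary
        # value and field name are illustrative):
        #
        #   from io import BytesIO
        #   from werkzeug.formparser import FormDataParser
        #   body = b'--XX\r\nContent-Disposition: form-data; name="a"\r\n\r\n1\r\n--XX--\r\n'
        #   _, form, files = FormDataParser().parse(
        #       BytesIO(body), "multipart/form-data", len(body), options={"boundary": "XX"})
        #   assert form["a"] == "1"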
def processingFailed(self, reason):
"""
Start of the error handling chain that leads from here all the way up to Application.processingFailed
:param reason:
:return:
"""
self.site.processingFailed(self, reason)
@property
def json(self) -> T.Any:
"""
Is this a JSON posted request?
Returns
-------
Ideally returns a dict object as I cannot think of what else a sane client would send in JSON format.
"""
if self.getHeader("Content-Type") in ["application/json", "text/json"]:
return json.loads(self.content.read())
else:
return None
def get_json(self) -> T.Any:
"""
Intended to mimic Flask api
Returns
-------
dict - a json decoded object
"""
return self.json
def redirect(self, url: T.Union[str, bytes], code=FOUND) -> T.NoReturn:
"""
Utility function that does a redirect.
Set the response code to L{FOUND} and the I{Location} header to the
given URL.
The request should have C{finish()} called after this.
Parameters
----------
url: bytes
What to set the LOCATION http response header to
code: int
What to set the HTTP response code to (eg 3xx)
"""
self.setResponseCode(code)
self.setHeader(b"location", url)
#self.ensureFinished()
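# Minimal wiring sketch (assumes the stock twisted.web Site, whose
# requestFactory attribute selects the Request class; the resource below is
# illustrative and not part of this module):
#
#   from twisted.web import server, resource
#
#   class Hello(resource.Resource):
#       isLeaf = True
#       def render_GET(self, request):  # request is a StrRequest
#           request.setHeader("Content-Type", "text/plain")
#           return b"hello"
#
#   site = server.Site(Hello())
#   site.requestFactory = StrRequest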
| [
"[email protected]"
] | |
630ff6a5ad626ea10a5e3ddb440d4b01416a9d3b | 0533d0ceb5966f7327f40d54bbd17e08e13d36bf | /python/LinkedList/Linked List Cycle II/Linked List Cycle II.py | 996a20582aa17746b392099fe2d2bb7ca6441e83 | [] | no_license | danwaterfield/LeetCode-Solution | 0c6178952ca8ca879763a87db958ef98eb9c2c75 | d89ebad5305e4d1a185b0c6f101a88691602b523 | refs/heads/master | 2023-03-19T01:51:49.417877 | 2020-01-11T14:17:42 | 2020-01-11T14:17:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 711 | py | # class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def detectCycle(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
slow = head
fast = head
step = 0
while slow and fast and fast.next:
slow = slow.next
fast = fast.next.next
step += 1
if slow == fast:
break
if not fast or not fast.next:
return None
slow2 = head
index = 0
while slow != slow2:
slow = slow.next
slow2 = slow2.next
index += 1
return slow | [
"[email protected]"
] | |
4df7849c6844bd581bb8841111f635cbbab50830 | 4dfd539c530c5cff6874f2fa0c06ffd893212ad3 | /tencentcloud/chdfs/v20201112/errorcodes.py | d4604add29d3d07f8131cc49457ff2038e6d3425 | [] | no_license | TencentCloud/tencentcloud-sdk-python-intl-en | aac605d1a0458b637ba29eb49f6f166fe844a269 | 042b4d7fb609d4d240728197901b46008b35d4b0 | refs/heads/master | 2023-09-01T19:39:27.436454 | 2023-09-01T04:02:15 | 2023-09-01T04:02:15 | 227,834,644 | 4 | 6 | null | 2023-07-17T08:56:56 | 2019-12-13T12:23:52 | Python | UTF-8 | Python | false | false | 3,853 | py | # -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Operation failed.
FAILEDOPERATION = 'FailedOperation'
# The permission group has been bound.
FAILEDOPERATION_ACCESSGROUPBOUND = 'FailedOperation.AccessGroupBound'
# The account balance is insufficient.
FAILEDOPERATION_ACCOUNTINSUFFICIENTBALANCE = 'FailedOperation.AccountInsufficientBalance'
# The account identity is not verified.
FAILEDOPERATION_ACCOUNTUNAUTHENTICATED = 'FailedOperation.AccountUnauthenticated'
# The file system is not empty.
FAILEDOPERATION_FILESYSTEMNOTEMPTY = 'FailedOperation.FileSystemNotEmpty'
# The file system capacity after change is smaller than the currently used capacity.
FAILEDOPERATION_QUOTALESSTHANCURRENTUSED = 'FailedOperation.QuotaLessThanCurrentUsed'
# Internal error.
INTERNALERROR = 'InternalError'
# Incorrect parameter.
INVALIDPARAMETER = 'InvalidParameter'
# Incorrect parameter value.
INVALIDPARAMETERVALUE = 'InvalidParameterValue'
# Incorrect parameter value: AccessGroupId.
INVALIDPARAMETERVALUE_INVALIDACCESSGROUPID = 'InvalidParameterValue.InvalidAccessGroupId'
# Incorrect parameter value: AccessGroupName.
INVALIDPARAMETERVALUE_INVALIDACCESSGROUPNAME = 'InvalidParameterValue.InvalidAccessGroupName'
# Incorrect parameter value: `Address` of the permission rule.
INVALIDPARAMETERVALUE_INVALIDACCESSRULEADDRESS = 'InvalidParameterValue.InvalidAccessRuleAddress'
# Incorrect parameter value: CapacityQuota.
INVALIDPARAMETERVALUE_INVALIDCAPACITYQUOTA = 'InvalidParameterValue.InvalidCapacityQuota'
# Incorrect parameter value: Description.
INVALIDPARAMETERVALUE_INVALIDDESCRIPTION = 'InvalidParameterValue.InvalidDescription'
# Incorrect parameter value: FileSystemId.
INVALIDPARAMETERVALUE_INVALIDFILESYSTEMID = 'InvalidParameterValue.InvalidFileSystemId'
# Incorrect parameter value: FileSystemName.
INVALIDPARAMETERVALUE_INVALIDFILESYSTEMNAME = 'InvalidParameterValue.InvalidFileSystemName'
# Incorrect parameter value: MountPointId.
INVALIDPARAMETERVALUE_INVALIDMOUNTPOINTID = 'InvalidParameterValue.InvalidMountPointId'
# Incorrect parameter value: MountPointName.
INVALIDPARAMETERVALUE_INVALIDMOUNTPOINTNAME = 'InvalidParameterValue.InvalidMountPointName'
# Incorrect parameter value: VpcId.
INVALIDPARAMETERVALUE_INVALIDVPCID = 'InvalidParameterValue.InvalidVpcId'
# The quota limit is exceeded.
LIMITEXCEEDED = 'LimitExceeded'
# Missing parameter.
MISSINGPARAMETER = 'MissingParameter'
# The resource is in use.
RESOURCEINUSE = 'ResourceInUse'
# The resource does not exist.
RESOURCENOTFOUND = 'ResourceNotFound'
# The permission group does not exist.
RESOURCENOTFOUND_ACCESSGROUPNOTEXISTS = 'ResourceNotFound.AccessGroupNotExists'
# The permission rule does not exist.
RESOURCENOTFOUND_ACCESSRULENOTEXISTS = 'ResourceNotFound.AccessRuleNotExists'
# The file system does not exist.
RESOURCENOTFOUND_FILESYSTEMNOTEXISTS = 'ResourceNotFound.FileSystemNotExists'
# The mount point does not exist.
RESOURCENOTFOUND_MOUNTPOINTNOTEXISTS = 'ResourceNotFound.MountPointNotExists'
# The VPC does not exist.
RESOURCENOTFOUND_VPCNOTEXISTS = 'ResourceNotFound.VpcNotExists'
# The resource is unavailable.
RESOURCEUNAVAILABLE = 'ResourceUnavailable'
# Unauthorized operation.
UNAUTHORIZEDOPERATION = 'UnauthorizedOperation'
| [
"[email protected]"
] | |
873f399a3fc2fb55ed3c9320f9bdce8d298bc065 | 474e74c654916d0a1b0311fc80eff206968539b1 | /venv/Lib/site-packages/asposewordscloud/models/paragraph_link_collection_response.py | f18fa21cf6270818d46552834022303a45595eff | [] | no_license | viktor-tchemodanov/Training_Tasks_Python_Cloud | 4592cf61c2f017b314a009c135340b18fa23fc8f | b7e6afab4e9b76bc817ef216f12d2088447bd4cd | refs/heads/master | 2020-09-04T10:39:23.023363 | 2019-11-05T10:36:45 | 2019-11-05T10:36:45 | 219,712,295 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,084 | py | # coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose" file="ParagraphLinkCollectionResponse.py">
# Copyright (c) 2018 Aspose.Words for Cloud
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import pprint
import re # noqa: F401
import six
class ParagraphLinkCollectionResponse(object):
"""This response should be returned by the service when handling: GET http://api.aspose.com/v1.1/words/Test.doc/paragraphs
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'code': 'int',
'status': 'str',
'paragraphs': 'ParagraphLinkCollection'
}
attribute_map = {
'code': 'Code',
'status': 'Status',
'paragraphs': 'Paragraphs'
}
def __init__(self, code=None, status=None, paragraphs=None): # noqa: E501
"""ParagraphLinkCollectionResponse - a model defined in Swagger""" # noqa: E501
self._code = None
self._status = None
self._paragraphs = None
self.discriminator = None
if code is not None:
self.code = code
if status is not None:
self.status = status
if paragraphs is not None:
self.paragraphs = paragraphs
@property
def code(self):
"""Gets the code of this ParagraphLinkCollectionResponse. # noqa: E501
Response status code. # noqa: E501
:return: The code of this ParagraphLinkCollectionResponse. # noqa: E501
:rtype: int
"""
return self._code
@code.setter
def code(self, code):
"""Sets the code of this ParagraphLinkCollectionResponse.
Response status code. # noqa: E501
:param code: The code of this ParagraphLinkCollectionResponse. # noqa: E501
:type: int
"""
if code is None:
raise ValueError("Invalid value for `code`, must not be `None`") # noqa: E501
self._code = code
@property
def status(self):
"""Gets the status of this ParagraphLinkCollectionResponse. # noqa: E501
Response status. # noqa: E501
:return: The status of this ParagraphLinkCollectionResponse. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this ParagraphLinkCollectionResponse.
Response status. # noqa: E501
:param status: The status of this ParagraphLinkCollectionResponse. # noqa: E501
:type: str
"""
self._status = status
@property
def paragraphs(self):
"""Gets the paragraphs of this ParagraphLinkCollectionResponse. # noqa: E501
Collection of paragraphs # noqa: E501
:return: The paragraphs of this ParagraphLinkCollectionResponse. # noqa: E501
:rtype: ParagraphLinkCollection
"""
return self._paragraphs
@paragraphs.setter
def paragraphs(self, paragraphs):
"""Sets the paragraphs of this ParagraphLinkCollectionResponse.
Collection of paragraphs # noqa: E501
:param paragraphs: The paragraphs of this ParagraphLinkCollectionResponse. # noqa: E501
:type: ParagraphLinkCollection
"""
self._paragraphs = paragraphs
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ParagraphLinkCollectionResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
b2001f4905ca18d64754a9a6aafb71893fbb0f10 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_3/Luca.Paterlini/C.py | cd79d12c9e0577d934dba12922fbf43c13a8215c | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 1,364 | py | import math
def AtkinSieve (limit):
results = [2,3,5]
sieve = [False]*(limit+1)
factor = int(math.sqrt(limit))+1
for i in range(1,factor):
for j in range(1, factor):
n = 4*i**2+j**2
if (n <= limit) and (n % 12 == 1 or n % 12 == 5):
sieve[n] = not sieve[n]
n = 3*i**2+j**2
if (n <= limit) and (n % 12 == 7):
sieve[n] = not sieve[n]
if i>j:
n = 3*i**2-j**2
if (n <= limit) and (n % 12 == 11):
sieve[n] = not sieve[n]
for index in range(5,factor):
if sieve[index]:
for jndex in range(index**2, limit, index**2):
sieve[jndex] = False
for index in range(7,limit):
if sieve[index]:
results.append(index)
return results
def conv_base(s,b,l):
r=0
for i in xrange(l):r=r*b+int(s[i])
return r
def lowest_div(n,ps):
for c in ps:
if n%c==0: return c
return -1
prime_sieve=AtkinSieve(10**6)
input()
N,J=map(int,raw_input().split())
u=0
print "Case #1:"
while J>0:
u+=1
q=bin(u)[2:]
s='1'+'0'*(N-2-len(q))+q+'1'
v=[]
for c in xrange(2,11): v.append(conv_base(s,c,N))
v=[lowest_div(x,prime_sieve) for x in v]
if all(i>0 for i in v):
print s,' '.join([str(x) for x in v]);J-=1
| [
"[[email protected]]"
] | |
b7e6ccbf91282fd4b1135b33210324ead1541bbf | 50008b3b7fb7e14f793e92f5b27bf302112a3cb4 | /recipes/Python/577619_user_and_root_directory_logfile/recipe-577619.py | a1ea4b4ab355197464452fb26ca1eb8516cd6dac | [
"MIT"
] | permissive | betty29/code-1 | db56807e19ac9cfe711b41d475a322c168cfdca6 | d097ca0ad6a6aee2180d32dce6a3322621f655fd | refs/heads/master | 2023-03-14T08:15:47.492844 | 2021-02-24T15:39:59 | 2021-02-24T15:39:59 | 341,878,663 | 0 | 0 | MIT | 2021-02-24T15:40:00 | 2021-02-24T11:31:15 | Python | UTF-8 | Python | false | false | 339 | py | #! usr/bin/python
import dircache
import getpass
import time
logfile = open("spam.txt", "w+")
localtime = time.asctime( time.localtime(time.time()) )
print >> logfile, 'local current time :', localtime
usr = getpass.getuser()
print >> logfile, 'current user :' + usr
lst = dircache.listdir('/')
print >> logfile, lst
logfile.close()
| [
"[email protected]"
] | |
f61dd5f504fce6b9b5c5368af402735f80c34ca2 | 7d85c42e99e8009f63eade5aa54979abbbe4c350 | /game/lib/coginvasion/distributed/PlayGame.py | 82e93f8b2d73561ef231f29acb3acbd8bdb2e18f | [] | no_license | ToontownServerArchive/Cog-Invasion-Online-Alpha | 19c0454da87e47f864c0a5cb8c6835bca6923f0e | 40498d115ed716f1dec12cf40144015c806cc21f | refs/heads/master | 2023-03-25T08:49:40.878384 | 2016-07-05T07:09:36 | 2016-07-05T07:09:36 | 348,172,701 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,364 | py | # Filename: PlayGame.py
# Created by: blach (28Nov14)
from lib.coginvasion.globals import CIGlobals
from lib.coginvasion.distributed.CogInvasionMsgTypes import *
from direct.fsm.ClassicFSM import ClassicFSM
from direct.fsm.State import State
from direct.fsm.StateData import StateData
from direct.directnotify.DirectNotifyGlobal import directNotify
from lib.coginvasion.hood import ZoneUtil
from lib.coginvasion.hood import TTHood
from lib.coginvasion.hood import MGHood
from lib.coginvasion.hood import BRHood
from lib.coginvasion.hood import DLHood
from lib.coginvasion.hood import MLHood
from lib.coginvasion.hood import DGHood
from lib.coginvasion.hood import DDHood
from lib.coginvasion.hood import CTCHood
from lib.coginvasion.hood.QuietZoneState import QuietZoneState
from lib.coginvasion.dna.DNALoader import *
from panda3d.core import *
class PlayGame(StateData):
notify = directNotify.newCategory('PlayGame')
Hood2HoodClass = {CIGlobals.ToontownCentral: TTHood.TTHood,
CIGlobals.MinigameArea: MGHood.MGHood,
CIGlobals.TheBrrrgh: BRHood.BRHood,
CIGlobals.DonaldsDreamland: DLHood.DLHood,
CIGlobals.MinniesMelodyland: MLHood.MLHood,
CIGlobals.DaisyGardens: DGHood.DGHood,
CIGlobals.DonaldsDock: DDHood.DDHood,
CIGlobals.BattleTTC: CTCHood.CTCHood}
Hood2HoodState = {CIGlobals.ToontownCentral: 'TTHood',
CIGlobals.MinigameArea: 'MGHood',
CIGlobals.TheBrrrgh: 'BRHood',
CIGlobals.DonaldsDreamland: 'DLHood',
CIGlobals.MinniesMelodyland: 'MLHood',
CIGlobals.DaisyGardens: 'DGHood',
CIGlobals.DonaldsDock: 'DDHood',
CIGlobals.BattleTTC: 'CTCHood'}
def __init__(self, parentFSM, doneEvent):
StateData.__init__(self, "playGameDone")
self.doneEvent = doneEvent
self.fsm = ClassicFSM('World', [State('off', self.enterOff, self.exitOff, ['quietZone']),
State('quietZone', self.enterQuietZone, self.exitQuietZone, ['TTHood',
'BRHood', 'DLHood', 'MLHood', 'DGHood', 'DDHood', 'MGHood', 'CTCHood']),
State('TTHood', self.enterTTHood, self.exitTTHood, ['quietZone']),
State('BRHood', self.enterBRHood, self.exitBRHood, ['quietZone']),
State('DLHood', self.enterDLHood, self.exitDLHood, ['quietZone']),
State('MLHood', self.enterMLHood, self.exitMLHood, ['quietZone']),
State('DGHood', self.enterDGHood, self.exitDGHood, ['quietZone']),
State('DDHood', self.enterDDHood, self.exitDDHood, ['quietZone']),
State('MGHood', self.enterMGHood, self.exitMGHood, ['quietZone']),
State('CTCHood', self.enterCTCHood, self.exitCTCHood, ['quietZone'])],
'off', 'off')
self.fsm.enterInitialState()
self.parentFSM = parentFSM
self.parentFSM.getStateNamed('playGame').addChild(self.fsm)
self.hoodDoneEvent = 'hoodDone'
self.hood = None
self.quietZoneDoneEvent = uniqueName('quietZoneDone')
self.quietZoneStateData = None
self.place = None
self.lastHood = None
self.suitManager = None
def enter(self, hoodId, zoneId, avId):
StateData.enter(self)
whereName = ZoneUtil.getWhereName(zoneId)
loaderName = ZoneUtil.getLoaderName(zoneId)
self.fsm.request('quietZone', [{'zoneId': zoneId,
'hoodId': hoodId,
'where': whereName,
'how': 'teleportIn',
'avId': avId,
'shardId': None,
'loader': loaderName}])
def exit(self):
StateData.exit(self)
def getCurrentWorldName(self):
return self.fsm.getCurrentState().getName()
def enterOff(self):
pass
def exitOff(self):
pass
def enterCTCHood(self, requestStatus):
self.accept(self.hoodDoneEvent, self.handleHoodDone)
self.hood.enter(requestStatus)
def exitCTCHood(self):
self.ignore(self.hoodDoneEvent)
self.hood.exit()
self.hood.unload()
self.hood = None
self.lastHood = CIGlobals.ToontownCentral
def enterDDHood(self, requestStatus):
self.accept(self.hoodDoneEvent, self.handleHoodDone)
self.hood.enter(requestStatus)
def exitDDHood(self):
self.ignore(self.hoodDoneEvent)
self.hood.exit()
self.hood.unload()
self.hood = None
self.lastHood = CIGlobals.DonaldsDock
def enterDGHood(self, requestStatus):
self.accept(self.hoodDoneEvent, self.handleHoodDone)
self.hood.enter(requestStatus)
def exitDGHood(self):
self.ignore(self.hoodDoneEvent)
self.hood.exit()
self.hood.unload()
self.hood = None
self.lastHood = CIGlobals.DaisyGardens
def enterMLHood(self, requestStatus):
self.accept(self.hoodDoneEvent, self.handleHoodDone)
self.hood.enter(requestStatus)
def exitMLHood(self):
self.ignore(self.hoodDoneEvent)
self.hood.exit()
self.hood.unload()
self.hood = None
self.lastHood = CIGlobals.MinniesMelodyland
def enterDLHood(self, requestStatus):
self.accept(self.hoodDoneEvent, self.handleHoodDone)
self.hood.enter(requestStatus)
def exitDLHood(self):
self.ignore(self.hoodDoneEvent)
self.hood.exit()
self.hood.unload()
self.hood = None
self.lastHood = CIGlobals.DonaldsDreamland
def enterBRHood(self, requestStatus):
self.accept(self.hoodDoneEvent, self.handleHoodDone)
self.hood.enter(requestStatus)
def exitBRHood(self):
self.ignore(self.hoodDoneEvent)
self.hood.exit()
self.hood.unload()
self.hood = None
self.lastHood = CIGlobals.TheBrrrgh
def enterTTHood(self, requestStatus):
self.accept(self.hoodDoneEvent, self.handleHoodDone)
self.hood.enter(requestStatus)
def exitTTHood(self):
self.ignore(self.hoodDoneEvent)
self.hood.exit()
self.hood.unload()
self.hood = None
self.lastHood = CIGlobals.ToontownCentral
def enterMGHood(self, requestStatus):
self.accept(self.hoodDoneEvent, self.handleHoodDone)
self.hood.enter(requestStatus)
def exitMGHood(self):
self.ignore(self.hoodDoneEvent)
self.hood.exit()
self.hood.unload()
self.hood = None
self.lastHood = CIGlobals.MinigameArea
def handleHoodDone(self):
doneStatus = self.hood.getDoneStatus()
if doneStatus['zoneId'] == None:
self.doneStatus = doneStatus
messenger.send(self.doneEvent)
else:
self.fsm.request('quietZone', [doneStatus])
def loadDNAStore(self):
if hasattr(self, 'dnaStore'):
self.dnaStore.reset_nodes()
self.dnaStore.reset_hood_nodes()
self.dnaStore.reset_place_nodes()
self.dnaStore.reset_hood()
self.dnaStore.reset_fonts()
self.dnaStore.reset_DNA_vis_groups()
self.dnaStore.reset_textures()
self.dnaStore.reset_block_numbers()
self.dnaStore.reset_block_zones()
self.dnaStore.reset_suit_points()
del self.dnaStore
self.dnaStore = DNAStorage()
loadDNAFile(self.dnaStore, 'phase_4/dna/storage.pdna')
self.dnaStore.storeFont('humanist', CIGlobals.getToonFont())
self.dnaStore.storeFont('mickey', CIGlobals.getMickeyFont())
self.dnaStore.storeFont('suit', CIGlobals.getSuitFont())
loadDNAFile(self.dnaStore, 'phase_3.5/dna/storage_interior.pdna')
def enterQuietZone(self, requestStatus):
self.acceptOnce(self.quietZoneDoneEvent, self.handleQuietZoneDone, [requestStatus])
self.acceptOnce('enteredQuietZone', self.handleEnteredQuietZone, [requestStatus])
self.quietZoneStateData = QuietZoneState(self.quietZoneDoneEvent, 0)
self.quietZoneStateData.load()
self.quietZoneStateData.enter(requestStatus)
def handleEnteredQuietZone(self, requestStatus):
hoodId = requestStatus['hoodId']
hoodClass = self.Hood2HoodClass[hoodId]
base.transitions.noTransitions()
loader.beginBulkLoad('hood', hoodId, 100)
self.loadDNAStore()
self.hood = hoodClass(self.fsm, self.hoodDoneEvent, self.dnaStore, hoodId)
self.hood.load()
hoodId = requestStatus['hoodId']
hoodState = self.Hood2HoodState[hoodId]
self.fsm.request(hoodState, [requestStatus], exitCurrent = 0)
self.quietZoneStateData.fsm.request('waitForSetZoneResponse')
def handleQuietZoneDone(self, requestStatus):
self.hood.enterTheLoader(requestStatus)
self.hood.loader.enterThePlace(requestStatus)
loader.endBulkLoad('hood')
self.exitQuietZone()
def exitQuietZone(self):
self.ignore('enteredQuietZone')
self.ignore(self.quietZoneDoneEvent)
self.quietZoneStateData.exit()
self.quietZoneStateData.unload()
self.quietZoneStateData = None
def setPlace(self, place):
self.place = place
def getPlace(self):
return self.place
| [
"[email protected]"
] | |
3e237a3b618f6babfcc45fed3d29a91f5c1caf5e | 5cb9dccbcccb8a2137368dd0615fe3e3c7761707 | /simulations/kinova/build/moveit_ros_visualization/catkin_generated/pkg.installspace.context.pc.py | be8bbf82db3fed001402ea244273aafcf024b20f | [] | no_license | Simon-Steinmann/sim2real-modular-RL-project | b2467a393014e106043f6128a026f5eac934a83d | 4027590ac94de2d5c914731c09efcf2f318b9ca3 | refs/heads/master | 2020-07-29T01:30:56.450919 | 2019-10-12T09:33:00 | 2019-10-12T09:33:00 | 209,605,548 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,016 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/acis/sim2real/simulations/kinova/install/include;/usr/include/eigen3".split(';') if "/home/acis/sim2real/simulations/kinova/install/include;/usr/include/eigen3" != "" else []
PROJECT_CATKIN_DEPENDS = "moveit_ros_planning_interface;moveit_ros_robot_interaction;object_recognition_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lmoveit_motion_planning_rviz_plugin_core;-lmoveit_planning_scene_rviz_plugin_core;-lmoveit_robot_state_rviz_plugin_core;-lmoveit_rviz_plugin_render_tools;-lmoveit_trajectory_rviz_plugin_core".split(';') if "-lmoveit_motion_planning_rviz_plugin_core;-lmoveit_planning_scene_rviz_plugin_core;-lmoveit_robot_state_rviz_plugin_core;-lmoveit_rviz_plugin_render_tools;-lmoveit_trajectory_rviz_plugin_core" != "" else []
PROJECT_NAME = "moveit_ros_visualization"
PROJECT_SPACE_DIR = "/home/acis/sim2real/simulations/kinova/install"
PROJECT_VERSION = "1.0.1"
| [
"[email protected]"
] | |
052c2a2cb51a4e27408d96c8675bf650c28a11d6 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /3pzKWEg5oiaMemDdP_20.py | 8a2a185b2383fd368b4c800327192066c7c46a25 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 407 | py |
def most_expensive_item(products):
Things = []
for x in products.keys():
Things.append(x)
Worth = []
for y in products.values():
Worth.append(y)
Highest = max(Worth)
Counter = 0
Length = len(Things)
while (Counter < Length):
Item = Things[Counter]
Money = Worth[Counter]
if (Money == Highest):
return Item
else:
Counter += 1
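# An equivalent, more idiomatic form (same first-max tie behaviour, since
# both scan the dict in insertion order); added here for illustration:
def most_expensive_item_idiomatic(products):
    return max(products, key=products.get)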
| [
"[email protected]"
] | |
906cd4d8ad7c433c507a091c53dfd90fe4514f34 | 7f53a1ba1920a5301ca325d4faf480f3799c0a48 | /merger_2012_emb.py | 654e169fbd8f372fa53edddcf0d02d83b14ee90c | [] | no_license | rmanzoni/tools | a7fe8083628954f7f02e80add1d3dd761720e8e6 | e2189860d26be2a4276ec2ca3fe220e90adf9158 | refs/heads/master | 2021-01-01T18:37:33.731578 | 2015-04-15T13:46:12 | 2015-04-15T13:46:12 | 18,681,748 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,254 | py | import os
import ROOT
from ROOT import gROOT, gStyle, TFile, gDirectory
gROOT.SetBatch(True)
#for mass in [110,115,120,125,130,135,140,145] :
for mass in [125] :
print "Higgs mass =", str(mass)
# search in current dir
matches = []
dirList = os.listdir(os.getcwd())
for fname in dirList:
if str(fname).find('mH'+str(mass)) > 0 and str(fname).find('for_smoothing_') < 0 :
if ( str(fname).find("BOOSTED") > 0 or str(fname).find("VBF") > 0 ) :
matches.append(fname)
for t in ["VBF","BOOSTED"] :
Files = []
for m in matches :
if str(m).find(t) > 0 :
if str(m).find("svfitMass.root") > 0 :
noShift = TFile.Open(m,'read')
Files.append(noShift)
elif str(m).find("svfitMass*1.03.root") > 0 :
upShift = TFile.Open(m,'read')
Files.append(upShift)
elif str(m).find("svfitMass*0.97.root") > 0 :
doShift = TFile.Open(m,'read')
Files.append(doShift)
elif str(m).find("svfitMass*1.06.root") > 0 :
upShiftem = TFile.Open(m,'read')
Files.append(upShiftem)
if t == "VBF" :
cat = "SM2"
elif t == "BOOSTED" :
cat = "SM1"
print 'category: ',t, cat
folderName = "LimitInputs"
folderList = os.listdir(os.getcwd())
found = False
for f1 in folderList :
if str(f1) == folderName :
found = True
if found == False :
os.mkdir(folderName)
if str(m).find(t) < 0 : continue
Shifted = TFile.Open(str(folderName+"/tauTau_2012_"+cat+"_mH"+str(mass)+".root"),'recreate')
Shifted.mkdir(str("tauTau_2012_"+cat))
for h in Files :
print 'File name: ',h.GetName()
h.cd(str("tauTau_"+cat))
dirList = gDirectory.GetListOfKeys()
for k1 in dirList :
histo = k1.ReadObj()
Shifted.cd(str("tauTau_2012_"+cat))
histo.Write()
for j in Files :
j.Close()
Shifted.Close()
print '+++++++++++'
print '+ end job +'
print '+++++++++++'
# import fnmatch
# search through dir and subdirs
# matches = []
# for root, dirnames, filenames in os.walk(os.getcwd()):
# for filename in fnmatch.filter(filenames, '*VBF*'):
# matches.append(os.path.join(root, filename))
| [
"[email protected]"
] | |
f80430b48ec9e0b71e51fbfed5dd8c8bcdabbbe4 | 42e8c0992fd845237fa7b1baef494bfb6abc9dba | /ui/data_input_panel.py | 7dd2b4764971de4b2bd9fc109be70c082724291f | [] | no_license | mx1001/animation_nodes | b5ae336512bb43f40e6ca5276a4e05acb5fdc81b | b77b96d991f2b26c03bcbeef4a9fa8a09173ea4f | refs/heads/master | 2020-02-26T17:46:05.676451 | 2016-03-09T15:22:01 | 2016-03-09T15:22:01 | 54,067,761 | 5 | 0 | null | 2016-03-16T21:27:54 | 2016-03-16T21:27:54 | null | UTF-8 | Python | false | false | 554 | py | import bpy
from .. tree_info import getNodesByType
class DataInputPanel(bpy.types.Panel):
bl_idname = "an_data_input_panel"
bl_label = "Data Input"
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
bl_category = "AN"
def draw(self, context):
layout = self.layout
nodes = getNodesByType("an_DataInputNode")
for node in nodes:
if not node.showInViewport: continue
socket = node.inputs[0]
socket.drawSocket(layout, text = node.label, drawType = "TEXT_PROPERTY_OR_NONE")
| [
"[email protected]"
] | |
62c8e000ff730bcbea4570291d047b650df3c345 | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog/optimized_44485.py | ad2e839330e7dfc437fe5ef7dea9d16f2ba0db61 | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,844 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((448.215, 373.671, 502.62), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_0" not in marker_sets:
s=new_marker_set('Cog2_0')
marker_sets["Cog2_0"]=s
s= marker_sets["Cog2_0"]
mark=s.place_marker((464.79, 386.681, 568.915), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_1" not in marker_sets:
s=new_marker_set('Cog2_1')
marker_sets["Cog2_1"]=s
s= marker_sets["Cog2_1"]
mark=s.place_marker((478.084, 395.023, 648.801), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((402.568, 287.673, 599.847), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((530.933, 451.66, 826.114), (0.89, 0.1, 0.1), 18.4716)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((451.427, 387.613, 546.741), (1, 1, 0), 18.4716)
if "Cog3_0" not in marker_sets:
s=new_marker_set('Cog3_0')
marker_sets["Cog3_0"]=s
s= marker_sets["Cog3_0"]
mark=s.place_marker((450.962, 387.769, 545.766), (1, 1, 0.2), 17.1475)
if "Cog3_1" not in marker_sets:
s=new_marker_set('Cog3_1')
marker_sets["Cog3_1"]=s
s= marker_sets["Cog3_1"]
mark=s.place_marker((446.504, 405.272, 524.516), (1, 1, 0.2), 17.1475)
if "Cog3_2" not in marker_sets:
s=new_marker_set('Cog3_2')
marker_sets["Cog3_2"]=s
s= marker_sets["Cog3_2"]
mark=s.place_marker((421.767, 415.544, 533.154), (1, 1, 0.2), 17.1475)
if "Cog3_3" not in marker_sets:
s=new_marker_set('Cog3_3')
marker_sets["Cog3_3"]=s
s= marker_sets["Cog3_3"]
mark=s.place_marker((398.971, 403.391, 544.622), (1, 1, 0.2), 17.1475)
if "Cog3_4" not in marker_sets:
s=new_marker_set('Cog3_4')
marker_sets["Cog3_4"]=s
s= marker_sets["Cog3_4"]
mark=s.place_marker((383.678, 413.102, 523.117), (1, 1, 0.2), 17.1475)
if "Cog3_5" not in marker_sets:
s=new_marker_set('Cog3_5')
marker_sets["Cog3_5"]=s
s= marker_sets["Cog3_5"]
mark=s.place_marker((368.115, 429.272, 540.32), (1, 1, 0.2), 17.1475)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((463.815, 385.186, 521.177), (1, 1, 0.4), 18.4716)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((277.179, 480.543, 560.233), (1, 1, 0.4), 18.4716)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((389.144, 515.459, 724.846), (0, 0, 0.8), 18.4716)
if "Cog4_0" not in marker_sets:
s=new_marker_set('Cog4_0')
marker_sets["Cog4_0"]=s
s= marker_sets["Cog4_0"]
mark=s.place_marker((389.144, 515.459, 724.846), (0, 0, 0.8), 17.1475)
if "Cog4_1" not in marker_sets:
s=new_marker_set('Cog4_1')
marker_sets["Cog4_1"]=s
s= marker_sets["Cog4_1"]
mark=s.place_marker((400.019, 499.856, 704.16), (0, 0, 0.8), 17.1475)
if "Cog4_2" not in marker_sets:
s=new_marker_set('Cog4_2')
marker_sets["Cog4_2"]=s
s= marker_sets["Cog4_2"]
mark=s.place_marker((407.894, 480.448, 685.382), (0, 0, 0.8), 17.1475)
if "Cog4_3" not in marker_sets:
s=new_marker_set('Cog4_3')
marker_sets["Cog4_3"]=s
s= marker_sets["Cog4_3"]
mark=s.place_marker((419.822, 481.174, 659.925), (0, 0, 0.8), 17.1475)
if "Cog4_4" not in marker_sets:
s=new_marker_set('Cog4_4')
marker_sets["Cog4_4"]=s
s= marker_sets["Cog4_4"]
mark=s.place_marker((430.114, 457.122, 648.593), (0, 0, 0.8), 17.1475)
if "Cog4_5" not in marker_sets:
s=new_marker_set('Cog4_5')
marker_sets["Cog4_5"]=s
s= marker_sets["Cog4_5"]
mark=s.place_marker((443.95, 449.348, 625.252), (0, 0, 0.8), 17.1475)
if "Cog4_6" not in marker_sets:
s=new_marker_set('Cog4_6')
marker_sets["Cog4_6"]=s
s= marker_sets["Cog4_6"]
mark=s.place_marker((455.987, 439.318, 601.527), (0, 0, 0.8), 17.1475)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((233.64, 508.462, 710.824), (0, 0, 0.8), 18.4716)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((666.939, 349.889, 475.745), (0, 0, 0.8), 18.4716)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((485.233, 433.619, 621.765), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_0" not in marker_sets:
s=new_marker_set('Cog5_0')
marker_sets["Cog5_0"]=s
s= marker_sets["Cog5_0"]
mark=s.place_marker((485.233, 433.619, 621.765), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_1" not in marker_sets:
s=new_marker_set('Cog5_1')
marker_sets["Cog5_1"]=s
s= marker_sets["Cog5_1"]
mark=s.place_marker((464.027, 413.439, 623.683), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_2" not in marker_sets:
s=new_marker_set('Cog5_2')
marker_sets["Cog5_2"]=s
s= marker_sets["Cog5_2"]
mark=s.place_marker((458.301, 384.807, 624.985), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_3" not in marker_sets:
s=new_marker_set('Cog5_3')
marker_sets["Cog5_3"]=s
s= marker_sets["Cog5_3"]
mark=s.place_marker((465.303, 363.728, 643.478), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((444.821, 321.096, 527.813), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((488.001, 400.434, 760.614), (0.3, 0.3, 0.3), 18.4716)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((456.986, 360.146, 562.353), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_0" not in marker_sets:
s=new_marker_set('Cog6_0')
marker_sets["Cog6_0"]=s
s= marker_sets["Cog6_0"]
mark=s.place_marker((456.92, 359.898, 562.347), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_1" not in marker_sets:
s=new_marker_set('Cog6_1')
marker_sets["Cog6_1"]=s
s= marker_sets["Cog6_1"]
mark=s.place_marker((428.883, 360.578, 559.028), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_2" not in marker_sets:
s=new_marker_set('Cog6_2')
marker_sets["Cog6_2"]=s
s= marker_sets["Cog6_2"]
mark=s.place_marker((414.279, 382.726, 568.568), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_3" not in marker_sets:
s=new_marker_set('Cog6_3')
marker_sets["Cog6_3"]=s
s= marker_sets["Cog6_3"]
mark=s.place_marker((422.678, 404.638, 583.926), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_4" not in marker_sets:
s=new_marker_set('Cog6_4')
marker_sets["Cog6_4"]=s
s= marker_sets["Cog6_4"]
mark=s.place_marker((426.82, 428.293, 569.297), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_5" not in marker_sets:
s=new_marker_set('Cog6_5')
marker_sets["Cog6_5"]=s
s= marker_sets["Cog6_5"]
mark=s.place_marker((408.78, 442.883, 553.56), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_6" not in marker_sets:
s=new_marker_set('Cog6_6')
marker_sets["Cog6_6"]=s
s= marker_sets["Cog6_6"]
mark=s.place_marker((391.342, 452.132, 533.736), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((475.974, 457.917, 546.513), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((305.133, 443.399, 522.576), (0.21, 0.49, 0.72), 18.4716)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((509.721, 430.9, 562.748), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_0" not in marker_sets:
s=new_marker_set('Cog7_0')
marker_sets["Cog7_0"]=s
s= marker_sets["Cog7_0"]
mark=s.place_marker((501.996, 410.394, 574.058), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_1" not in marker_sets:
s=new_marker_set('Cog7_1')
marker_sets["Cog7_1"]=s
s= marker_sets["Cog7_1"]
mark=s.place_marker((477.358, 366.557, 603.465), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_2" not in marker_sets:
s=new_marker_set('Cog7_2')
marker_sets["Cog7_2"]=s
s= marker_sets["Cog7_2"]
mark=s.place_marker((452.066, 323.91, 634.045), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((472.756, 281.136, 569.75), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((401.913, 298.465, 723.226), (0.7, 0.7, 0.7), 18.4716)
if "Cog8_0" not in marker_sets:
s=new_marker_set('Cog8_0')
marker_sets["Cog8_0"]=s
s= marker_sets["Cog8_0"]
mark=s.place_marker((451.785, 441.396, 544.942), (1, 0.5, 0), 17.1475)
if "Cog8_1" not in marker_sets:
s=new_marker_set('Cog8_1')
marker_sets["Cog8_1"]=s
s= marker_sets["Cog8_1"]
mark=s.place_marker((461.925, 430.852, 569.157), (1, 0.5, 0), 17.1475)
if "Cog8_2" not in marker_sets:
s=new_marker_set('Cog8_2')
marker_sets["Cog8_2"]=s
s= marker_sets["Cog8_2"]
mark=s.place_marker((473.459, 413.87, 589.291), (1, 0.5, 0), 17.1475)
if "Cog8_3" not in marker_sets:
s=new_marker_set('Cog8_3')
marker_sets["Cog8_3"]=s
s= marker_sets["Cog8_3"]
mark=s.place_marker((493.392, 401.809, 606.299), (1, 0.5, 0), 17.1475)
if "Cog8_4" not in marker_sets:
s=new_marker_set('Cog8_4')
marker_sets["Cog8_4"]=s
s= marker_sets["Cog8_4"]
mark=s.place_marker((509.848, 393.316, 628.565), (1, 0.5, 0), 17.1475)
if "Cog8_5" not in marker_sets:
s=new_marker_set('Cog8_5')
marker_sets["Cog8_5"]=s
s= marker_sets["Cog8_5"]
mark=s.place_marker((524.773, 378.984, 649.202), (1, 0.5, 0), 17.1475)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((492.816, 385.418, 576.442), (1, 0.6, 0.1), 18.4716)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((559.742, 368.967, 723.785), (1, 0.6, 0.1), 18.4716)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| [
"[email protected]"
] | |
d37d447bd7ce2b1d813f28d559dadf00e8be9f92 | df25eefe4878c08b0f51f6ff19f48054ba6dbc2a | /test/espnet2/text/test_text_converter.py | 0d1f32b94924e5d24b9c95b25a217e07483a5f3e | [
"Apache-2.0"
] | permissive | sas91/espnet | 7f14a9394469993fb948758c7b0b78f76ad12cbe | 8e263d6512eb84cebeaecf6b99204c102a8252b5 | refs/heads/master | 2021-07-13T18:45:13.981483 | 2020-06-02T08:43:25 | 2020-06-02T08:43:25 | 142,748,209 | 1 | 0 | Apache-2.0 | 2018-07-29T09:37:35 | 2018-07-29T09:37:35 | null | UTF-8 | Python | false | false | 2,565 | py | from pathlib import Path
import string
import pytest
import sentencepiece as spm
from espnet2.text.char_tokenizer import CharTokenizer
from espnet2.text.sentencepiece_tokenizer import SentencepiecesTokenizer
from espnet2.text.word_tokenizer import WordTokenizer
@pytest.fixture(params=[None, " "])
def word_converter(request):
return WordTokenizer(delimiter=request.param)
@pytest.fixture
def char_converter():
return CharTokenizer(["[foo]"])
@pytest.fixture
def spm_srcs(tmp_path: Path):
input_text = tmp_path / "text"
vocabsize = len(string.ascii_letters) + 4
model_prefix = tmp_path / "model"
model = str(model_prefix) + ".model"
input_sentence_size = 100000
with input_text.open("w") as f:
f.write(string.ascii_letters + "\n")
spm.SentencePieceTrainer.Train(
f"--input={input_text} "
f"--vocab_size={vocabsize} "
f"--model_prefix={model_prefix} "
f"--input_sentence_size={input_sentence_size}"
)
sp = spm.SentencePieceProcessor()
sp.load(model)
with input_text.open("r") as f:
vocabs = {"<unk>", "▁"}
for line in f:
tokens = sp.DecodePieces(list(line.strip()))
vocabs |= set(tokens)
return model, vocabs
@pytest.fixture
def spm_converter(tmp_path, spm_srcs):
model, vocabs = spm_srcs
sp = spm.SentencePieceProcessor()
sp.load(model)
token_list = tmp_path / "token.list"
with token_list.open("w") as f:
for v in vocabs:
f.write(f"{v}\n")
return SentencepiecesTokenizer(model=model)
def test_Text2Sentencepieces_repr(spm_converter: SentencepiecesTokenizer):
print(spm_converter)
def test_Text2Sentencepieces_text2tokens(spm_converter: SentencepiecesTokenizer):
assert spm_converter.tokens2text(spm_converter.text2tokens("Hello")) == "Hello"
def test_Text2Words_repr(word_converter: WordTokenizer):
print(word_converter)
def test_Text2Words_text2tokens(word_converter: WordTokenizer):
assert word_converter.text2tokens("Hello World!! Ummm") == [
"Hello",
"World!!",
"Ummm",
]
def test_Text2Words_tokens2text(word_converter: WordTokenizer):
assert word_converter.tokens2text("Hello World!!".split()) == "Hello World!!"
def test_Text2Chars_repr(char_converter: CharTokenizer):
print(char_converter)
def test_Text2Chars_text2tokens(char_converter: CharTokenizer):
assert char_converter.text2tokens("He[foo]llo") == [
"H",
"e",
"[foo]",
"l",
"l",
"o",
]
| [
"[email protected]"
] | |
05f8026f429941abdd6ce606b334f295694c5f27 | 72f026518a27bab1d7d260914fc366cdb8559a6f | /scripts/setup.py | c4e8832eeb5e9b9f9dd28a8dbccbd2d863940b42 | [
"MIT"
] | permissive | wenlien/pyre-check | 30ca42404740517a911fba9b2e786aef38672d77 | 5d97637bacac25f0ca7659163a8617dae1c43f0e | refs/heads/master | 2023-07-06T07:39:34.156671 | 2023-06-25T17:22:01 | 2023-06-25T17:22:01 | 133,370,640 | 1 | 0 | null | 2018-05-14T14:09:04 | 2018-05-14T14:09:04 | null | UTF-8 | Python | false | false | 13,159 | py | #!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
This script provides the logic used to bootstrap a local opam
switch for building Pyre by collecting all dependencies, as well
as how to configure opam and then invoke dune for various flavors
of builds.
"""
import argparse
import logging
import os
import shutil
import subprocess
import sys
from enum import Enum
from pathlib import Path
from subprocess import CalledProcessError
from tempfile import mkdtemp
from typing import Dict, List, Mapping, NamedTuple, Optional, Type
LOG: logging.Logger = logging.getLogger(__name__)
COMPILER_VERSION = "4.14.0"
DEPENDENCIES = [
"base64.3.5.1",
"core.v0.15.1",
"core_unix.v0.15.2",
"re2.v0.15.0",
"dune.3.7.1",
"yojson.2.0.2",
"jsonm.1.0.2",
"ppx_deriving_yojson.3.7.0",
"ppx_yojson_conv.v0.15.1",
"ounit2.2.2.7",
"menhir.20220210",
"lwt.5.6.1",
"lwt_ppx.2.1.0",
"ounit2-lwt.2.2.7",
"pyre-ast.0.1.9",
"mtime.1.4.0",
"errpy.0.0.8",
]
class OCamlbuildAlreadyInstalled(Exception):
pass
class OldOpam(Exception):
pass
class BuildType(Enum):
EXTERNAL = "external"
FACEBOOK = "facebook"
def _custom_linker_option(pyre_directory: Path, build_type: BuildType) -> str:
# HACK: This is a temporary workaround for inconsistent OS installations
# in FB-internal CI. Can be removed once all fleets are upgraded.
if build_type == BuildType.FACEBOOK and sys.platform == "linux":
return (
(pyre_directory / "facebook" / "scripts" / "custom_linker_options.txt")
.read_text()
.rstrip()
)
else:
return ""
class Setup(NamedTuple):
opam_root: Path
release: bool = False
def switch_name(self) -> str:
return f"{COMPILER_VERSION}+flambda" if self.release else COMPILER_VERSION
def compiler_specification(self) -> str:
"""
Command-line argument to set the compiler version in `opam switch create ...`
The format for how to specify this changed in 4.12.0, see
https://discuss.ocaml.org/t/experimental-new-layout-for-the-ocaml-variants-packages-in-opam-repository/6779
"""
if not self.release:
return COMPILER_VERSION
else:
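            # Sketch of the value built below: with COMPILER_VERSION = 4.14.0
            # this is the single argument
            # "--packages=ocaml-variants.4.14.0+options,ocaml-options-only-flambda",
            # which `opam switch create` consumes alongside the switch name.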
return ",".join(
[
f"--packages=ocaml-variants.{COMPILER_VERSION}+options",
"ocaml-options-only-flambda",
]
)
@property
def environment_variables(self) -> Mapping[str, str]:
return os.environ
def produce_dune_file(
self, pyre_directory: Path, build_type: Optional[BuildType] = None
) -> None:
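        # source/dune.in is a template: "%VERSION%" is replaced with
        # "external" or "facebook", and "%CUSTOM_LINKER_OPTION%" is spliced
        # in, producing the concrete source/dune file the build consumes.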
if not build_type:
if (pyre_directory / "facebook").is_dir():
build_type = BuildType.FACEBOOK
else:
build_type = BuildType.EXTERNAL
with open(pyre_directory / "source" / "dune.in") as dune_in:
with open(pyre_directory / "source" / "dune", "w") as dune:
dune_data = dune_in.read()
dune.write(
dune_data.replace("%VERSION%", build_type.value).replace(
"%CUSTOM_LINKER_OPTION%",
_custom_linker_option(pyre_directory, build_type),
)
)
def check_if_preinstalled(self) -> None:
if self.environment_variables.get(
"CHECK_IF_PREINSTALLED"
) != "false" and shutil.which("ocamlc"):
ocamlc_location = self.run(["ocamlc", "-where"])
test_ocamlbuild_location = Path(ocamlc_location) / "ocamlbuild"
if test_ocamlbuild_location.is_dir():
LOG.error(
"OCamlbuild will refuse to install since it is already "
+ f"present at {test_ocamlbuild_location}."
)
LOG.error("If you want to bypass this safety check, run:")
LOG.error("CHECK_IF_PREINSTALLED=false ./scripts/setup.sh")
raise OCamlbuildAlreadyInstalled
    def already_initialized(self) -> bool:
        return self.opam_root.is_dir()
def validate_opam_version(self) -> None:
version = self.run(["opam", "--version"])
if version[:1] != "2":
LOG.error(
"Pyre only supports opam 2.0.0 and above, please update your "
+ "opam version."
)
raise OldOpam
def opam_environment_variables(self) -> Dict[str, str]:
LOG.info("Activating opam")
opam_env_result = self.run(
[
"opam",
"env",
"--yes",
"--switch",
self.switch_name(),
"--root",
self.opam_root.as_posix(),
"--set-root",
"--set-switch",
]
)
opam_environment_variables: Dict[str, str] = {}
# `opam env` produces lines of two forms:
# - comments like ": this comment, starts with a colon;"
# - lines defining and exporting env vars like "ENV_VAR=value; export ENV_VAR;"
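        # An illustrative snippet of such output (paths are made up):
        #   OPAM_SWITCH_PREFIX='/home/user/.opam/4.14.0'; export OPAM_SWITCH_PREFIX;
        #   PATH='/home/user/.opam/4.14.0/bin:/usr/bin'; export PATH;
        # The loop below keeps the part before the first ";", splits on "=",
        # and strips the surrounding quotes from the value.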
for line in opam_env_result.split("\n"):
if not line.startswith(":"):
environment_variable, quoted_value = line.split(";")[0].split("=")
value = quoted_value[1:-1]
LOG.info(f'{environment_variable}="{value}"')
opam_environment_variables[environment_variable] = value
return opam_environment_variables
def initialize_opam_switch(self) -> Mapping[str, str]:
self.check_if_preinstalled()
self.validate_opam_version()
self.run(
[
"opam",
"init",
"--bare",
"--yes",
"--disable-sandboxing",
"--root",
self.opam_root.as_posix(),
"default",
"https://opam.ocaml.org",
]
)
self.run(["opam", "update", "--root", self.opam_root.as_posix()])
self.run(
[
"opam",
"switch",
"create",
self.switch_name(),
self.compiler_specification(),
"--yes",
"--root",
self.opam_root.as_posix(),
]
)
opam_environment_variables = self.opam_environment_variables()
opam_install_command = ["opam", "install", "--yes"]
if sys.platform == "linux":
# setting `--assume-depexts` means that opam will not require a "system"
# installed version of Rust (e.g. via `dnf`` or `yum`) but will instead
# accept a version referenced on the system `$PATH`
opam_install_command.append("--assume-depexts")
self.run(
opam_install_command + DEPENDENCIES,
add_environment_variables=opam_environment_variables,
)
return opam_environment_variables
    def set_opam_switch_and_install_dependencies(
        self, rust_path: Optional[Path]
    ) -> Mapping[str, str]:
self.run(
[
"opam",
"switch",
"set",
self.switch_name(),
"--root",
self.opam_root.as_posix(),
]
)
environment_variables = self.opam_environment_variables()
if rust_path is not None:
environment_variables["PATH"] = str(rust_path) + ":" + environment_variables["PATH"]
opam_install_command = ["opam", "install", "--yes"]
if sys.platform == "linux":
            # On macOS this flag fails on Sandcastle CI with exit status 2
            # (illegal argument), which we have been unable to reproduce
            # locally, so we only pass it on Linux.
opam_install_command.append("--assume-depexts")
opam_install_command += DEPENDENCIES
self.run(
opam_install_command,
add_environment_variables=environment_variables
)
return environment_variables
def full_setup(
self,
pyre_directory: Path,
*,
run_tests: bool = False,
run_clean: bool = False,
build_type_override: Optional[BuildType] = None,
rust_path: Optional[Path] = None
) -> None:
opam_environment_variables: Mapping[
str, str
] = self.set_opam_switch_and_install_dependencies(rust_path=rust_path)
def run_in_opam_environment(command: List[str]) -> None:
self.run(
command,
current_working_directory=pyre_directory / "source",
add_environment_variables=opam_environment_variables,
)
self.produce_dune_file(pyre_directory, build_type_override)
if run_clean:
# Note: we do not run `make clean` because we want the result of the
# explicit `produce_dune_file` to remain.
# Dune 3.7 runs into `rmdir` failure when cleaning the `_build` directory
# for some reason. Manually clean the dir to work around the issue.
run_in_opam_environment(["rm", "-rf", "_build"])
if self.release:
LOG.info("Running a release build. This may take a while.")
run_in_opam_environment(["make", "release"])
if run_tests:
run_in_opam_environment(["make", "release_test"])
else:
run_in_opam_environment(["make", "dev"])
if run_tests:
run_in_opam_environment(["make", "test"])
def run(
self,
command: List[str],
current_working_directory: Optional[Path] = None,
add_environment_variables: Optional[Mapping[str, str]] = None,
) -> str:
if add_environment_variables:
environment_variables = {
**self.environment_variables,
**add_environment_variables,
}
else:
environment_variables = self.environment_variables
LOG.info(command)
try:
output = subprocess.check_output(
command,
universal_newlines=True,
cwd=current_working_directory,
env=environment_variables,
)
except CalledProcessError as called_process_error:
            LOG.error(
                f"Command {command} returned a non-zero exit code.\n"
                f"stdout: {called_process_error.stdout}\n"
                f"stderr: {called_process_error.stderr}"
            )
            raise
if output.endswith("\n"):
return output[:-1]
else:
return output
def _make_opam_root(local: bool, temporary_root: bool, default: Optional[Path]) -> Path:
home = Path.home()
home_opam = home / ".opam"
if local:
if not home_opam.is_dir():
local_opam = home / "local" / "opam"
local_opam.parent.mkdir(parents=True, exist_ok=True)
local_opam.symlink_to(home_opam, target_is_directory=True)
return home_opam
if temporary_root:
return Path(mkdtemp())
return default or home_opam
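# Resolution order implemented above: --local pins the root to ~/.opam
# (creating a ~/local/opam symlink to it on first use); otherwise
# --temporary_root uses a fresh mkdtemp() directory; otherwise the explicit
# --opam-root value, falling back to ~/.opam.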
def setup(runner_type: Type[Setup]) -> None:
logging.basicConfig(
level=logging.INFO, format="[%(asctime)s] [%(levelname)s] %(message)s"
)
parser = argparse.ArgumentParser(description="Set up Pyre.")
parser.add_argument("--pyre-directory", type=Path)
parser.add_argument("--local", action="store_true")
parser.add_argument("--temporary_root", action="store_true")
parser.add_argument("--opam-root", type=Path)
parser.add_argument("--configure", action="store_true")
parser.add_argument("--environment-only", action="store_true")
parser.add_argument("--release", action="store_true")
parser.add_argument("--build-type", type=BuildType)
parser.add_argument("--no-tests", action="store_true")
parser.add_argument("--rust-path", type=Path)
parsed = parser.parse_args()
pyre_directory = parsed.pyre_directory
if not pyre_directory:
pyre_directory = Path(__file__).parent.parent.absolute()
opam_root = _make_opam_root(parsed.local, parsed.temporary_root, parsed.opam_root)
runner = runner_type(opam_root=opam_root, release=parsed.release)
if parsed.configure:
runner.produce_dune_file(pyre_directory, parsed.build_type)
elif parsed.environment_only:
runner.produce_dune_file(pyre_directory, parsed.build_type)
runner.initialize_opam_switch()
LOG.info("Environment built successfully, stopping here as requested.")
else:
if not runner.already_initialized():
runner.initialize_opam_switch()
runner.full_setup(
pyre_directory,
run_tests=not parsed.no_tests,
build_type_override=parsed.build_type,
rust_path=parsed.rust_path
)
if __name__ == "__main__":
setup(Setup)
| [
"[email protected]"
] | |
561fbf76952e72959088ff99ae838295f3938bc7 | 479d3414e914f144fff20ee71872472ac84ca410 | /codespace/python/telegram/_files/inputfile.py | 730301869bd5e67593a4565ada2e146058b8f953 | [
"MIT",
"LicenseRef-scancode-proprietary-license"
] | permissive | tzpBingo/github-trending | 0fa4e0e08743f0683f68fd54d74eec466bc525e0 | 505014e84bdea7e2732296821028df20c0305390 | refs/heads/master | 2023-07-24T13:29:47.393940 | 2023-07-19T09:39:29 | 2023-07-19T09:39:29 | 102,687,887 | 49 | 20 | MIT | 2023-05-22T21:33:53 | 2017-09-07T03:39:42 | Python | UTF-8 | Python | false | false | 4,191 | py | #!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2023
# Leandro Toledo de Souza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains an object that represents a Telegram InputFile."""
import mimetypes
from typing import IO, Optional, Union
from uuid import uuid4
from telegram._utils.files import load_file
from telegram._utils.types import FieldTuple
_DEFAULT_MIME_TYPE = "application/octet-stream"
class InputFile:
"""This object represents a Telegram InputFile.
.. versionchanged:: 20.0
* The former attribute ``attach`` was renamed to :attr:`attach_name`.
* Method ``is_image`` was removed. If you pass :obj:`bytes` to :paramref:`obj` and would
like to have the mime type automatically guessed, please pass :paramref:`filename`
in addition.
Args:
obj (:term:`file object` | :obj:`bytes` | :obj:`str`): An open file descriptor or the files
content as bytes or string.
Note:
If :paramref:`obj` is a string, it will be encoded as bytes via
:external:obj:`obj.encode('utf-8') <str.encode>`.
.. versionchanged:: 20.0
Accept string input.
filename (:obj:`str`, optional): Filename for this InputFile.
attach (:obj:`bool`, optional): Pass :obj:`True` if the parameter this file belongs to in
the request to Telegram should point to the multipart data via an ``attach://`` URI.
            Defaults to :obj:`False`.
Attributes:
input_file_content (:obj:`bytes`): The binary content of the file to send.
attach_name (:obj:`str`): Optional. If present, the parameter this file belongs to in
            the request to Telegram should point to the multipart data via a URI of the form
            ``attach://<attach_name>``.
filename (:obj:`str`): Filename for the file to be sent.
mimetype (:obj:`str`): The mimetype inferred from the file to be sent.
"""
__slots__ = ("filename", "attach_name", "input_file_content", "mimetype")
def __init__(
self,
obj: Union[IO[bytes], bytes, str],
filename: Optional[str] = None,
attach: bool = False,
):
if isinstance(obj, bytes):
self.input_file_content: bytes = obj
elif isinstance(obj, str):
self.input_file_content = obj.encode("utf-8")
else:
reported_filename, self.input_file_content = load_file(obj)
filename = filename or reported_filename
self.attach_name: Optional[str] = "attached" + uuid4().hex if attach else None
if filename:
self.mimetype: str = (
mimetypes.guess_type(filename, strict=False)[0] or _DEFAULT_MIME_TYPE
)
else:
self.mimetype = _DEFAULT_MIME_TYPE
self.filename: str = filename or self.mimetype.replace("/", ".")
@property
def field_tuple(self) -> FieldTuple:
"""Field tuple representing the contents of the file for upload to the Telegram servers.
Returns:
Tuple[:obj:`str`, :obj:`bytes`, :obj:`str`]:
"""
return self.filename, self.input_file_content, self.mimetype
@property
def attach_uri(self) -> Optional[str]:
"""URI to insert into the JSON data for uploading the file. Returns :obj:`None`, if
:attr:`attach_name` is :obj:`None`.
"""
return f"attach://{self.attach_name}" if self.attach_name else None
| [
"[email protected]"
] | |
6afdae640dd9ad3d9adbf1cbc0c7d8cf8b7d3466 | 491c1e520a64e3ebd5349130f35047aaed1e70ec | /two pointer/680 validPalindrome.py | 3ccf25be7a1c357ec82bfd31b9cc88e976d594fb | [] | no_license | pangyouzhen/data-structure | 33a7bd7790c8db3e018114d85a137f5f3d6b92f8 | cd46cf08a580c418cc40a68bf9b32371fc69a803 | refs/heads/master | 2023-05-26T12:02:30.800301 | 2023-05-21T08:07:57 | 2023-05-21T08:07:57 | 189,315,047 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 740 | py | class Solution:
    def validPalindrome(self, s: str) -> bool:
        # Return True if s[low:high + 1] reads the same forwards and backwards.
        def checkPalindrome(low, high):
            i, j = low, high
            while i < j:
                if s[i] != s[j]:
                    return False
                i += 1
                j -= 1
            return True
        # Walk inward from both ends. At the first mismatch we are allowed to
        # delete exactly one character, so the string is valid iff skipping
        # either side of the mismatch leaves a palindrome.
        low, high = 0, len(s) - 1
        while low < high:
            if s[low] == s[high]:
                low += 1
                high -= 1
            else:
                return checkPalindrome(low + 1, high) or checkPalindrome(low, high - 1)
        return True
print(sol.validPalindrome("abca"))
assert sol.validPalindrome("abca") == True
print(sol.validPalindrome("abcca"))
| [
"[email protected]"
] |