blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f5baeac0738dfa8508464ce5bcfa5f41ca97435b
|
4d343b7e92a44b8c8eb3e3afeeeb0e1a7a699869
|
/ch4-practice/books/models.py
|
feed0423f49f8baba32c13a55d88e9fa25a7ef57
|
[] |
no_license
|
dev-iwin/book4-Django-redbook
|
dfca5395ae68df7536a9e7b64b73b582608b6eaa
|
64e0b260b50bae8bd35b918eb341725c3c3373b4
|
refs/heads/master
| 2023-03-07T05:12:26.990807 | 2021-02-20T21:15:12 | 2021-02-20T21:15:12 | 335,654,957 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 360 |
py
|
from django.db import models
# Model for example 4-26 (written on my own) ==========================
class Book(models.Model):
    """Book model for example 4-26 (a self-made practice model)."""
    # Title of the book.
    book_name = models.CharField(max_length=300)
    # 'publication_date' is the human-readable label shown in admin/forms.
    pub_date = models.DateTimeField('publication_date')
    def __str__(self):
        # Represent a book by its title in admin lists and shells.
        return self.book_name
# =============================================================
|
[
"[email protected]"
] | |
1cdbe0eee6a24955bbe72e9528b58437571dd39b
|
af0b56556b747233d9085eb51991806017e2a5eb
|
/cardpay/model/payment_response_customer.py
|
ba914e59a971e9191cee9c6f161144ad9508c0f5
|
[
"MIT"
] |
permissive
|
whereisthebabki/python-sdk-v3
|
ab39809f911e80873550c44156882c8680cb6e96
|
b756cd0761fc23cb095db4801baee53c00de9241
|
refs/heads/master
| 2020-06-22T01:02:44.377584 | 2019-07-18T13:30:26 | 2019-07-18T13:30:26 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 10,414 |
py
|
# coding: utf-8
"""
CardPay REST API
Welcome to the CardPay REST API. The CardPay API uses HTTP verbs and a REST resources endpoint structure (see more info about REST). Request and response payloads are formatted as JSON. Merchant uses API to create payments, refunds, payouts or recurrings, check or update transaction status and get information about created transactions. In API authentication process based on OAuth 2.0 standard. For recent changes see changelog section. # noqa: E501
OpenAPI spec version: 3.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class PaymentResponseCustomer(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Swagger-declared type for each model attribute.
    swagger_types = {
        'email': 'str',
        'full_name': 'str',
        'id': 'str',
        'ip': 'str',
        'locale': 'str',
        'phone': 'str'
    }
    # JSON key carrying each attribute in the API payload (1:1 here).
    attribute_map = {
        'email': 'email',
        'full_name': 'full_name',
        'id': 'id',
        'ip': 'ip',
        'locale': 'locale',
        'phone': 'phone'
    }
    def __init__(self, email=None, full_name=None, id=None, ip=None, locale=None, phone=None): # noqa: E501
        """PaymentResponseCustomer - a model defined in Swagger""" # noqa: E501
        # Private backing fields; public access goes through the
        # validating properties below.
        self._email = None
        self._full_name = None
        self._id = None
        self._ip = None
        self._locale = None
        self._phone = None
        self.discriminator = None
        # Assign only provided values so each setter's validation runs.
        if email is not None:
            self.email = email
        if full_name is not None:
            self.full_name = full_name
        if id is not None:
            self.id = id
        if ip is not None:
            self.ip = ip
        if locale is not None:
            self.locale = locale
        if phone is not None:
            self.phone = phone
    @property
    def email(self):
        """Gets the email of this PaymentResponseCustomer. # noqa: E501
        Email address of the customer (mandatory by default for 'Asia’, 'Latin America’, 'NETELLER', 'DIRECTBANKINGNGA', 'AQRCODE', 'AIRTEL', 'MPESA', 'MTN', 'UGANDAMOBILE', 'VODAFONE', 'TIGO' payment methods only)). Can be defined as optional by CardPay manager. # noqa: E501
        :return: The email of this PaymentResponseCustomer. # noqa: E501
        :rtype: str
        """
        return self._email
    @email.setter
    def email(self, email):
        """Sets the email of this PaymentResponseCustomer.
        Email address of the customer (mandatory by default for 'Asia’, 'Latin America’, 'NETELLER', 'DIRECTBANKINGNGA', 'AQRCODE', 'AIRTEL', 'MPESA', 'MTN', 'UGANDAMOBILE', 'VODAFONE', 'TIGO' payment methods only)). Can be defined as optional by CardPay manager. # noqa: E501
        :param email: The email of this PaymentResponseCustomer. # noqa: E501
        :type: str
        """
        # Validate length 1..256 when a value is supplied; None is allowed.
        if email is not None and len(email) > 256:
            raise ValueError("Invalid value for `email`, length must be less than or equal to `256`") # noqa: E501
        if email is not None and len(email) < 1:
            raise ValueError("Invalid value for `email`, length must be greater than or equal to `1`") # noqa: E501
        self._email = email
    @property
    def full_name(self):
        """Gets the full_name of this PaymentResponseCustomer. # noqa: E501
        Customer's full name (mandatory for 'Asia’ payment method only) # noqa: E501
        :return: The full_name of this PaymentResponseCustomer. # noqa: E501
        :rtype: str
        """
        return self._full_name
    @full_name.setter
    def full_name(self, full_name):
        """Sets the full_name of this PaymentResponseCustomer.
        Customer's full name (mandatory for 'Asia’ payment method only) # noqa: E501
        :param full_name: The full_name of this PaymentResponseCustomer. # noqa: E501
        :type: str
        """
        # Validate length 1..255 when a value is supplied.
        if full_name is not None and len(full_name) > 255:
            raise ValueError("Invalid value for `full_name`, length must be less than or equal to `255`") # noqa: E501
        if full_name is not None and len(full_name) < 1:
            raise ValueError("Invalid value for `full_name`, length must be greater than or equal to `1`") # noqa: E501
        self._full_name = full_name
    @property
    def id(self):
        """Gets the id of this PaymentResponseCustomer. # noqa: E501
        Customer's ID in the merchant's system # noqa: E501
        :return: The id of this PaymentResponseCustomer. # noqa: E501
        :rtype: str
        """
        return self._id
    @id.setter
    def id(self, id):
        """Sets the id of this PaymentResponseCustomer.
        Customer's ID in the merchant's system # noqa: E501
        :param id: The id of this PaymentResponseCustomer. # noqa: E501
        :type: str
        """
        # Max length 256.  NOTE(review): the `< 0` check is dead code
        # (len() is never negative) — generated from a minLength of 0.
        if id is not None and len(id) > 256:
            raise ValueError("Invalid value for `id`, length must be less than or equal to `256`") # noqa: E501
        if id is not None and len(id) < 0:
            raise ValueError("Invalid value for `id`, length must be greater than or equal to `0`") # noqa: E501
        self._id = id
    @property
    def ip(self):
        """Gets the ip of this PaymentResponseCustomer. # noqa: E501
        IP address of customer, present if wallet (terminal) settings has this option enabled. By default the option is not enabled # noqa: E501
        :return: The ip of this PaymentResponseCustomer. # noqa: E501
        :rtype: str
        """
        return self._ip
    @ip.setter
    def ip(self, ip):
        """Sets the ip of this PaymentResponseCustomer.
        IP address of customer, present if wallet (terminal) settings has this option enabled. By default the option is not enabled # noqa: E501
        :param ip: The ip of this PaymentResponseCustomer. # noqa: E501
        :type: str
        """
        # Length 1..15 (dotted-quad IPv4 textual maximum).
        if ip is not None and len(ip) > 15:
            raise ValueError("Invalid value for `ip`, length must be less than or equal to `15`") # noqa: E501
        if ip is not None and len(ip) < 1:
            raise ValueError("Invalid value for `ip`, length must be greater than or equal to `1`") # noqa: E501
        self._ip = ip
    @property
    def locale(self):
        """Gets the locale of this PaymentResponseCustomer. # noqa: E501
        Preferred locale for the payment page ([ISO 639-1](https://en.wikipedia.org/wiki/ISO_639-1) language code). The default locale will be applied if the selected locale is not supported. Supported locales are: `ru`, `en`, `zh`, `ja` # noqa: E501
        :return: The locale of this PaymentResponseCustomer. # noqa: E501
        :rtype: str
        """
        return self._locale
    @locale.setter
    def locale(self, locale):
        """Sets the locale of this PaymentResponseCustomer.
        Preferred locale for the payment page ([ISO 639-1](https://en.wikipedia.org/wiki/ISO_639-1) language code). The default locale will be applied if the selected locale is not supported. Supported locales are: `ru`, `en`, `zh`, `ja` # noqa: E501
        :param locale: The locale of this PaymentResponseCustomer. # noqa: E501
        :type: str
        """
        # No validation in the spec for this field.
        self._locale = locale
    @property
    def phone(self):
        """Gets the phone of this PaymentResponseCustomer. # noqa: E501
        Customer's phone number. Mandatory for 'Asia’ and DIRECTBANKINGNGA payment methods. For other payment methods: optional by default, can be defined as mandatory by CardPay manager. # noqa: E501
        :return: The phone of this PaymentResponseCustomer. # noqa: E501
        :rtype: str
        """
        return self._phone
    @phone.setter
    def phone(self, phone):
        """Sets the phone of this PaymentResponseCustomer.
        Customer's phone number. Mandatory for 'Asia’ and DIRECTBANKINGNGA payment methods. For other payment methods: optional by default, can be defined as mandatory by CardPay manager. # noqa: E501
        :param phone: The phone of this PaymentResponseCustomer. # noqa: E501
        :type: str
        """
        # Length 10..13 digits per the API spec.
        if phone is not None and len(phone) > 13:
            raise ValueError("Invalid value for `phone`, length must be less than or equal to `13`") # noqa: E501
        if phone is not None and len(phone) < 10:
            raise ValueError("Invalid value for `phone`, length must be greater than or equal to `10`") # noqa: E501
        self._phone = phone
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively serialize nested models, lists and dicts of models.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                if value is not None:
                    result[attr] = value
        # If the generated model ever subclasses dict, merge its items too.
        if issubclass(PaymentResponseCustomer, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        # NOTE(review): __eq__ without __hash__ makes instances unhashable
        # on Python 3 — acceptable for a generated DTO.
        if not isinstance(other, PaymentResponseCustomer):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
[
"[email protected]"
] | |
be1ff21d8d3789702fd02aa6333e49ef6cfe5047
|
cddfa750235344aa5e04244ce5c36871d3c6465b
|
/mayan/apps/document_states/links.py
|
f2d41f7e5898b86968e418d4195fb20c1560a36b
|
[
"Apache-2.0"
] |
permissive
|
Lomascolo/mayan-edms
|
76e0fdcad98605838df6737d109c95d67d9ebba5
|
f7f0d27a059b1e010b9bbcdf371b9867f6fcfa45
|
refs/heads/master
| 2021-01-24T08:30:07.480929 | 2017-05-30T06:01:31 | 2017-05-30T06:02:07 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,176 |
py
|
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from navigation import Link
from .permissions import (
permission_workflow_create, permission_workflow_delete,
permission_workflow_edit, permission_workflow_tools,
permission_workflow_view,
)
# Navigation link definitions for the document_states (workflows) app.
# Each Link binds a menu entry text to a named view, gated by the listed
# permissions; ``args`` is an expression resolved against the template
# context (e.g. ``object.pk``) to build the URL.
link_document_workflow_instance_list = Link(
    icon='fa fa-sitemap', permissions=(permission_workflow_view,),
    text=_('Workflows'),
    view='document_states:document_workflow_instance_list',
    args='resolved_object.pk'
)
# --- Workflow setup (create/edit/delete a workflow definition) ---
link_setup_workflow_create = Link(
    permissions=(permission_workflow_create,), text=_('Create workflow'),
    view='document_states:setup_workflow_create'
)
link_setup_workflow_delete = Link(
    permissions=(permission_workflow_delete,), tags='dangerous',
    text=_('Delete'), view='document_states:setup_workflow_delete',
    args='object.pk'
)
link_setup_workflow_document_types = Link(
    permissions=(permission_workflow_edit,), text=_('Document types'),
    view='document_states:setup_workflow_document_types', args='object.pk'
)
link_setup_workflow_edit = Link(
    permissions=(permission_workflow_edit,), text=_('Edit'),
    view='document_states:setup_workflow_edit', args='object.pk'
)
link_setup_workflow_list = Link(
    permissions=(permission_workflow_view,), icon='fa fa-sitemap',
    text=_('Workflows'), view='document_states:setup_workflow_list'
)
# --- Workflow states setup ---
link_setup_workflow_state_create = Link(
    permissions=(permission_workflow_edit,), text=_('Create state'),
    view='document_states:setup_workflow_state_create', args='object.pk'
)
link_setup_workflow_state_delete = Link(
    permissions=(permission_workflow_edit,), tags='dangerous',
    text=_('Delete'), view='document_states:setup_workflow_state_delete',
    args='object.pk'
)
link_setup_workflow_state_edit = Link(
    permissions=(permission_workflow_edit,), text=_('Edit'),
    view='document_states:setup_workflow_state_edit', args='object.pk'
)
link_setup_workflow_states = Link(
    permissions=(permission_workflow_view,), text=_('States'),
    view='document_states:setup_workflow_states', args='object.pk'
)
# --- Workflow transitions setup ---
link_setup_workflow_transition_create = Link(
    permissions=(permission_workflow_edit,), text=_('Create transition'),
    view='document_states:setup_workflow_transition_create', args='object.pk'
)
link_setup_workflow_transition_delete = Link(
    permissions=(permission_workflow_edit,), tags='dangerous',
    text=_('Delete'), view='document_states:setup_workflow_transition_delete',
    args='object.pk'
)
link_setup_workflow_transition_edit = Link(
    permissions=(permission_workflow_edit,), text=_('Edit'),
    view='document_states:setup_workflow_transition_edit', args='object.pk'
)
link_setup_workflow_transitions = Link(
    permissions=(permission_workflow_view,), text=_('Transitions'),
    view='document_states:setup_workflow_transitions', args='object.pk'
)
# --- Tools and runtime (per-instance) links ---
link_tool_launch_all_workflows = Link(
    icon='fa fa-sitemap',
    permissions=(permission_workflow_tools,),
    text=_('Launch all workflows'),
    view='document_states:tool_launch_all_workflows'
)
link_workflow_instance_detail = Link(
    permissions=(permission_workflow_view,), text=_('Detail'),
    view='document_states:workflow_instance_detail', args='resolved_object.pk'
)
# NOTE(review): no permission gate here — presumably transition rights are
# checked in the view itself; confirm before tightening.
link_workflow_instance_transition = Link(
    text=_('Transition'),
    view='document_states:workflow_instance_transition',
    args='resolved_object.pk'
)
link_workflow_document_list = Link(
    permissions=(permission_workflow_view,), text=_('Workflow documents'),
    view='document_states:workflow_document_list', args='resolved_object.pk'
)
link_workflow_list = Link(
    permissions=(permission_workflow_view,), icon='fa fa-sitemap',
    text=_('Workflows'), view='document_states:workflow_list'
)
link_workflow_state_document_list = Link(
    permissions=(permission_workflow_view,),
    text=_('State documents'), view='document_states:workflow_state_document_list',
    args='resolved_object.pk'
)
link_workflow_state_list = Link(
    permissions=(permission_workflow_view,),
    text=_('States'), view='document_states:workflow_state_list',
    args='resolved_object.pk'
)
|
[
"[email protected]"
] | |
a6066f6913650d57e3ddb4301debeef629944d3d
|
e97fb7903336837edb6bb3db16ea48512128076e
|
/22. Database/2. Parameterized/24. RetrieveMultipleRowWhereClauseUserInputDict.py
|
c2ab4af145f726d2a9c6710e5f457dcc12b68740
|
[] |
no_license
|
satyambhatt5/Advance_Python_code
|
4081bdb1e80f05161c07f416ebade3907e83f0fd
|
58746540285f253699b73aeebd3975911a310deb
|
refs/heads/master
| 2023-05-22T20:10:54.271672 | 2021-06-16T08:55:16 | 2021-06-16T08:55:16 | 377,386,448 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 684 |
py
|
# Retrieve Multiple Rows WHERE clause - User Input - Dict
import mysql.connector
try:
    # Connection parameters are hard-coded for this demo database.
    conn = mysql.connector.connect(
        user='root',
        password='geek',
        host='localhost',
        database='pdb',
        port=3306
    )
    if conn.is_connected():
        print('Connected')
except mysql.connector.Error:
    # Catch only connector errors — the original bare `except:` also hid
    # programming bugs (e.g. NameError) in this block.
    print('Unable to Connect')
    # Without a live connection the rest of the script cannot run; the
    # original fell through and later crashed with NameError on `conn`.
    raise SystemExit(1)

# Parameterized query: %(roll)s is bound from the dict below, which
# protects against SQL injection from the user-supplied roll number.
sql = 'SELECT * FROM student WHERE roll=%(roll)s'
myc = conn.cursor()
n = int(input('Enter Roll to Display: '))
disp_value = {'roll': n}
try:
    myc.execute(sql, disp_value)
    # Fetch and print one row at a time until the result set is exhausted.
    row = myc.fetchone()
    while row is not None:
        print(row)
        row = myc.fetchone()
    print('Total Rows:', myc.rowcount)
except mysql.connector.Error:
    print('Unable to Retrieve Data')
myc.close()  # Close Cursor
conn.close()  # Close Connection
|
[
"[email protected]"
] | |
8ea574857fbefb741c89a4bd87a9dd7d8dca56e0
|
0ff6198179fda1763acba69ff414c32e0f537233
|
/papers/admin.py
|
a402aa0c0c52c85351602ea2b3aef87fbfe288fe
|
[
"CC0-1.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
jc11am/Lab-Website
|
2a4d03f98f0502a2f77dacbbcd6db6714823cdab
|
111e837c957e5c73022de366985aaa6e3b3d014c
|
refs/heads/master
| 2021-05-16T22:54:04.790528 | 2016-09-10T19:28:37 | 2016-09-10T19:28:37 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 679 |
py
|
'''This package sets up the admin interface for the :mod:`papers` app.'''
from django.contrib import admin
from papers.models import Publication, AuthorDetails, AuthorContributions
class PublicationAdmin(admin.ModelAdmin):
    '''The :class:`~papers.models.Publication` model admin is the default.'''
    pass
admin.site.register(Publication, PublicationAdmin)
class AuthorDetailsAdmin(admin.ModelAdmin):
    '''The :class:`~papers.models.AuthorDetails` model admin is the default.'''
    pass
admin.site.register(AuthorDetails, AuthorDetailsAdmin)
class AuthorContributionsAdmin(admin.ModelAdmin):
    '''The :class:`~papers.models.AuthorContributions` model admin is the default.'''
    pass
admin.site.register(AuthorContributions, AuthorContributionsAdmin)
|
[
"[email protected]"
] | |
75553f4f93558a9c446c561ab0cac78bb68102c8
|
b232ab24686a197a88973f26478157d05c71a930
|
/03. 파이썬 문자열/049.py
|
a0ff28c7508d71fb92da6f742be5fdf32d09267d
|
[] |
no_license
|
areum0505/python300
|
09a3ea858bb728b6e2f699a57f013457680ab1d8
|
e3b56bd653390172410b86e15c40f34ef3125787
|
refs/heads/master
| 2023-01-06T04:53:44.937116 | 2020-10-22T23:44:36 | 2020-10-22T23:44:36 | 288,321,068 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 139 |
py
|
# Split a date string of the form "YYYY-MM-DD" into year, month and day.
date = "2020-05-01"
# str.split("-") returns ["2020", "05", "01"]; the original discarded this
# result — unpack it into named parts so the split is actually usable.
year, month, day = date.split("-")
|
[
"[email protected]"
] | |
e98c9e6e4e8e98f0eb86148a6604600fbb0f969e
|
f445450ac693b466ca20b42f1ac82071d32dd991
|
/generated_tempdir_2019_09_15_163300/generated_part002645.py
|
26c00d84fbe342c060edabef02fe3c69582a4427
|
[] |
no_license
|
Upabjojr/rubi_generated
|
76e43cbafe70b4e1516fb761cabd9e5257691374
|
cd35e9e51722b04fb159ada3d5811d62a423e429
|
refs/heads/master
| 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,821 |
py
|
from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
# Auto-generated matchpy commutative-match automaton for the pattern x**n
# inside a Mul; the nested state machine below mirrors the generator's
# decision tree and should not be hand-edited.
class CommutativeMatcher122210(CommutativeMatcher):
    # Singleton instance, created lazily by get().
    _instance = None
    # Pattern table: pattern id -> (index, required multiset, optional
    # Mul variables with their multiplicity and default).
    patterns = {
        0: (0, Multiset({}), [
            (VariableWithCount('i2.2.1.2.1.0', 1, 1, None), Mul),
            (VariableWithCount('i2.2.1.2.1.0_1', 1, 1, S(1)), Mul)
        ]),
        1: (1, Multiset({0: 1}), [
            (VariableWithCount('i2.2.1.2.1.0', 1, 1, S(1)), Mul)
        ])
    }
    subjects = {}
    subjects_by_id = {}
    bipartite = BipartiteGraph()
    associative = Mul
    max_optional_count = 1
    anonymous_patterns = set()
    def __init__(self):
        self.add_subject(None)
    @staticmethod
    def get():
        # Lazy singleton accessor used by the generated match code.
        if CommutativeMatcher122210._instance is None:
            CommutativeMatcher122210._instance = CommutativeMatcher122210()
        return CommutativeMatcher122210._instance
    @staticmethod
    def get_match_iter(subject):
        # Generator yielding (pattern index, substitution) for each way the
        # subject matches; it backtracks by pushing popped items back.
        subjects = deque([subject]) if subject is not None else deque()
        subst0 = Substitution()
        # State 122209
        if len(subjects) >= 1 and isinstance(subjects[0], Pow):
            tmp1 = subjects.popleft()
            subjects2 = deque(tmp1._args)
            # State 123779
            if len(subjects2) >= 1:
                tmp3 = subjects2.popleft()
                subst1 = Substitution(subst0)
                try:
                    subst1.try_add_variable('i2.2.1.2.1.1', tmp3)
                except ValueError:
                    pass
                else:
                    pass
                    # State 123780
                    if len(subjects2) >= 1:
                        tmp5 = subjects2.popleft()
                        subst2 = Substitution(subst1)
                        try:
                            subst2.try_add_variable('i2.2.1.2.1.2', tmp5)
                        except ValueError:
                            pass
                        else:
                            pass
                            # State 123781
                            if len(subjects2) == 0:
                                pass
                                # State 123782
                                if len(subjects) == 0:
                                    pass
                                    # 0: x**n
                                    yield 0, subst2
                        subjects2.appendleft(tmp5)
                subjects2.appendleft(tmp3)
            subjects.appendleft(tmp1)
        return
        # Unreachable yield keeps this function a generator even when the
        # body above yields nothing.
        yield
from collections import deque
|
[
"[email protected]"
] | |
4db5502b3cb8b1723df8a7ac89467e02e213fda7
|
d83f50302702d6bf46c266b8117514c6d2e5d863
|
/counting-bits.py
|
f875bfed4d8a2d111f435b9c52cfced316a0c179
|
[] |
no_license
|
sfdye/leetcode
|
19764a6bdb82de114a2c82986864b1b2210c6d90
|
afc686acdda4168f4384e13fb730e17f4bdcd553
|
refs/heads/master
| 2020-03-20T07:58:52.128062 | 2019-05-05T08:10:41 | 2019-05-05T08:10:41 | 137,295,892 | 3 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 250 |
py
|
class Solution:
    def countBits(self, num):
        """Return a list where entry i is the count of 1-bits in i,
        for every i from 0 through num.

        :type num: int
        :rtype: List[int]
        """
        # DP on halves: i and i >> 1 share every bit except the lowest,
        # so popcount(i) == popcount(i >> 1) + (i & 1).
        bits = [0]
        for value in range(1, num + 1):
            bits.append(bits[value >> 1] + (value & 1))
        return bits
|
[
"[email protected]"
] | |
4f0d5c22413bdaacf869bf9cbd12d47bcc73f375
|
1dc753d68b234b10193962f58d306bd91957eb6d
|
/college/college/doctype/student_achievements/student_achievements.py
|
66884338ed30206d53469c0ed0ba413e759ab9c7
|
[
"MIT"
] |
permissive
|
harshith187/college
|
e8612134e47c48ad721840f684362f7348e9bad4
|
d9ae21734dcde70397aead827e57fbbdcdeb98c9
|
refs/heads/master
| 2020-07-20T12:36:27.601134 | 2020-05-25T13:53:57 | 2020-05-25T13:53:57 | 206,641,495 | 0 | 4 |
NOASSERTION
| 2020-05-25T15:05:16 | 2019-09-05T19:27:37 |
Python
|
UTF-8
|
Python
| false | false | 266 |
py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, mvit ise and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
# import frappe
from frappe.model.document import Document
class StudentAchievements(Document):
    """Frappe DocType controller for Student Achievements.

    No custom behaviour — all CRUD comes from the ``Document`` base class.
    """
    pass
|
[
"[email protected]"
] | |
7948a9e20dfc18adb728f35ea7d8d4a1387faf1a
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2408/60670/279400.py
|
462432fde739ac9f0e437d3408deb95a44e663a4
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 360 |
py
|
def isPrime(x):
    """Return True if x is a prime number, using trial division.

    Bug fix: the original returned False when a candidate divisor did
    NOT divide x (inverted test), reporting primality almost exactly
    backwards, and it treated 0 and 1 as prime because the loop body
    never ran for them.
    """
    if x < 2:
        return False
    # Only divisors up to sqrt(x) need to be checked.
    for i in range(2, int(x**0.5) + 1):
        if x % i == 0:
            return False
    return True
def factorial(n):
    """Return n! modulo 1,000,000,007.

    Bug fix: the original looped over range(1, n) and therefore computed
    (n-1)!; the product must include n itself.  Reducing modulo the prime
    at every step also keeps intermediates small.
    """
    t = 1
    for i in range(2, n + 1):
        t = t * i % 1000000007
    return t
# Read n, count primes in [1, n], and print
# primes! * (n - primes)!  (mod 1e9+7).
# NOTE(review): this appears to solve a "prime arrangements" style
# counting problem — confirm against the original task statement.
n=int(input())
numOfPrime=0
for i in range(1,n+1):
    if isPrime(i):
        numOfPrime+=1
print((factorial(numOfPrime)*factorial(n-numOfPrime))%1000000007)
|
[
"[email protected]"
] | |
955a3394f44e953f1a4c30c5c454af78e16f84da
|
a2477654a0fb85f9507389ff7a4b4a8bcc1641fa
|
/trydjango1-11/src/restaurants/migrations/0003_auto_20170926_1624.py
|
5708b2f804f86a92b2d7213e1dbc4f79de3a24b5
|
[] |
no_license
|
ervinpepic/Django-11-Restaurant-app
|
6ae1e2dec7571b0180ea991ca80b9b83d00cdb1b
|
a6bd976130c70621e6149ee64c61e1cdcec2acba
|
refs/heads/master
| 2022-10-18T08:34:11.496044 | 2017-11-25T19:57:36 | 2017-11-25T19:57:36 | 111,400,182 | 0 | 1 | null | 2022-10-10T08:12:45 | 2017-11-20T11:13:00 |
Python
|
UTF-8
|
Python
| false | false | 416 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-09-26 16:24
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration renaming the ``Restaurant`` model to
    ``RestaurantLocation`` in the ``restaurants`` app."""
    dependencies = [
        ('restaurants', '0002_restaurant_location'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='Restaurant',
            new_name='RestaurantLocation',
        ),
    ]
|
[
"[email protected]"
] | |
85dedc26a7d0b18671e3606cefba8011ec6f33a6
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_156/521.py
|
ca8aafaec283d6e9fa857be6020a6168166a825e
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,635 |
py
|
#!/usr/bin/python3
import sys
import math
# Code Jam "pancakes" solver: for each case, breadth-first search over ways
# of splitting the largest diner's stack, minimizing (max stack + minutes
# spent splitting).
ncases = int(sys.stdin.readline().strip())
for t in range(1, ncases+1):
    d = int(sys.stdin.readline().strip())
    values = sys.stdin.readline().strip().split()
    pancakes = [int(x) for x in values]
    pancakes.sort(reverse=True)
    # Upper bound: serve without any splits.
    best = pancakes[0]
    # Node format: List of diners with pancakes, number of special minutes
    initial_node = [pancakes, 0]
    queue = [initial_node]
    while queue:
        node = queue.pop(0)
        diners = node[0]
        special = node[1]
        # Lists stay sorted descending, so diners[0] is the largest stack.
        top = diners[0]
        #if (top + special) >= best:
        #    continue
        if (top + special) < best:
            best = top + special
        # Stacks of 3 or fewer cannot be improved by splitting.
        if top < 4:
            continue
        # Let's introduce new special minutes. Note _all_ diners with
        # the max number of pancakes should be split (adding more special
        # minutes), as splitting just one of them is pointless
        for n in [2, 3, 4]:
            # Split `top` into n near-equal parts.
            splits = []
            remainder = top
            for i in range(0, n):
                split = math.floor(remainder/(n-i))
                remainder -= split
                splits.append(split)
            diners_after_special = list(diners)
            new_special = special
            # Replace every stack equal to `top` with its split parts,
            # paying (n-1) minutes per split.
            while diners_after_special[0] == top:
                diners_after_special.pop(0)
                diners_after_special += splits
                new_special += (n-1)
            diners_after_special.sort(reverse=True)
            new_node = [diners_after_special, new_special]
            queue.append(new_node)
    print("Case #{0}: {1}".format(t, best))
|
[
"[email protected]"
] | |
36815ed5dbc21619f0e347fd9614d4889ea71b0d
|
bfb882c400956861fccd40bf1fb53cd6ddcba41e
|
/hagelslag/processing/__init__.py
|
947f56449e95c6deffd11da0f81a50f94c71a716
|
[
"MIT"
] |
permissive
|
stsaten6/hagelslag
|
3b1b07cf424997686b3320c538a188c790232bd7
|
6b7d0779a0b0ac4bd26fbe4931b406fad1ef9f9e
|
refs/heads/master
| 2020-03-10T17:38:44.528943 | 2018-04-12T20:50:38 | 2018-04-12T20:50:38 | 129,504,847 | 2 | 0 |
MIT
| 2018-04-14T09:58:37 | 2018-04-14T09:58:37 | null |
UTF-8
|
Python
| false | false | 524 |
py
|
from .EnhancedWatershedSegmenter import EnhancedWatershed
from .EnsembleProducts import MachineLearningEnsembleProducts, EnsembleProducts, EnsembleConsensus
from .Hysteresis import Hysteresis
from .ObjectMatcher import ObjectMatcher, TrackMatcher
from .ObjectMatcher import mean_minimum_centroid_distance, centroid_distance, shifted_centroid_distance, nonoverlap, \
mean_min_time_distance, start_centroid_distance, start_time_distance, closest_distance
from .STObject import STObject, read_geojson
from .tracker import *
|
[
"[email protected]"
] | |
feb3861b0c0a06a508fdf4a0748c05fe0b8f72be
|
0f00c8a02e8dc1d8136b2afc92338108f92cc6ae
|
/recipes/mrbayes/run_test.py
|
40033ea2ed9721ad50dfc69b067eccb43cef93ff
|
[] |
no_license
|
faircloth-lab/conda-recipes
|
3714f5be83753261bf3abc70454bdf6b7028c8d6
|
75a520a75a357ea47ee80262f3c3a6dfe1b0715f
|
refs/heads/master
| 2021-01-20T07:07:05.705307 | 2015-06-16T13:50:18 | 2015-06-16T13:50:18 | 12,671,015 | 2 | 6 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,416 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
(c) 2013 Brant Faircloth || http://faircloth-lab.org/
All rights reserved.
This code is distributed under a 3-clause BSD license. Please see
LICENSE.txt for more information.
Created on 30 December 2013 16:33 PST (-0800)
"""
import unittest
import subprocess
class TestMb(unittest.TestCase):
    """Smoke-test that the serial MrBayes binary (``mb``) runs and prints
    its expected version banner."""
    def test_mb(self):
        # Invoke ``mb -h`` and capture both output streams.
        cmd = ["mb", "-h"]
        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        self.stdout, self.stderr = proc.communicate()
        # Keep banner lines 0, 2 and 4 (every other line of the header).
        # NOTE(review): splitting on "\n" assumes str output — on Python 3
        # communicate() returns bytes; presumably written for Python 2.
        text = [v.strip() for k, v in enumerate(self.stdout.split("\n"))
                if k in range(0, 6, 2)]
        assert text == [
            '',
            'MrBayes v3.2.2 x64',
            '(Bayesian Analysis of Phylogeny)'
        ]
class TestMbMpi(unittest.TestCase):
    """Smoke-test that the MPI MrBayes binary (``mb-mpi``) runs and prints
    its expected version banner."""
    def test_mb(self):
        # Invoke ``mb-mpi -h`` and capture both output streams.
        cmd = ["mb-mpi", "-h"]
        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        self.stdout, self.stderr = proc.communicate()
        # Keep banner lines 0, 2 and 4 (every other line of the header).
        # NOTE(review): same Python-2 bytes/str assumption as TestMb.
        text = [v.strip() for k, v in enumerate(self.stdout.split("\n"))
                if k in range(0, 6, 2)]
        assert text == [
            'MrBayes v3.2.2 x64',
            '(Bayesian Analysis of Phylogeny)',
            '(Parallel version)'
        ]
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
8c49afcd2557458371bc37031be00356b871799d
|
092e00ae8389811929a381637b73dcb2303fefeb
|
/blog/domain/user.py
|
338592ec2da4b0e0020f532f84602d13ba2ace07
|
[] |
no_license
|
uiandwe/rest_framework_ex
|
33cfb73e386785009b1d012a3dfa6909bdc74ab3
|
8130bcf9a6ffd67b91906c85d66ed9d8d453bab8
|
refs/heads/master
| 2022-11-27T20:56:26.911462 | 2021-10-12T07:46:17 | 2021-10-12T07:46:17 | 234,095,110 | 0 | 0 | null | 2022-11-22T05:17:55 | 2020-01-15T14:12:34 |
Python
|
UTF-8
|
Python
| false | false | 220 |
py
|
# -*- coding: utf-8 -*-
class User:
    """Plain domain object pairing an email address with a username."""

    def __init__(self, email, username):
        self.email = email
        self.username = username

    def __repr__(self):
        # Same "email, username" text the original built via str.format.
        return f"{self.email}, {self.username}"
|
[
"[email protected]"
] | |
291145b4c5ed899fc48d811be2dd62caa2b32b4a
|
62e58c051128baef9452e7e0eb0b5a83367add26
|
/x12/4010/819004010.py
|
23f27f88966ad294e1ec85c55e27af7395e422d6
|
[] |
no_license
|
dougvanhorn/bots-grammars
|
2eb6c0a6b5231c14a6faf194b932aa614809076c
|
09db18d9d9bd9d92cefbf00f1c0de1c590fe3d0d
|
refs/heads/master
| 2021-05-16T12:55:58.022904 | 2019-05-17T15:22:23 | 2019-05-17T15:22:23 | 105,274,633 | 0 | 0 | null | 2017-09-29T13:21:21 | 2017-09-29T13:21:21 | null |
UTF-8
|
Python
| false | false | 1,730 |
py
|
from bots.botsconfig import *
from records004010 import recorddefs
# Bots grammar for X12 transaction set 819, version 004010 (functional
# group JB).  ID/MIN/MAX/LEVEL come from bots.botsconfig; MIN/MAX are the
# segment's required/maximum occurrence counts, LEVEL nests child segments.
syntax = {
    'version' : '00403', #version of ISA to send
    'functionalgroup' : 'JB',
}
structure = [
{ID: 'ST', MIN: 1, MAX: 1, LEVEL: [
    {ID: 'BOS', MIN: 1, MAX: 1},
    {ID: 'CUR', MIN: 0, MAX: 1},
    {ID: 'ITD', MIN: 0, MAX: 5},
    {ID: 'N1', MIN: 0, MAX: 10, LEVEL: [
        {ID: 'N2', MIN: 0, MAX: 2},
        {ID: 'N3', MIN: 0, MAX: 2},
        {ID: 'N4', MIN: 0, MAX: 1},
        {ID: 'REF', MIN: 0, MAX: 12},
        {ID: 'MSG', MIN: 0, MAX: 12},
        {ID: 'PER', MIN: 0, MAX: 3},
    ]},
    {ID: 'JIL', MIN: 1, MAX: 10000, LEVEL: [
        {ID: 'PID', MIN: 0, MAX: 99999},
        {ID: 'REF', MIN: 0, MAX: 12},
        {ID: 'MSG', MIN: 0, MAX: 12},
        {ID: 'MEA', MIN: 0, MAX: 10},
        {ID: 'ITA', MIN: 0, MAX: 10},
        {ID: 'PSA', MIN: 0, MAX: 1},
        {ID: 'DTM', MIN: 0, MAX: 1},
        {ID: 'JID', MIN: 0, MAX: 1000, LEVEL: [
            {ID: 'PID', MIN: 0, MAX: 99999},
            {ID: 'DTM', MIN: 0, MAX: 10},
            {ID: 'REF', MIN: 0, MAX: 12},
            {ID: 'MSG', MIN: 0, MAX: 12},
            {ID: 'MEA', MIN: 0, MAX: 5},
        ]},
    ]},
    {ID: 'AMT', MIN: 1, MAX: 1},
    {ID: 'QTY', MIN: 0, MAX: 5},
    {ID: 'TDS', MIN: 0, MAX: 1},
    {ID: 'PSA', MIN: 0, MAX: 1000, LEVEL: [
        {ID: 'N1', MIN: 0, MAX: 1},
        {ID: 'N2', MIN: 0, MAX: 2},
        {ID: 'N3', MIN: 0, MAX: 2},
        {ID: 'N4', MIN: 0, MAX: 1},
        {ID: 'DTM', MIN: 0, MAX: 1},
        {ID: 'REF', MIN: 0, MAX: 12},
        {ID: 'PER', MIN: 0, MAX: 3},
    ]},
    {ID: 'CTT', MIN: 1, MAX: 1},
    {ID: 'SE', MIN: 1, MAX: 1},
]}
]
|
[
"[email protected]"
] | |
51086a37acacb82ec4da2e56fe316b05793a58d1
|
2335e7d1c10d800abb10b4432465f29a4456548d
|
/setup.py
|
721f1b8d75682c30d9183bd741ff5d826e50db7d
|
[
"LicenseRef-scancode-warranty-disclaimer",
"EFL-2.0"
] |
permissive
|
deathbybandaid/Sopel-StartupMonologue
|
48a7e85ca117c630cf8039af76a0bbaea91ff5a1
|
f495344cee379e66ec5022e1e7edf15f075c758c
|
refs/heads/master
| 2020-05-09T11:18:01.564022 | 2019-04-27T14:12:38 | 2019-04-27T14:12:38 | 181,074,314 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,434 |
py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import sys
from setuptools import setup, find_packages
if __name__ == '__main__':
print('Sopel does not correctly load modules installed with setup.py '
'directly. Please use "pip install .", or add {}/sopel_modules to '
'core.extra in your config.'.format(
os.path.dirname(os.path.abspath(__file__))),
file=sys.stderr)
with open('README.md') as readme_file:
readme = readme_file.read()
with open('NEWS') as history_file:
history = history_file.read()
with open('requirements.txt') as requirements_file:
requirements = [req for req in requirements_file.readlines()]
with open('dev-requirements.txt') as dev_requirements_file:
dev_requirements = [req for req in dev_requirements_file.readlines()]
setup(
name='sopel_modules.startupmonologue',
version='0.1.0',
description='Sopel Startup Monologue displays to all channels that the bot is online',
long_description=readme + '\n\n' + history,
author='Sam Zick',
author_email='[email protected]',
url='https://github.com/deathbybandaid/Sopel-StartupMonologue',
packages=find_packages('.'),
namespace_packages=['sopel_modules'],
include_package_data=True,
install_requires=requirements,
tests_require=dev_requirements,
test_suite='tests',
license='Eiffel Forum License, version 2',
)
|
[
"[email protected]"
] | |
55c13d8cf177119f3b0b4ac0b18bc121cc4f8d62
|
f64e31cb76909a6f7fb592ad623e0a94deec25ae
|
/tests/test_p1494_parallel_courses_ii.py
|
dbf8cbae087e98cebaed176c651d916aaa595833
|
[] |
no_license
|
weak-head/leetcode
|
365d635cb985e1d154985188f6728c18cab1f877
|
9a20e1835652f5e6c33ef5c238f622e81f84ca26
|
refs/heads/main
| 2023-05-11T14:19:58.205709 | 2023-05-05T20:57:13 | 2023-05-05T20:57:13 | 172,853,059 | 0 | 1 | null | 2022-12-09T05:22:32 | 2019-02-27T05:58:54 |
Python
|
UTF-8
|
Python
| false | false | 1,391 |
py
|
# flake8: noqa: F403, F405
import pytest
from leetcode.p1494_parallel_courses_ii import *
solutions = [
minNumberOfSemesters,
]
test_cases = [
(
[
13,
[
[12, 8],
[2, 4],
[3, 7],
[6, 8],
[11, 8],
[9, 4],
[9, 7],
[12, 4],
[11, 4],
[6, 4],
[1, 4],
[10, 7],
[10, 4],
[1, 7],
[1, 8],
[2, 7],
[8, 4],
[10, 8],
[12, 7],
[5, 4],
[3, 4],
[11, 7],
[7, 4],
[13, 4],
[9, 8],
[13, 8],
],
9,
],
3,
),
([4, [[2, 1], [3, 1], [1, 4]], 2], 3),
([5, [[2, 1], [3, 1], [4, 1], [1, 5]], 2], 4),
([11, [], 2], 6),
([11, [], 1], 11),
([11, [], 3], 4),
([11, [], 6], 2),
([11, [], 8], 2),
([11, [], 10], 2),
([11, [], 11], 1),
([11, [], 12], 1),
]
@pytest.mark.timeout(2)
@pytest.mark.parametrize(("args", "expectation"), test_cases)
@pytest.mark.parametrize("solution", solutions)
def test_solution(args, expectation, solution):
assert solution(*args) == expectation
|
[
"[email protected]"
] | |
78e368fb716111fadb4e8ba88e1ddd8e34f363a5
|
98b0d740346ad9aecd228b9a8ebb8e818908ce03
|
/hr-1.py
|
0d51517045973153f9d6f31c16975b8fb25a1e6b
|
[] |
no_license
|
alexisbellido/python-examples
|
8c63156a2800a584a8aff0909325e38acbe49163
|
e6a4f61d9cd18588987430007e28ef036971764b
|
refs/heads/master
| 2022-10-16T08:28:15.312916 | 2022-09-30T15:55:31 | 2022-09-30T15:55:31 | 240,379,353 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 341 |
py
|
def hi(name):
return f'Hi, {name}'
if __name__ == '__main__':
# people = [input().split() for i in range(int(input()))]
# print(*name_format(people), sep='\n')
####################
people = [
'John',
'Mike',
]
# print(hi(people[0]))
# print(hi(people[1]))
# print(*hi(people), sep='\n')
|
[
"[email protected]"
] | |
9e783b4e701f26b5c214da0138af22e4c3c66562
|
f2ac9260dfa7483cd54a30700bb952e10acbc1bb
|
/fit_lr.py
|
27c2ea1089ad19bf4212c6e4d9de0bab81cb012f
|
[] |
no_license
|
kudkudak/compound-activity-prediction
|
94dd9efd2ff7ba5c95ebb71ce1766eb6b8882aac
|
d55e6ecb4e3de74d40b1a37950449f60df1a2ca4
|
refs/heads/master
| 2016-09-15T21:35:54.930142 | 2015-01-14T13:09:19 | 2015-01-14T13:09:19 | 27,130,096 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,056 |
py
|
from misc.utils import *
from misc.experiment_utils import get_exp_options, print_exp_header, \
save_exp, get_exp_logger, generate_configs, print_exp_name
from data_api import prepare_experiment_data, prepare_experiment_data_embedded, get_raw_training_data
from sklearn.metrics import matthews_corrcoef, accuracy_score, confusion_matrix
from sklearn.neighbors import KNeighborsClassifier
from sklearn.preprocessing import MinMaxScaler
import sklearn.linear_model
def fit_lrs(config_in = None):
#### Load config and data ####
config = {"protein":0, "fingerprint":4,"n_folds":10,
"use_embedding": 1, "K":20, "max_hashes":1000, "seed":0, "C_min":-3, "C_max":7}
if config_in is None:
config.update(get_exp_options(config))
else:
config.update(config_in)
D, config_from_data = prepare_experiment_data_embedded(n_folds=10, seed=config["seed"], K=config["K"], \
max_hashes=config["max_hashes"],
protein=config["protein"], fingerprint=config["fingerprint"])
config.update(config_from_data)
config["C"] = [10.0**(i/float(2)) for i in range(2*config["C_min"],2*(1+config["C_max"]))]
print config["C"]
logger = get_exp_logger(config)
### Prepare experiment ###
E = {"config": config, "experiments":[]}
def fit_lr(config):
### Prepare result holders ###b
values = {}
results = {}
monitors = {}
E = {"config": config, "results": results, "monitors":monitors, "values":values}
### Print experiment header ###
print_exp_name(config)
### Train ###
monitors["acc_fold"] = []
monitors["mcc_fold"] = []
monitors["wac_fold"] = []
monitors["cm"] = [] # confusion matrix
monitors["clf"] = []
monitors["train_time"] = []
monitors["test_time"] = []
results["mean_acc"] = 0
results["mean_mcc"] = 0
values["transformers"] = []
for fold in D["folds"]:
X_train, Y_train, X_test, Y_test = fold["X_train"], fold["Y_train"], fold["X_test"], fold["Y_test"]
min_max_scaler = MinMaxScaler()
X_train = min_max_scaler.fit_transform(X_train)
X_test = min_max_scaler.transform(X_test)
clf =sklearn.linear_model.LogisticRegression (C=config["C"], class_weight="auto")
tstart = time.time()
monitors["train_time"].append(time.time() - tstart)
clf.fit(X_train.astype(float), Y_train.astype(float).reshape(-1))
tstart = time.time()
Y_pred = clf.predict(X_test.astype(float))
monitors["test_time"].append(time.time() - tstart)
acc_fold, mcc_fold = accuracy_score(Y_test, Y_pred), matthews_corrcoef(Y_test, Y_pred)
cm = confusion_matrix(Y_test, Y_pred)
tp, fn, fp, tn = cm[1,1], cm[1,0], cm[0,1], cm[0,0]
monitors["clf"].append(clf)
monitors["cm"].append(cm)
monitors["wac_fold"].append(0.5*tp/float(tp+fn) + 0.5*tn/float(tn+fp))
monitors["acc_fold"].append(acc_fold)
monitors["mcc_fold"].append(mcc_fold)
monitors["acc_fold"] = np.array(monitors["acc_fold"])
monitors["mcc_fold"] = np.array(monitors["mcc_fold"])
monitors["wac_fold"] = np.array(monitors["wac_fold"])
results["mean_acc"] = monitors["acc_fold"].mean()
results["mean_mcc"] = monitors["mcc_fold"].mean()
results["mean_wac"] = monitors["wac_fold"].mean()
logger.info(results)
return E
cv_configs = generate_configs(config, ["C"])
for c in cv_configs:
print c
E["experiments"].append(fit_lr(c))
save_exp(E)
best_e = E["experiments"][0]
for e in E["experiments"]:
if e["results"]["mean_wac"] > best_e["results"]["mean_wac"]:
best_e = e
logger.info(best_e)
logger.info("Done")
if __name__ == "__main__":
fit_lrs()
|
[
"[email protected]"
] | |
4fafdb60d2714fc699c55d2ce9bc473bfcffb686
|
b3b68efa404a7034f0d5a1c10b281ef721f8321a
|
/Scripts/simulation/situations/complex/university_mixer_situation.py
|
bdd94a7c82a8c319385d8ae99bf8517a96e6a57b
|
[
"Apache-2.0"
] |
permissive
|
velocist/TS4CheatsInfo
|
62195f3333076c148b2a59f926c9fb5202f1c6fb
|
b59ea7e5f4bd01d3b3bd7603843d525a9c179867
|
refs/heads/main
| 2023-03-08T01:57:39.879485 | 2021-02-13T21:27:38 | 2021-02-13T21:27:38 | 337,543,310 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,087 |
py
|
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\situations\complex\university_mixer_situation.py
# Compiled at: 2019-10-19 01:32:53
# Size of source mod 2**32: 5699 bytes
from situations.situation_complex import SituationComplex, CommonInteractionCompletedSituationState, CommonSituationState, SituationComplexCommon, TunableSituationJobAndRoleState, SituationStateData
from sims4.tuning.tunable import TunableReference, TunableEnumWithFilter
from tag import Tag
import services
from objects.object_manager import ObjectManager
from sims4.tuning.instances import lock_instance_tunables
from situations.bouncer.bouncer_request import exclusivity_compare
from situations.bouncer.bouncer_types import BouncerExclusivityCategory
from situations.situation_types import SituationCreationUIOption
from situations.situation import Situation
class _MixerParty(CommonSituationState):
def timer_expired(self):
self._change_state(self.owner.cleanup_party_state())
def on_activate(self, reader=None):
super().on_activate(reader)
if self.owner.juice_keg is not None:
self.owner._claim_object(self.owner.juice_keg.id)
class _CleanupJuiceKeg(CommonInteractionCompletedSituationState):
def on_activate(self, reader=None):
super().on_activate(reader)
if self.owner.juice_keg is None:
self.owner._self_destruct()
def _on_interaction_of_interest_complete(self, **kwargs):
self.owner._self_destruct()
class _SetupJuiceKeg(CommonInteractionCompletedSituationState):
def _on_interaction_of_interest_complete(self, **kwargs):
self._change_state(self.owner.mixer_party_state())
class UniversityMixerPartySituation(SituationComplexCommon):
INSTANCE_TUNABLES = {'juice_keg_bearer_job_and_role':TunableSituationJobAndRoleState(description='\n The job and role state for the bearer of the juice keg.\n '),
'setup_juice_keg_state':_SetupJuiceKeg.TunableFactory(description='\n The state to bring in the keg bearer and have the juice keg set up on the lot.\n ',
display_name='1. Setup Juice Keg State',
tuning_group=SituationComplexCommon.SITUATION_STATE_GROUP),
'mixer_party_state':_MixerParty.TunableFactory(description='\n The state to represent the party itself.\n ',
display_name='2. Mixer Party State',
tuning_group=SituationComplexCommon.SITUATION_STATE_GROUP),
'cleanup_party_state':_CleanupJuiceKeg.TunableFactory(description='\n The state to cleanup the juice keg and end the party\n ',
display_name='3. Party Cleanup State',
tuning_group=SituationComplexCommon.SITUATION_STATE_GROUP),
'juice_keg_tag':TunableEnumWithFilter(description='\n Tag used to find the juice keg supplied by the situation.\n ',
tunable_type=Tag,
default=Tag.INVALID,
invalid_enums=Tag.INVALID,
filter_prefixes=('func', ))}
REMOVE_INSTANCE_TUNABLES = Situation.NON_USER_FACING_REMOVE_INSTANCE_TUNABLES
def __init__(self, *args, **kwargs):
(super().__init__)(*args, **kwargs)
self._juice_keg_object_id = None
def start_situation(self):
super().start_situation()
if self.juice_keg is not None:
self._claim_object(self.juice_keg.id)
self._change_state(self.setup_juice_keg_state())
@classmethod
def _states(cls):
return (SituationStateData(1, _SetupJuiceKeg, factory=(cls.setup_juice_keg_state)),
SituationStateData(2, _MixerParty, factory=(cls.mixer_party_state)),
SituationStateData(3, _CleanupJuiceKeg, factory=(cls.cleanup_party_state)))
@classmethod
def _get_tuned_job_and_default_role_state_tuples(cls):
return [(cls.juice_keg_bearer_job_and_role.job, cls.juice_keg_bearer_job_and_role.role_state)]
@classmethod
def default_job(cls):
pass
@property
def juice_keg(self):
object_manager = services.object_manager()
juice_keg = None
if self._juice_keg_object_id is not None:
juice_keg = object_manager.get(self._juice_keg_object_id)
if juice_keg is None:
if self.juice_keg_bearer is not None:
for obj in object_manager.get_objects_with_tag_gen(self.juice_keg_tag):
if obj.get_sim_owner_id() is self.juice_keg_bearer.id:
juice_keg = obj
self._juice_keg_object_id = juice_keg.id
break
return juice_keg
@property
def juice_keg_bearer(self):
sim = next(self.all_sims_in_job_gen(self.juice_keg_bearer_job_and_role.job), None)
return sim
lock_instance_tunables(UniversityMixerPartySituation, exclusivity=(BouncerExclusivityCategory.NORMAL),
creation_ui_option=(SituationCreationUIOption.NOT_AVAILABLE))
|
[
"[email protected]"
] | |
2a6ed3ab36186dc4b2907c6eccfff147841622dd
|
bc28f8fe941caf281261afa1641868e743ecb5ab
|
/Google_APAC_Round_E/Beautiful_Numbers/Beautiful_Numbers.py
|
07ce6d570af05b0e1e80e6cd90d4524fcd142a89
|
[] |
no_license
|
anubhavshrimal/CompetitiveProgrammingInPython
|
9fc6949fb3cd715cfa8544c17a63ffbe52677b37
|
2692c446d49ec62d4967ed78a7973400db7ce981
|
refs/heads/master
| 2021-07-05T08:17:15.182154 | 2018-05-29T02:26:25 | 2018-05-29T02:26:25 | 60,554,340 | 7 | 6 | null | 2021-05-24T17:46:16 | 2016-06-06T19:18:27 |
Python
|
UTF-8
|
Python
| false | false | 465 |
py
|
import numpy as np
test = int(input())
for t in range(1, test+1):
num = int(input())
n1, n2 = abs(np.roots([1, 1, -(num-1)]))
if int(n1) != n1 or int(n2)!= n2:
ans = num-1
else:
if n1 == 1 or n1 == -1:
ans = n2
elif n2 == 1 or n2 == -1:
ans = n1
else:
if n2 > n1:
ans = n1
else:
ans = n2
print('Case #'+str(t)+':',str(int(ans)))
|
[
"[email protected]"
] | |
7054d92c14a1e6c568fc15281f3341cce89ae817
|
4c2136ab05913beba890b4127c2f608be4798ed2
|
/(0, '')/py/fc_session.py
|
751c6d3892c8e00fd0baf22a85673c65224e1427
|
[] |
no_license
|
Dyutee/test
|
345adcd1769cba0f468090bcc311f4d379ea5f1e
|
b8b3718922bafbac1bad3802f6c885d777e1bb08
|
refs/heads/master
| 2021-01-12T04:19:45.511927 | 2016-12-29T07:25:29 | 2016-12-29T07:25:29 | 77,588,025 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,517 |
py
|
#!/usr/bin/python
import cgitb, sys, header, common_methods
cgitb.enable()
sys.path.append('/var/nasexe/storage')
import storage_op
import sys,os
from lvm_infos import *
from functions import *
import san_disk_funs
check_fc = san_disk_funs.fc_target_status();
fc_target=san_disk_funs.fc_list_targets()
fc_ip = ''
ses = ''
########### FC Session ##########################
for session_tar in fc_target:
#print 'Session Target:'+str(session_tar)
#print '<br/>'
#print 'Sess Tar:'+str(session_tar)
#print '<br/>'
ses=san_disk_funs.fc_session(session_tar)
#print 'FC SESSION Info:'+str(sess)
import left_nav
#if (str(check_fc).find("'1'") > 0):
if (check_fc !=[]):
print
print """
<!--Right side body content starts from here-->
<div class="rightsidecontainer">
<div class="insidepage-heading">Fc >> <span class="content">Fc Session Information</span></div>
<!--tab srt-->
<div class="searchresult-container">
<div class="infoheader">
<div id="tabs">
<ul>
<li><a href="#tabs-1">Fc Session</a></li>
</ul>
<div id="tabs-1">
<!--form container starts here-->
<div class="form-container">
<div class="topinputwrap-heading">Fc Session Information </div>
<div class="inputwrap">
<div class="formrightside-content">
<form name = 'add_info' method = 'POST'>
<table width = "680" border = "1" cellspacing = "0" cellpadding = "0" name = 'disp_tables' id = 'id_target_info' style ="border-style:ridge;">"""
print"""<tr style = 'background-color:#999999; font-weight: bold;'>
<td height = "35px" valign = "middle" style = 'color: #FFF;'>Fc Target</td>
<td height = "35px" valign = "middle" style = 'color: #FFF;'>Connected Client</td>
</tr>"""
#print fc_target
if(ses !=''):
for tar_info in fc_target:
print"""<tr>
<!--<td class = "table_content" height = "35px" valign = "middle">
<a href = 'main.php?page=iscsi&act=add_disk_tgt_done&target=<?= $show_targets ?>'><img border = '0' style = 'margin-top: 2px;' src = '../images/add.png' title = 'Add disk to target' /></a> <a href = 'main.php?page=iscsi&act=del_disk_tgt_done&t=<?= $show_targets ?>'><img border = '0' src = '../images/fileclose.png' title = 'Remove disk from target' /></a> <a href = 'get_properties.php?target=<?= $show_targets ?>'><img border = '0' src = '../images/properties.png' title = 'Target properties' /></a> </td>-->
<td class = "table_content" height = "35px" valign = "middle">"""
print""" <font color ="darkred"><b>"""+tar_info+"""</b></font>"""
print """</td>"""
print"""<td class = "table_content" height = "35px" valign = "middle" style="font-family: Tahoma;text-decoration:blink;">"""
sesion_tar =sess=san_disk_funs.fc_session(tar_info)
replace_sess_nm = str(sesion_tar).replace('[]', '')
replace_sess_nm1 = str(replace_sess_nm).replace('[', '')
replace_sess_nm2 = str(replace_sess_nm1).replace(']', '')
replace_session_name = str(replace_sess_nm2).replace("'", '')
#print replace_session_name
if(replace_session_name!=''):
print"""<font color = 'darkgreen'><b>"""+replace_session_name+"""</b></font></td>"""
else:
print """
<marquee behavior="alternate" direction ="right"><b><font size="3">There is no Session for this client</font></b></marquee>
</td>
"""
else:
print"""<tr>
<td colspan = '3' align = 'center' height="50px;">
<marquee behavior="alternate" direction= "right"><b><font size="5">No Information is available</font></b></marquee>
</td>
</tr>"""
print"""
</table>
</form>
</div>"""
print"""
</div>
</div>
<!--form container ends here-->
</div>
</div>
</div>
</div>
<!--form container ends here-->
<!--form container starts here-->
<!--form container ends here-->
</div>
<!--Right side body content ends here-->
</div>
<!--Footer starts from here-->
<div class="insidefooter footer_content">© 2013 Opslag FS2</div>
<!-- Footer ends here-->
</div>
<!--inside body wrapper end-->
</div>"""
else:
print "<div style = 'margin-left: auto; margin-right: auto; text-align: center; vertical-align: center; color: darkred; width: 65%; font: 16px Arial;'><br/><br/><br/><b>Check the 'Enable/Disable FC' option in Maintenance -></b><a href= 'main.py?page=sr'><span style='text-decoration:underline;'>Services</span></a>.</div>"
print"""
<!--body wrapper end-->
</body>
</html>
"""
|
[
"[email protected]"
] | |
d3c0c2a4b226f7e7de023845098715c9f079029c
|
6484cdf98189f5f5736950c81a9d8d30e0f0c0db
|
/notifications/serializers.py
|
488db18520ad943f4fc0b50ec121588e37fe25bd
|
[] |
no_license
|
AlexFrundin/great_app_example
|
e0e9c91f06bfba76058f3af5b113a9399945bf6c
|
23225e7e88f2ee51359d23cac2200b32b8bd6e20
|
refs/heads/main
| 2023-05-30T15:02:22.035811 | 2021-06-17T06:40:06 | 2021-06-17T06:40:06 | 339,434,159 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 519 |
py
|
from rest_framework import serializers
from .models import Notification
# This class is use for serialize the data of user profile details
class NoitifcationListSerializer(serializers.ModelSerializer):
created_on = serializers.DateTimeField(format="%d %b %Y")
class Meta:
model = Notification
fields = (
'id',
'refrence_id',
'event_id',
'title',
'message',
'is_read',
'is_deleted',
'created_on')
|
[
"[email protected]"
] | |
0a6a1c337560a7be7affe868a65af85fb574f072
|
15581a76b36eab6062e71d4e5641cdfaf768b697
|
/LeetCode_30days_challenge/2021/February/Peeking Iterator.py
|
1c47322e8ae397e80fa7c43ca73eea44f3a2c292
|
[] |
no_license
|
MarianDanaila/Competitive-Programming
|
dd61298cc02ca3556ebc3394e8d635b57f58b4d2
|
3c5a662e931a5aa1934fba74b249bce65a5d75e2
|
refs/heads/master
| 2023-05-25T20:03:18.468713 | 2023-05-16T21:45:08 | 2023-05-16T21:45:08 | 254,296,597 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,642 |
py
|
# Below is the interface for Iterator, which is already defined for you.
#
# class Iterator:
# def __init__(self, nums):
# """
# Initializes an iterator object to the beginning of a list.
# :type nums: List[int]
# """
#
# def hasNext(self):
# """
# Returns true if the iteration has more elements.
# :rtype: bool
# """
#
# def next(self):
# """
# Returns the next element in the iteration.
# :rtype: int
# """
class PeekingIterator:
def __init__(self, iterator):
"""
Initialize your data structure here.
:type iterator: Iterator
"""
self.iterator = iterator
if self.iterator.hasNext():
self.buffer = self.iterator.next()
else:
self.buffer = None
def peek(self):
"""
Returns the next element in the iteration without advancing the iterator.
:rtype: int
"""
return self.buffer
def next(self):
"""
:rtype: int
"""
tmp = self.buffer
if self.iterator.hasNext():
self.buffer = self.iterator.next()
else:
self.buffer = None
return tmp
def hasNext(self):
"""
:rtype: bool
"""
return self.buffer is not None
# Your PeekingIterator object will be instantiated and called as such:
# iter = PeekingIterator(Iterator(nums))
# while iter.hasNext():
# val = iter.peek() # Get the next element but not advance the iterator.
# iter.next() # Should return the same value as [val].
|
[
"[email protected]"
] | |
050fbf37649611034d2d17fa1d8f6eaaec527045
|
99b784550a6d306147c022c8d829800b0fbb8c68
|
/Part_1_Basics/Chapter_9_Classes/number_served.py
|
c4bf3cff3db3a73bcf0555f68427754403f58a40
|
[] |
no_license
|
apuya/python_crash_course
|
116d6598f656d8fed0b4184edbce8e996cd0f564
|
0b2e8a6e9849a198cfb251706500a919d6f51fe7
|
refs/heads/main
| 2023-06-03T22:41:03.203889 | 2021-06-16T04:07:28 | 2021-06-16T04:07:28 | 367,812,531 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,282 |
py
|
# Python Crash Course: A Hands-On, Project-Based Introduction To Programming
#
# Name: Mark Lester Apuya
# Date: 06/12/2021
#
# Chapter 9: Classes
#
# Exercise 9.4 Number Served:
# Start with your program from Exercise 9-1 (page 162). Add an attribute
# called number_served with a default value of 0. Create an instance called
# restaurant from this class. Print the number of customers the restaurant has
# served, and then change this value and print it again.
# Add a method called set_number_served() that lets you set the number of
# customers that have been served. Call this method with a new number and print
# the value again.
# Add a method called increment_number_served() that lets you increment the
# number of customers who’ve been served. Call this method with any number you
# like that could represent how many customers were served in, say, a day of
# business.
class Restaurant:
"""
Restaurant information.
"""
def __init__(self, restaurant_name, cuisine_type):
"""
Initialize restuarant name and cuisine type
"""
self.restaurant_name = restaurant_name
self.cuisine_type = cuisine_type
self.number_served = 0
def discribe_restaurant(self):
"""
Prints restaurant information.
"""
print(f"\n{self.restaurant_name} serves {self.cuisine_type}")
def open_restaurant(self):
"""
Prints that the restaurant is open.
"""
print(f"\n{self.restaurant_name} is open.")
def set_number_served(self, number_served):
"""
Set the number of customers served.
"""
self.number_served = number_served
def increment_number_served(self, number_served):
"""
Increment the number of customers who have been served.
"""
self.number_served += number_served
restaurant = Restaurant('Olive Garden', 'Italian')
restaurant.discribe_restaurant()
print(f"\nNumber served: {restaurant.number_served}")
restaurant.number_served = 22
print(f"\nNumber served: {restaurant.number_served}")
restaurant.set_number_served(20)
print(f"\nNumber served: {restaurant.number_served}")
restaurant.increment_number_served(2)
print(f"\nNumber served: {restaurant.number_served}")
|
[
"[email protected]"
] | |
b91cb3c12a2949a4360518e9abecbc11298c03dd
|
230b7714d61bbbc9a75dd9adc487706dffbf301e
|
/third_party/blink/web_tests/external/wpt/tools/wptrunner/wptrunner/environment.py
|
2493f1fa4407a39aad3ac3c2a724322b75b0944a
|
[
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-w3c-03-bsd-license"
] |
permissive
|
byte4byte/cloudretro
|
efe4f8275f267e553ba82068c91ed801d02637a7
|
4d6e047d4726c1d3d1d119dfb55c8b0f29f6b39a
|
refs/heads/master
| 2023-02-22T02:59:29.357795 | 2021-01-25T02:32:24 | 2021-01-25T02:32:24 | 197,294,750 | 1 | 2 |
BSD-3-Clause
| 2019-09-11T19:35:45 | 2019-07-17T01:48:48 | null |
UTF-8
|
Python
| false | false | 8,027 |
py
|
import json
import os
import multiprocessing
import signal
import socket
import sys
import time
from mozlog import get_default_logger, handlers, proxy
from .wptlogging import LogLevelRewriter
here = os.path.split(__file__)[0]
repo_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir, os.pardir))
sys.path.insert(0, repo_root)
from tools import localpaths # noqa: flake8
from wptserve.handlers import StringHandler
serve = None
def do_delayed_imports(logger, test_paths):
global serve
serve_root = serve_path(test_paths)
sys.path.insert(0, serve_root)
failed = []
try:
from tools.serve import serve
except ImportError:
failed.append("serve")
if failed:
logger.critical(
"Failed to import %s. Ensure that tests path %s contains web-platform-tests" %
(", ".join(failed), serve_root))
sys.exit(1)
def serve_path(test_paths):
return test_paths["/"]["tests_path"]
class TestEnvironmentError(Exception):
pass
class TestEnvironment(object):
def __init__(self, test_paths, testharness_timeout_multipler, pause_after_test, debug_info, options, ssl_config, env_extras):
"""Context manager that owns the test environment i.e. the http and
websockets servers"""
self.test_paths = test_paths
self.server = None
self.config_ctx = None
self.config = None
self.testharness_timeout_multipler = testharness_timeout_multipler
self.pause_after_test = pause_after_test
self.test_server_port = options.pop("test_server_port", True)
self.debug_info = debug_info
self.options = options if options is not None else {}
self.cache_manager = multiprocessing.Manager()
self.stash = serve.stash.StashServer()
self.env_extras = env_extras
self.env_extras_cms = None
self.ssl_config = ssl_config
def __enter__(self):
self.config_ctx = self.build_config()
self.config = self.config_ctx.__enter__()
self.stash.__enter__()
self.cache_manager.__enter__()
self.setup_server_logging()
assert self.env_extras_cms is None, (
"A TestEnvironment object cannot be nested")
self.env_extras_cms = []
for env in self.env_extras:
cm = env(self.options, self.config)
cm.__enter__()
self.env_extras_cms.append(cm)
self.servers = serve.start(self.config,
self.get_routes())
if self.options.get("supports_debugger") and self.debug_info and self.debug_info.interactive:
self.ignore_interrupts()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.process_interrupts()
for scheme, servers in self.servers.iteritems():
for port, server in servers:
server.kill()
for cm in self.env_extras_cms:
cm.__exit__(exc_type, exc_val, exc_tb)
self.env_extras_cms = None
self.cache_manager.__exit__(exc_type, exc_val, exc_tb)
self.stash.__exit__()
self.config_ctx.__exit__(exc_type, exc_val, exc_tb)
def ignore_interrupts(self):
signal.signal(signal.SIGINT, signal.SIG_IGN)
def process_interrupts(self):
signal.signal(signal.SIGINT, signal.SIG_DFL)
def build_config(self):
override_path = os.path.join(serve_path(self.test_paths), "config.json")
config = serve.ConfigBuilder()
config.ports = {
"http": [8000, 8001],
"https": [8443],
"ws": [8888],
"wss": [8889],
}
if os.path.exists(override_path):
with open(override_path) as f:
override_obj = json.load(f)
config.update(override_obj)
config.check_subdomains = False
ssl_config = self.ssl_config.copy()
ssl_config["encrypt_after_connect"] = self.options.get("encrypt_after_connect", False)
config.ssl = ssl_config
if "browser_host" in self.options:
config.browser_host = self.options["browser_host"]
if "bind_address" in self.options:
config.bind_address = self.options["bind_address"]
config.server_host = self.options.get("server_host", None)
config.doc_root = serve_path(self.test_paths)
return config
def setup_server_logging(self):
server_logger = get_default_logger(component="wptserve")
assert server_logger is not None
log_filter = handlers.LogLevelFilter(lambda x:x, "info")
# Downgrade errors to warnings for the server
log_filter = LogLevelRewriter(log_filter, ["error"], "warning")
server_logger.component_filter = log_filter
server_logger = proxy.QueuedProxyLogger(server_logger)
try:
#Set as the default logger for wptserve
serve.set_logger(server_logger)
serve.logger = server_logger
except Exception:
# This happens if logging has already been set up for wptserve
pass
def get_routes(self):
route_builder = serve.RoutesBuilder()
for path, format_args, content_type, route in [
("testharness_runner.html", {}, "text/html", "/testharness_runner.html"),
(self.options.get("testharnessreport", "testharnessreport.js"),
{"output": self.pause_after_test,
"timeout_multiplier": self.testharness_timeout_multipler,
"explicit_timeout": "true" if self.debug_info is not None else "false"},
"text/javascript;charset=utf8",
"/resources/testharnessreport.js")]:
path = os.path.normpath(os.path.join(here, path))
# Note that .headers. files don't apply to static routes, so we need to
# readd any static headers here.
headers = {"Cache-Control": "max-age=3600"}
route_builder.add_static(path, format_args, content_type, route,
headers=headers)
data = b""
with open(os.path.join(repo_root, "resources", "testdriver.js"), "rb") as fp:
data += fp.read()
with open(os.path.join(here, "testdriver-extra.js"), "rb") as fp:
data += fp.read()
route_builder.add_handler(b"GET", b"/resources/testdriver.js",
StringHandler(data, "text/javascript"))
for url_base, paths in self.test_paths.iteritems():
if url_base == "/":
continue
route_builder.add_mount_point(url_base, paths["tests_path"])
if "/" not in self.test_paths:
del route_builder.mountpoint_routes["/"]
return route_builder.get_routes()
def ensure_started(self):
# Pause for a while to ensure that the server has a chance to start
total_sleep_secs = 30
each_sleep_secs = 0.5
end_time = time.time() + total_sleep_secs
while time.time() < end_time:
failed = self.test_servers()
if not failed:
return
time.sleep(each_sleep_secs)
raise EnvironmentError("Servers failed to start: %s" %
", ".join("%s:%s" % item for item in failed))
def test_servers(self):
failed = []
host = self.config["server_host"]
for scheme, servers in self.servers.iteritems():
for port, server in servers:
if self.test_server_port:
s = socket.socket()
s.settimeout(0.1)
try:
s.connect((host, port))
except socket.error:
failed.append((host, port))
finally:
s.close()
if not server.is_alive():
failed.append((scheme, port))
return failed
|
[
"[email protected]"
] | |
36ef2e86187829ed5ae2b132e41bef8f08740314
|
5e6d8b9989247801718dd1f10009f0f7f54c1eb4
|
/sdk/python/pulumi_azure_native/compute/v20210701/gallery_application_version.py
|
85870dafc661c246411261654d85f997d3480818
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
vivimouret29/pulumi-azure-native
|
d238a8f91688c9bf09d745a7280b9bf2dd6d44e0
|
1cbd988bcb2aa75a83e220cb5abeb805d6484fce
|
refs/heads/master
| 2023-08-26T05:50:40.560691 | 2021-10-21T09:25:07 | 2021-10-21T09:25:07 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 15,510 |
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['GalleryApplicationVersionArgs', 'GalleryApplicationVersion']
@pulumi.input_type
class GalleryApplicationVersionArgs:
    # NOTE: generated by the Pulumi SDK Generator -- keep changes to
    # comments/docstrings only so the file matches regenerated output.
    def __init__(__self__, *,
                 gallery_application_name: pulumi.Input[str],
                 gallery_name: pulumi.Input[str],
                 publishing_profile: pulumi.Input['GalleryApplicationVersionPublishingProfileArgs'],
                 resource_group_name: pulumi.Input[str],
                 gallery_application_version_name: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a GalleryApplicationVersion resource.
        :param pulumi.Input[str] gallery_application_name: The name of the gallery Application Definition in which the Application Version is to be created.
        :param pulumi.Input[str] gallery_name: The name of the Shared Application Gallery in which the Application Definition resides.
        :param pulumi.Input['GalleryApplicationVersionPublishingProfileArgs'] publishing_profile: The publishing profile of a gallery image version.
        :param pulumi.Input[str] resource_group_name: The name of the resource group.
        :param pulumi.Input[str] gallery_application_version_name: The name of the gallery Application Version to be created. Needs to follow semantic version name pattern: The allowed characters are digit and period. Digits must be within the range of a 32-bit integer. Format: <MajorVersion>.<MinorVersion>.<Patch>
        :param pulumi.Input[str] location: Resource location
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags
        """
        # Required inputs are always recorded on the input bag.
        pulumi.set(__self__, "gallery_application_name", gallery_application_name)
        pulumi.set(__self__, "gallery_name", gallery_name)
        pulumi.set(__self__, "publishing_profile", publishing_profile)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        # Optional inputs are only recorded when explicitly provided.
        if gallery_application_version_name is not None:
            pulumi.set(__self__, "gallery_application_version_name", gallery_application_version_name)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    # Each property below routes through pulumi.get/pulumi.set so the engine
    # can track the input; the Azure wire name is given to @pulumi.getter.
    @property
    @pulumi.getter(name="galleryApplicationName")
    def gallery_application_name(self) -> pulumi.Input[str]:
        """
        The name of the gallery Application Definition in which the Application Version is to be created.
        """
        return pulumi.get(self, "gallery_application_name")

    @gallery_application_name.setter
    def gallery_application_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "gallery_application_name", value)

    @property
    @pulumi.getter(name="galleryName")
    def gallery_name(self) -> pulumi.Input[str]:
        """
        The name of the Shared Application Gallery in which the Application Definition resides.
        """
        return pulumi.get(self, "gallery_name")

    @gallery_name.setter
    def gallery_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "gallery_name", value)

    @property
    @pulumi.getter(name="publishingProfile")
    def publishing_profile(self) -> pulumi.Input['GalleryApplicationVersionPublishingProfileArgs']:
        """
        The publishing profile of a gallery image version.
        """
        return pulumi.get(self, "publishing_profile")

    @publishing_profile.setter
    def publishing_profile(self, value: pulumi.Input['GalleryApplicationVersionPublishingProfileArgs']):
        pulumi.set(self, "publishing_profile", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="galleryApplicationVersionName")
    def gallery_application_version_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the gallery Application Version to be created. Needs to follow semantic version name pattern: The allowed characters are digit and period. Digits must be within the range of a 32-bit integer. Format: <MajorVersion>.<MinorVersion>.<Patch>
        """
        return pulumi.get(self, "gallery_application_version_name")

    @gallery_application_version_name.setter
    def gallery_application_version_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "gallery_application_version_name", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        Resource location
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        Resource tags
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
class GalleryApplicationVersion(pulumi.CustomResource):
    # NOTE: generated by the Pulumi SDK Generator -- keep changes to
    # comments/docstrings only so the file matches regenerated output.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 gallery_application_name: Optional[pulumi.Input[str]] = None,
                 gallery_application_version_name: Optional[pulumi.Input[str]] = None,
                 gallery_name: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 publishing_profile: Optional[pulumi.Input[pulumi.InputType['GalleryApplicationVersionPublishingProfileArgs']]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 __props__=None):
        """
        Specifies information about the gallery Application Version that you want to create or update.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] gallery_application_name: The name of the gallery Application Definition in which the Application Version is to be created.
        :param pulumi.Input[str] gallery_application_version_name: The name of the gallery Application Version to be created. Needs to follow semantic version name pattern: The allowed characters are digit and period. Digits must be within the range of a 32-bit integer. Format: <MajorVersion>.<MinorVersion>.<Patch>
        :param pulumi.Input[str] gallery_name: The name of the Shared Application Gallery in which the Application Definition resides.
        :param pulumi.Input[str] location: Resource location
        :param pulumi.Input[pulumi.InputType['GalleryApplicationVersionPublishingProfileArgs']] publishing_profile: The publishing profile of a gallery image version.
        :param pulumi.Input[str] resource_group_name: The name of the resource group.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: GalleryApplicationVersionArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Specifies information about the gallery Application Version that you want to create or update.

        :param str resource_name: The name of the resource.
        :param GalleryApplicationVersionArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either an args bundle
        # (GalleryApplicationVersionArgs) or plain keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(GalleryApplicationVersionArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 gallery_application_name: Optional[pulumi.Input[str]] = None,
                 gallery_application_version_name: Optional[pulumi.Input[str]] = None,
                 gallery_name: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 publishing_profile: Optional[pulumi.Input[pulumi.InputType['GalleryApplicationVersionPublishingProfileArgs']]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: validate required inputs (unless the
            # resource is being adopted by URN) and build the property bag.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = GalleryApplicationVersionArgs.__new__(GalleryApplicationVersionArgs)

            if gallery_application_name is None and not opts.urn:
                raise TypeError("Missing required property 'gallery_application_name'")
            __props__.__dict__["gallery_application_name"] = gallery_application_name
            __props__.__dict__["gallery_application_version_name"] = gallery_application_version_name
            if gallery_name is None and not opts.urn:
                raise TypeError("Missing required property 'gallery_name'")
            __props__.__dict__["gallery_name"] = gallery_name
            __props__.__dict__["location"] = location
            if publishing_profile is None and not opts.urn:
                raise TypeError("Missing required property 'publishing_profile'")
            __props__.__dict__["publishing_profile"] = publishing_profile
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["tags"] = tags
            # Output-only properties start as None and are filled by the provider.
            __props__.__dict__["name"] = None
            __props__.__dict__["provisioning_state"] = None
            __props__.__dict__["replication_status"] = None
            __props__.__dict__["type"] = None
        # Aliases let states created under older API versions / token names
        # migrate to this resource type without replacement.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:compute/v20210701:GalleryApplicationVersion"), pulumi.Alias(type_="azure-native:compute:GalleryApplicationVersion"), pulumi.Alias(type_="azure-nextgen:compute:GalleryApplicationVersion"), pulumi.Alias(type_="azure-native:compute/v20190301:GalleryApplicationVersion"), pulumi.Alias(type_="azure-nextgen:compute/v20190301:GalleryApplicationVersion"), pulumi.Alias(type_="azure-native:compute/v20190701:GalleryApplicationVersion"), pulumi.Alias(type_="azure-nextgen:compute/v20190701:GalleryApplicationVersion"), pulumi.Alias(type_="azure-native:compute/v20191201:GalleryApplicationVersion"), pulumi.Alias(type_="azure-nextgen:compute/v20191201:GalleryApplicationVersion"), pulumi.Alias(type_="azure-native:compute/v20200930:GalleryApplicationVersion"), pulumi.Alias(type_="azure-nextgen:compute/v20200930:GalleryApplicationVersion")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(GalleryApplicationVersion, __self__).__init__(
            'azure-native:compute/v20210701:GalleryApplicationVersion',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'GalleryApplicationVersion':
        """
        Get an existing GalleryApplicationVersion resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # All properties start as None; they are populated from the provider
        # when the existing resource is read.
        __props__ = GalleryApplicationVersionArgs.__new__(GalleryApplicationVersionArgs)

        __props__.__dict__["location"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["provisioning_state"] = None
        __props__.__dict__["publishing_profile"] = None
        __props__.__dict__["replication_status"] = None
        __props__.__dict__["tags"] = None
        __props__.__dict__["type"] = None
        return GalleryApplicationVersion(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        Resource location
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> pulumi.Output[str]:
        """
        The provisioning state, which only appears in the response.
        """
        return pulumi.get(self, "provisioning_state")

    @property
    @pulumi.getter(name="publishingProfile")
    def publishing_profile(self) -> pulumi.Output['outputs.GalleryApplicationVersionPublishingProfileResponse']:
        """
        The publishing profile of a gallery image version.
        """
        return pulumi.get(self, "publishing_profile")

    @property
    @pulumi.getter(name="replicationStatus")
    def replication_status(self) -> pulumi.Output['outputs.ReplicationStatusResponse']:
        """
        This is the replication status of the gallery image version.
        """
        return pulumi.get(self, "replication_status")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        Resource tags
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Resource type
        """
        return pulumi.get(self, "type")
|
[
"[email protected]"
] | |
242f80c5d1c207d66d4fd11b8d495d63cf4a6543
|
4b2c5fe21ffcc35837bba06d2c3b43c5116f74bd
|
/Bit++.py
|
b021896ca96ab26196e29a12c95ef313ebda47fc
|
[] |
no_license
|
joydas65/Codeforces-Problems
|
8870cbbf1db9fa12b961cee7aaef60960af714ae
|
eb0f5877d0fede95af18694278029add7385973d
|
refs/heads/master
| 2023-06-23T07:16:49.151676 | 2023-06-17T07:28:24 | 2023-06-17T07:28:24 | 184,123,514 | 5 | 1 | null | 2020-11-28T07:28:03 | 2019-04-29T18:33:23 |
Python
|
UTF-8
|
Python
| false | false | 212 |
py
|
# Codeforces 282A (Bit++): apply each "++X"/"X--"-style statement to x.
total = 0
for _ in range(int(input())):
    statement = input()
    # Every valid statement contains either "++" or "--", so spotting a
    # single '+' or '-' anywhere in the line classifies it.
    if '+' in statement:
        total += 1
    elif '-' in statement:
        total -= 1
print(total)
|
[
"[email protected]"
] | |
20eb7196fe3b002591b7b276815778936aebeb54
|
4eb76ddbe2bf6d7fb8ee791dcaa1dfaccd4a09b0
|
/jitai/events/EventTemplate.py
|
e85c491ebb1b21082dabbe5b4fef53d7216dc3b1
|
[] |
no_license
|
koike-ya/research
|
3cae0be17a8871d5782842510676c05a75627c49
|
3ff99c56c8e5d6c57ee65f1bca2431f3dc6f6593
|
refs/heads/master
| 2021-10-12T03:13:20.645738 | 2019-01-26T07:12:58 | 2019-01-26T07:12:58 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,138 |
py
|
from abc import ABC
from datetime import datetime, timedelta
import pandas as pd
from jitai.src.utils import set_hour_minute
class EventTemplate(ABC):
    """Base class for JITAI event conditions evaluated against EMA data.

    Subclasses configure themselves from ``param`` and decide in ``run()``
    whether the condition currently holds for the user's EMA records.
    """
    def __init__(self, param, user_info, ema, logger):
        # `ema` appears to be a pandas DataFrame with at least "question",
        # "answer" and "end" columns (inferred from usage below -- TODO confirm).
        self.param = param
        self.ema = ema
        self.name = param["condition_name"]
        self.ema_content = param["ema_content"]
        self.user_info = user_info
        self.logger = logger
        self.exists = param["exists"]
        self.ema_time = self.param["ema_time"]  # the value of param["ema_time"] is a dict
    def _init_ema_content(self):
        # Threshold settings only apply when a concrete EMA item is targeted.
        if not self.ema_content == "none":
            self.threshold = self.param["threshold"]
            self.more_or_less = self.param["more_or_less"]
    def _init_ema_time(self):
        # Time-window configuration: either an absolute [from, to] window
        # ("set_time") or a trailing window ending now ("interval").
        if list(self.ema_time.keys())[0] == "set_time":
            from_ = datetime.strptime(self.ema_time["set_time"]["from"], "%H:%M")
            self.ema_from_ = set_hour_minute(datetime.today(), from_)
            to = datetime.strptime(self.ema_time["set_time"]["to"], "%H:%M")
            self.ema_to = set_hour_minute(datetime.today(), to)
        if list(self.ema_time.keys())[0] == "interval":
            t = datetime.strptime(self.ema_time["interval"]["value"], "%H:%M")
            self.ema_from_ = datetime.today() - timedelta(hours=t.hour, minutes=t.minute)
            self.ema_to = datetime.today()
    def _validate_params(self):
        # TODO: raise an error when the given parameters are not valid,
        # e.g. when self.ema_content names a question we do not expect.
        pass
    def _extract_about_time(self):
        # Keep only the EMA rows whose "end" timestamp lies in the window.
        self.ema = self.ema[(self.ema["end"] >= self.ema_from_) & (self.ema["end"] <= self.ema_to)]
    def _ema_content_not_none(self):
        # This method is only valid for DAMS items; override it for anything else.
        content_df = self.ema[self.ema["question"] == self.ema_content]
        content_df = content_df.astype({"answer": int})
        if not content_df.empty:
            if self.more_or_less == "more":
                self.ema = content_df[content_df["answer"] >= self.threshold]
            elif self.more_or_less == "less":
                self.ema = content_df[content_df["answer"] < self.threshold]
        else:
            # NOTE(review): passing a DataFrame as `columns` looks wrong;
            # pd.DataFrame(columns=self.ema.columns) was probably intended -- confirm.
            self.ema = pd.DataFrame(columns=self.ema)
    def get_depend_class_last_ema_time(self):
        # TODO: needs testing -- does the logic really hold for use=False?
        if hasattr(self.depend_class, "use"):
            # NOTE(review): `res` is unused, and run() is called on the EMA
            # data rather than on the dependent event; possibly meant
            # self.depend_class.run() -- confirm.
            res = self.depend_class.ema.run()
        depend_ema = self.depend_class.ema
        if depend_ema.empty:
            self.ema = pd.DataFrame()
            return 0
        depend_ema.reset_index(drop=True, inplace=True)
        # Return the "end" timestamp of the dependent event's last EMA row.
        return depend_ema.loc[depend_ema.shape[0] - 1, "end"]
    def _depend_condition(self):
        # Conditions for dependency relations between events live here.
        self.ema_from_ = self.get_depend_class_last_ema_time()
        t = datetime.strptime(self.param["ema_time"]["interval"]["value"], "%H:%M")
        if self.ema_from_ != 0 and datetime.today() >= self.ema_from_ + timedelta(hours=t.hour, minutes=t.minute):
            return True
        else:
            return False
    def _run(self):
        # Successively narrow self.ema by time window, then by content threshold.
        if not self.ema.empty:
            self._extract_about_time()
        if not self.ema.empty and not self.ema_content == "none":
            self._ema_content_not_none()
    def run(self):
        """Return True when this event condition is currently satisfied."""
        if hasattr(self, "depend_class"):
            fill_cond_flag = self._depend_condition()
            # Returns False when the configured interval has not yet elapsed.
            if not fill_cond_flag:
                return False
        self._run()
        if self.exists:
            return True if not self.ema.empty else False
        else:
            return True if self.ema.empty else False
    def add_depend_class(self, depend_class):
        # Register another event instance that this one depends on.
        self.depend_class = depend_class
    def copy(self):
        # NOTE(review): this is a shallow copy -- the new instance shares the
        # same mutable param/ema objects with the original.
        return EventTemplate(self.param, self.user_info, self.ema, self.logger)
|
[
"[email protected]"
] | |
2527f4d9fd54b3e27de63af10a0a6823676bffc5
|
8f63cf27e69bc44dcd11e63a0c396b398443009b
|
/tests/unit/util/iterables.py
|
454eaf3914e1ade640b62d055b97606ada1ab216
|
[
"MIT"
] |
permissive
|
ethanjli/phylline
|
fae756dbbead0351dd11c770158a1aa08fa363d2
|
f11307d0f37ca835996250e1e835c44abd282769
|
refs/heads/master
| 2021-01-01T23:56:41.018911 | 2020-02-25T05:07:34 | 2020-02-25T05:07:34 | 239,400,454 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,382 |
py
|
"""Test the util.iterables module."""
# Builtins
# Packages
from phylline.util.iterables import make_collection, remove_none
def test_make_collection_singleton():
    """Singletons should be wrapped into a list, or the requested type."""
    wrapped = 42
    assert make_collection(wrapped) != wrapped
    assert make_collection(wrapped) == [wrapped]
    assert make_collection(wrapped) != (wrapped,)
    assert make_collection(wrapped, type=tuple) != wrapped
    assert make_collection(wrapped, type=tuple) != [wrapped]
    assert make_collection(wrapped, type=tuple) == (wrapped,)
def test_make_collection_iterable():
    """Iterables should be materialized into a list, or the requested type."""
    source = range(5)
    assert make_collection(source) != source
    assert make_collection(source) == list(source)
    assert make_collection(source) != tuple(source)
    assert make_collection(source, type=tuple) != source
    assert make_collection(source, type=tuple) != list(source)
    assert make_collection(source, type=tuple) == tuple(source)
def test_remove_none():
    """remove_none should drop None values and keep everything else in order."""
    assert len(tuple(remove_none(range(5)))) == len(tuple(range(5)))
    for expected, kept in zip(range(5), remove_none(range(5))):
        assert expected == kept
    mixed = [1, 2, None, 3]
    assert len(tuple(remove_none(mixed))) == 3
    assert tuple(remove_none(mixed)) == (1, 2, 3)
|
[
"[email protected]"
] | |
b86f37f64be3a4a6a783e0cc8de77ab087a399bf
|
4b360696d512a35b2114c482c658d10e3ff91a2c
|
/project-addons/mail_ph/models/__init__.py
|
94a375f2116535169a7287ca79e29be1a3feb530
|
[] |
no_license
|
Comunitea/CMNT_010_15_PHA
|
24ecf3be6a50441dfa3dd8deca4ee96ac5e61970
|
d4a24aafba48fc7dda7ee662e0c7e1112c220162
|
refs/heads/master
| 2022-08-12T00:39:24.464028 | 2022-07-11T10:30:30 | 2022-07-11T10:31:31 | 37,918,119 | 0 | 1 | null | 2015-12-02T12:39:43 | 2015-06-23T12:37:45 |
Python
|
UTF-8
|
Python
| false | false | 256 |
py
|
# -*- coding: utf-8 -*-
# © 2020 Pharmadus I.T.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from . import res_partner, sale_order, purchase_order, hr_holidays, \
account_invoice, res_company, stock, mail_compose_message, mail_mail
|
[
"[email protected]"
] | |
bb92611663129085e0c2b2b258620024399268b9
|
24d070c6410fdf7212c4c37c2fadc932cd4e8aec
|
/trac/tests/notification.py
|
f2f6ce13b9e162a72b77a90a539f2142f77a07ba
|
[
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] |
permissive
|
clubturbo/Trac-1.4.2
|
4f111e8df9e8007a0e02080bec560361b25fc11c
|
254ce54a3c2fb86b4f31810ddeabbd4ff8b54a78
|
refs/heads/master
| 2023-01-20T16:20:44.724154 | 2020-12-03T08:57:08 | 2020-12-03T08:57:08 | 317,922,011 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 15,655 |
py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2020 Edgewall Software
# Copyright (C) 2005-2006 Emmanuel Blot <[email protected]>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at https://trac.edgewall.org/log/.
#
# Include a basic SMTP server, based on L. Smithson
# ([email protected]) extensible Python SMTP Server
#
# This file does not contain unit tests, but provides a set of
# classes to run SMTP notification tests
#
import base64
import os
import quopri
import re
import socket
import string
import threading
import unittest
from contextlib import closing
from trac.config import ConfigurationError
from trac.notification import SendmailEmailSender, SmtpEmailSender
from trac.test import EnvironmentStub
LF = '\n'
CR = '\r'
# Derive a per-process port in [7000, 7999] so parallel test runs don't collide.
SMTP_TEST_PORT = 7000 + os.getpid() % 1000
# Matches RFC 2047 "encoded-word" header values, e.g. =?utf-8?q?...?=
# (charset, then q/b encoding, then the encoded payload).
header_re = re.compile(r'^=\?(?P<charset>[\w\d\-]+)\?(?P<code>[qb])\?(?P<value>.*)\?=$')
class SMTPServerInterface(object):
    """
    Base class for an application-specific SMTP server.

    Subclass this and override the hooks below, which do nothing by
    default. One hook exists per RFC821 command; 'args' is the complete
    command line received from the client, and 'data' is invoked once
    the full client DATA payload has arrived.

    Returning None from a hook sends an automatic '250 OK' reply to the
    client; returning a non-empty string sends that string instead.
    """
    def helo(self, args):
        """Handle the HELO command."""
        return None

    def mail_from(self, args):
        """Handle the MAIL FROM: command."""
        return None

    def rcpt_to(self, args):
        """Handle the RCPT TO: command."""
        return None

    def data(self, args):
        """Called with the complete message once DATA has been received."""
        return None

    def quit(self, args):
        """Handle the QUIT command."""
        return None

    def reset(self, args):
        """Handle the RSET command."""
        return None
#
# Some helper functions for manipulating from & to addresses etc.
#
def strip_address(address):
    """
    Strip the leading & trailing <> from an address. Handy for
    getting FROM: addresses.

    Raises ValueError when either angle bracket is missing.
    """
    # str.index() replaces the Python-2-only string.index() module function,
    # which no longer exists on Python 3; behaviour is otherwise identical.
    start = address.index('<') + 1
    end = address.index('>')
    return address[start:end]
def split_to(address):
"""
Return 'address' as undressed (host, fulladdress) tuple.
Handy for use with TO: addresses.
"""
start = string.index(address, '<') + 1
sep = string.index(address, '@') + 1
end = string.index(address, '>')
return address[sep:end], address[start:end]
#
# This drives the state for a single RFC821 message.
#
class SMTPServerEngine(object):
    """
    Server engine that calls methods on the SMTPServerInterface object
    passed at construction time. It is constructed with a bound socket
    connection to a client. The 'chug' method drives the state,
    returning when the client RFC821 transaction is complete.
    """
    # Session states driven by do_command()/do_data().
    ST_INIT = 0
    ST_HELO = 1
    ST_MAIL = 2
    ST_RCPT = 3
    ST_DATA = 4
    ST_QUIT = 5

    def __init__(self, socket, impl):
        # `socket` is a connected socket object (it shadows the module name
        # inside this method); `impl` is an SMTPServerInterface implementation.
        self.impl = impl
        self.socket = socket
        self.state = SMTPServerEngine.ST_INIT

    def chug(self):
        """
        Chug the engine, till QUIT is received from the client. As
        each RFC821 message is received, calls are made on the
        SMTPServerInterface methods on the object passed at
        construction time.
        """
        # NOTE: Python 2 style -- str (bytes) are sent/received on the
        # socket without any explicit encoding step.
        self.socket.send("220 Welcome to Trac notification test server\r\n")
        while 1:
            data = ''
            complete_line = 0
            # Make sure an entire line is received before handing off
            # to the state engine. Thanks to John Hall for pointing
            # this out.
            while not complete_line:
                try:
                    lump = self.socket.recv(1024)
                    if lump:
                        data += lump
                        if len(data) >= 2 and data[-2:] == '\r\n':
                            complete_line = 1
                            if self.state != SMTPServerEngine.ST_DATA:
                                rsp, keep = self.do_command(data)
                            else:
                                # NOTE(review): `keep` is not reassigned in the
                                # DATA state, so the `if keep == 0` below reuses
                                # the value from the previous command; this only
                                # works because DATA always follows a command
                                # that set keep=1.
                                rsp = self.do_data(data)
                            if rsp is None:
                                # No reply yet (still accumulating DATA);
                                # fall out of the inner loop for the next line.
                                continue
                            self.socket.send(rsp + "\r\n")
                            if keep == 0:
                                self.socket.close()
                                return
                    else:
                        # EOF
                        return
                except socket.error:
                    return

    def do_command(self, data):
        """Process a single SMTP Command"""
        cmd = data[0:4]
        # string.upper() is the Python-2 spelling of cmd.upper().
        cmd = string.upper(cmd)
        keep = 1
        rv = None
        if cmd == "HELO":
            self.state = SMTPServerEngine.ST_HELO
            rv = self.impl.helo(data[5:])
        elif cmd == "RSET":
            rv = self.impl.reset(data[5:])
            self.data_accum = ""
            self.state = SMTPServerEngine.ST_INIT
        elif cmd == "NOOP":
            pass
        elif cmd == "QUIT":
            rv = self.impl.quit(data[5:])
            keep = 0
        elif cmd == "MAIL":
            if self.state != SMTPServerEngine.ST_HELO:
                return "503 Bad command sequence", 1
            self.state = SMTPServerEngine.ST_MAIL
            rv = self.impl.mail_from(data[5:])
        elif cmd == "RCPT":
            # RCPT may repeat, so both MAIL and RCPT are valid prior states.
            if (self.state != SMTPServerEngine.ST_MAIL) and \
               (self.state != SMTPServerEngine.ST_RCPT):
                return "503 Bad command sequence", 1
            self.state = SMTPServerEngine.ST_RCPT
            rv = self.impl.rcpt_to(data[5:])
        elif cmd == "DATA":
            if self.state != SMTPServerEngine.ST_RCPT:
                return "503 Bad command sequence", 1
            self.state = SMTPServerEngine.ST_DATA
            self.data_accum = ""
            return "354 OK, Enter data, terminated with a \\r\\n.\\r\\n", 1
        else:
            return "505 Eh? WTF was that?", 1
        if rv:
            return rv, keep
        else:
            return "250 OK", keep

    def do_data(self, data):
        """
        Process SMTP Data. Accumulates client DATA until the
        terminator is found.
        """
        self.data_accum = self.data_accum + data
        if len(self.data_accum) > 4 and self.data_accum[-5:] == '\r\n.\r\n':
            # Strip the trailing CRLF.CRLF terminator before delivery.
            self.data_accum = self.data_accum[:-5]
            rv = self.impl.data(self.data_accum)
            self.state = SMTPServerEngine.ST_HELO
            if rv:
                return rv
            else:
                return "250 OK - Data and terminator. found"
        else:
            # Terminator not yet seen: keep accumulating, send no reply.
            return None
class SMTPServer(object):
    """
    A single threaded SMTP Server connection manager. Listens for
    incoming SMTP connections on a given port. For each connection,
    the SMTPServerEngine is chugged, passing the given instance of
    SMTPServerInterface.
    """
    def __init__(self, host, port):
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # SO_REUSEADDR lets consecutive test runs rebind the port without
        # waiting out TIME_WAIT.
        self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self._socket.bind((host, port))
        # Socket of the currently served client connection, if any.
        self._socket_service = None

    def serve(self, impl):
        # Serve one connection at a time until stop() clears _resume.
        # NOTE(review): `_resume` is only created by start(), so serve()
        # must not be called before start().
        while self._resume:
            try:
                nsd = self._socket.accept()
            except socket.error:
                return
            self._socket_service = nsd[0]
            engine = SMTPServerEngine(self._socket_service, impl)
            engine.chug()
            self._socket_service = None

    def start(self):
        # Begin listening; must be called before serve().
        self._socket.listen(1)
        self._resume = True

    def stop(self):
        # Ask serve() to exit after the current connection completes.
        self._resume = False

    def terminate(self):
        # Forcefully close any live sockets so blocking calls unwind.
        if self._socket_service:
            # force the blocking socket to stop waiting for data
            try:
                #self._socket_service.shutdown(2)
                self._socket_service.close()
            except AttributeError:
                # the SMTP server may also discard the socket
                pass
            self._socket_service = None
        if self._socket:
            #self._socket.shutdown(2)
            self._socket.close()
            self._socket = None
class SMTPServerStore(SMTPServerInterface):
    """
    Records the envelope sender, recipients and message body of one SMTP
    transaction so tests can inspect what was "sent".
    """
    def __init__(self):
        self.reset(None)

    def helo(self, args):
        # A new HELO starts a fresh transaction.
        self.reset(None)

    def mail_from(self, args):
        """Capture the envelope sender from a 'FROM:<addr>' argument."""
        if args.lower().startswith('from:'):
            cleaned = args[5:].replace('\r\n', '').strip()
            self.sender = strip_address(cleaned)

    def rcpt_to(self, args):
        """Append one recipient parsed from a 'TO:<addr>' argument."""
        if args.lower().startswith('to:'):
            cleaned = args[3:].replace('\r\n', '').strip()
            self.recipients.append(strip_address(cleaned))

    def data(self, args):
        """Store the complete DATA payload."""
        self.message = args

    def quit(self, args):
        pass

    def reset(self, args):
        """Clear any previously captured transaction."""
        self.sender = None
        self.recipients = []
        self.message = None
class SMTPThreadedServer(threading.Thread):
    """
    Run a SMTP server for a single connection, within a dedicated thread
    """
    def __init__(self, port):
        self.host = '127.0.0.1'
        self.port = port
        self.server = SMTPServer(self.host, port)
        # The store captures sender/recipients/message for later inspection.
        self.store = SMTPServerStore()
        threading.Thread.__init__(self)

    def run(self):
        # run from within the SMTP server thread
        self.server.serve(impl=self.store)

    def start(self):
        # run from the main thread
        self.server.start()
        threading.Thread.start(self)

    def stop(self):
        # run from the main thread
        self.server.stop()
        # send a message to make the SMTP server quit gracefully
        with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
            try:
                s.connect(('127.0.0.1', self.port))
                s.send("QUIT\r\n")
            except socket.error:
                pass
        # wait for the SMTP server to complete (for up to 2 secs)
        self.join(2.0)
        # clean up the SMTP server (and force quit if needed)
        self.server.terminate()

    def get_sender(self):
        # Envelope sender captured by the store, or None.
        return self.store.sender

    def get_recipients(self):
        # List of recipient addresses captured by the store.
        return self.store.recipients

    def get_message(self):
        # Raw DATA payload captured by the store, or None.
        return self.store.message

    def cleanup(self):
        # Reset the captured state between test cases.
        self.store.reset(None)
def decode_header(header):
    """ Decode a MIME-encoded header value """
    # NOTE: Python-2-only -- relies on `unicode` and base64.decodestring,
    # both of which no longer exist on Python 3.
    mo = header_re.match(header)
    # header does not seem to be MIME-encoded
    if not mo:
        return header
    # attempts to decode the header,
    # following the specified MIME encoding and charset
    try:
        encoding = mo.group('code').lower()
        if encoding == 'q':
            # quoted-printable; header=True also maps '_' to space
            val = quopri.decodestring(mo.group('value'), header=True)
        elif encoding == 'b':
            val = base64.decodestring(mo.group('value'))
        else:
            raise AssertionError("unsupported encoding: %s" % encoding)
        header = unicode(val, mo.group('charset'))
    except Exception as e:
        # surface any decoding problem as a test failure
        raise AssertionError(e)
    return header
def parse_smtp_message(msg):
    """ Split a SMTP message into its headers and body.
        Returns a (headers, body) tuple
        We do not use the email/MIME Python facilities here
        as they may accept invalid RFC822 data, or data we do not
        want to support nor generate """
    headers = {}
    # `lh` remembers the last header name, for folded (multi-line) headers.
    lh = None
    body = None
    # last line does not contain the final line ending
    msg += '\r\n'
    for line in msg.splitlines(True):
        if body is not None:
            # append current line to the body
            if line[-2] == CR:
                body += line[0:-2]
                body += '\n'
            else:
                raise AssertionError("body misses CRLF: %s (0x%x)"
                                     % (line, ord(line[-1])))
        else:
            if line[-2] != CR:
                # RFC822 requires CRLF at end of field line
                raise AssertionError("header field misses CRLF: %s (0x%x)"
                                     % (line, ord(line[-1])))
            # discards CR
            line = line[0:-2]
            if line.strip() == '':
                # end of headers, body starts
                body = ''
            else:
                val = None
                if line[0] in ' \t':
                    # continuation of the previous line
                    if not lh:
                        # unexpected multiline
                        raise AssertionError("unexpected folded line: %s"
                                             % line)
                    val = decode_header(line.strip(' \t'))
                    # appends the current line to the previous one
                    if not isinstance(headers[lh], tuple):
                        headers[lh] += val
                    else:
                        # NOTE(review): tuples are immutable, so this item
                        # assignment raises TypeError if a folded line ever
                        # continues a repeated header -- confirm intent.
                        headers[lh][-1] = headers[lh][-1] + val
                else:
                    # splits header name from value
                    (h, v) = line.split(':', 1)
                    val = decode_header(v.strip())
                    if h in headers:
                        if isinstance(headers[h], tuple):
                            # NOTE(review): tuple += str raises TypeError;
                            # headers[h] + (val,) was probably intended.
                            headers[h] += val
                        else:
                            # second occurrence: promote to a tuple of values
                            headers[h] = (headers[h], val)
                    else:
                        headers[h] = val
                    # stores the last header (for multi-line headers)
                    lh = h
    # returns the headers and the message body
    return headers, body
class SendmailEmailSenderTestCase(unittest.TestCase):
    """Configuration-error behaviour of SendmailEmailSender."""

    def setUp(self):
        self.env = EnvironmentStub()

    def test_sendmail_path_not_found_raises(self):
        """A sendmail_path that does not exist must raise ConfigurationError."""
        sender = SendmailEmailSender(self.env)
        missing = os.path.join(os.path.dirname(__file__), 'sendmail')
        self.env.config.set('notification', 'sendmail_path', missing)
        with self.assertRaises(ConfigurationError):
            sender.send('[email protected]', ['[email protected]'], "")
class SmtpEmailSenderTestCase(unittest.TestCase):
    """Configuration-error behaviour of SmtpEmailSender."""

    def setUp(self):
        self.env = EnvironmentStub()

    def test_smtp_server_not_found_raises(self):
        """An out-of-range SMTP port must raise ConfigurationError."""
        sender = SmtpEmailSender(self.env)
        for option, value in (('smtp_server', 'localhost'),
                              ('smtp_port', '65536')):
            self.env.config.set('notification', option, value)
        with self.assertRaises(ConfigurationError):
            sender.send('[email protected]', ['[email protected]'], "")
def test_suite():
    """Collect the notification sender test cases into a single suite."""
    suite = unittest.TestSuite()
    for case in (SendmailEmailSenderTestCase, SmtpEmailSenderTestCase):
        suite.addTest(unittest.makeSuite(case))
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
|
[
"jonn@mindhunterx"
] |
jonn@mindhunterx
|
68e9badb63dfa7f93aed88ca630799e3a43f8ee8
|
bb24d8a7f71206fac23ebef0d53f94918d7aa32d
|
/mymusic/migrations/0005_album_image_url.py
|
2818a2cbf76e1a6e207e5a6e7dae1d783a693bd1
|
[] |
no_license
|
momentum-morehouse/django-music-bettinacjohnson
|
ec3311b41df1c3c09a3993fb476c06d715a87405
|
c52f24d2f9faec73b0cad4139ebfe002bd819766
|
refs/heads/master
| 2022-11-27T02:04:49.847168 | 2020-07-16T23:46:13 | 2020-07-16T23:46:13 | 279,333,283 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 394 |
py
|
# Generated by Django 3.0.8 on 2020-07-15 20:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds an optional ``image_url`` text
    column to the ``Album`` model (mymusic app)."""

    # Must be applied after the previous mymusic migration.
    dependencies = [
        ('mymusic', '0004_auto_20200714_1510'),
    ]

    operations = [
        migrations.AddField(
            model_name='album',
            name='image_url',
            # blank=True/null=True: the URL is optional both in forms and
            # at the database level.
            field=models.TextField(blank=True, null=True),
        ),
    ]
|
[
"[email protected]"
] | |
88e16d0fac13e4e9eee8c7bea8b9fa71c55ddd68
|
9c2edc273db48dcb6d31a937510476b7c0b0cc61
|
/cython_sample/setup.py
|
aee60680780e7c7437d6abd35f1504bd902ef425
|
[] |
no_license
|
miyamotok0105/python_sample
|
4d397ac8a3a723c0789c4c3e568f3319dd754501
|
77101c981bf4f725acd20c9f4c4891b29fbaea61
|
refs/heads/master
| 2022-12-19T22:53:44.949782 | 2020-05-05T05:09:22 | 2020-05-05T05:09:22 | 81,720,469 | 1 | 0 | null | 2022-11-22T02:22:55 | 2017-02-12T11:15:08 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 731 |
py
|
#! -*- coding: utf-8 -*-
#python setup.py build_ext --inplace
from Cython.Build import cythonize
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy as np
# NumPy's header-lookup API changed over time: modern releases expose
# get_include(), very old ones only get_numpy_include().
try:
    numpy_include = np.get_include()
except AttributeError:
    numpy_include = np.get_numpy_include()

# Single Cython extension module, compiled against the NumPy headers.
ext_modules = [
    Extension(
        "sample1",
        ["sample1.pyx"],
        include_dirs=[numpy_include],
    ),
]

# Build with: python setup.py build_ext --inplace
setup(
    name='sample1',
    ext_modules=cythonize(ext_modules),
)
|
[
"[email protected]"
] | |
60d34638bc1a71aec3b30bdb71943672f3a6594b
|
88ed6ed99589f7fb8e49aeb6c15bf0d51fe14a01
|
/026_removeDuplicates.py
|
5e8dbfc5edb96029cb37d413ce206813159f712a
|
[] |
no_license
|
ryeLearnMore/LeetCode
|
3e97becb06ca2cf4ec15c43f77447b6ac2a061c6
|
04ec1eb720474a87a2995938743f05e7ad5e66e3
|
refs/heads/master
| 2020-04-07T19:02:43.171691 | 2019-06-23T15:09:19 | 2019-06-23T15:09:19 | 158,634,176 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 658 |
py
|
#!/usr/bin/env python
#coding:utf-8
#@author: rye
#@time: 2019/2/18 17:15
'''
Wrote this one very quickly... probably my fastest solve recently.
'''
class Solution:
    def removeDuplicates(self, nums):
        """Remove duplicates from a sorted list in place (LeetCode 26).

        :type nums: List[int]
        :rtype: int -- count k of unique values; nums[:k] holds them in order
        """
        # Guard the empty list: nothing to keep, count is 0.
        if not nums:
            return 0
        # Two pointers: `write` marks the last unique slot kept so far,
        # `read` scans ahead.  Returning write + 1 avoids the original's
        # len(nums[:i + 1]) slice, which copied the list just to count it.
        write = 0
        for read in range(1, len(nums)):
            if nums[read] != nums[write]:
                write += 1
                nums[write] = nums[read]
        return write + 1
if __name__ == '__main__':
    # Quick smoke test against the LeetCode sample input (prints 5).
    sample = [0, 0, 0, 1, 1, 1, 2, 2, 3, 3, 4]
    print(Solution().removeDuplicates(sample))
|
[
"[email protected]"
] | |
7ac450e80d74815ef7401aa056f3feb1952628a3
|
853d4cec42071b76a80be38c58ffe0fbf9b9dc34
|
/venv/Lib/site-packages/pandas/tests/series/test_duplicates.py
|
6577b3e54b7b981a4d18a17b1a5eb28849a224fe
|
[] |
no_license
|
msainTesting/TwitterAnalysis
|
5e1646dbf40badf887a86e125ef30a9edaa622a4
|
b1204346508ba3e3922a52380ead5a8f7079726b
|
refs/heads/main
| 2023-08-28T08:29:28.924620 | 2021-11-04T12:36:30 | 2021-11-04T12:36:30 | 424,242,582 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,717 |
py
|
import numpy as np
import pytest
from pandas import Categorical, Series
import pandas.util.testing as tm
def test_value_counts_nunique():
    """nunique counts distinct non-NaN values (basics.rst doc example)."""
    # Ten random values plus one repeated constant; the NaN tail is ignored.
    ser = Series(np.random.randn(500))
    ser[20:500] = np.nan
    ser[10:20] = 5000
    assert ser.nunique() == 11
    # GH 18051: empty and all-NaN categoricals have zero uniques.
    assert Series(Categorical([])).nunique() == 0
    assert Series(Categorical([np.nan])).nunique() == 0
def test_unique():
    """Series.unique collapses NaN runs and preserves None in object arrays."""
    # GH 714: float64, float32 and object series alternating between one
    # value and NaN must collapse to exactly two uniques.
    for values, dtype in (
        ([1.2345] * 100, None),
        ([1.2345] * 100, "f4"),
        (["foo"] * 100, "O"),
    ):
        ser = Series(values, dtype=dtype)
        ser[::2] = np.nan
        assert len(ser.unique()) == 2
    # Decision about None: it survives as an element of object arrays.
    obj = Series([1, 2, 3, None, None, None], dtype=object)
    tm.assert_numpy_array_equal(
        obj.unique(), np.array([1, 2, 3, None], dtype=object)
    )
    # GH 18051: empty / all-NaN categoricals round-trip through unique().
    for cat in (Categorical([]), Categorical([np.nan])):
        tm.assert_categorical_equal(Series(cat).unique(), cat,
                                    check_dtype=False)
def test_unique_data_ownership():
    """GH 1807: sorting a Series built from unique() output must work."""
    uniques = Series(["a", "c", "b"]).unique()
    Series(uniques).sort_values()
@pytest.mark.parametrize(
    "data, expected",
    [
        (np.random.randint(0, 10, size=1000), False),  # duplicates (almost) certain
        (np.arange(1000), True),
        ([], True),
        ([np.nan], True),
        (["foo", "bar", np.nan], True),
        (["foo", "foo", np.nan], False),
        (["foo", "bar", np.nan, np.nan], False),  # repeated NaN breaks uniqueness
    ],
)
def test_is_unique(data, expected):
    """Series.is_unique flags exact duplicates, including repeated NaN."""
    # GH11946 / GH25180
    s = Series(data)
    assert s.is_unique is expected
def test_is_unique_class_ne(capsys):
    """is_unique writes nothing to stderr even when elements' ``__ne__`` raises."""
    # GH 20661
    class Foo:
        def __init__(self, val):
            self._value = val
        def __ne__(self, other):
            # Any != comparison on these objects blows up loudly.
            raise Exception("NEQ not supported")
    with capsys.disabled():
        li = [Foo(i) for i in range(5)]
        s = Series(li, index=[i for i in range(5)])
        s.is_unique  # exercised for side effects; result intentionally unused
    captured = capsys.readouterr()
    assert len(captured.err) == 0
@pytest.mark.parametrize(
    "keep, expected",
    [
        ("first", Series([False, False, False, False, True, True, False])),
        ("last", Series([False, True, True, False, False, False, False])),
        (False, Series([False, True, True, False, True, True, False])),
    ],
)
def test_drop_duplicates(any_numpy_dtype, keep, expected):
    """duplicated/drop_duplicates agree for every numpy dtype and keep mode."""
    tc = Series([1, 0, 3, 5, 3, 0, 4], dtype=np.dtype(any_numpy_dtype))
    if tc.dtype == "bool":
        # Casting these values to bool collapses them; the bool case has
        # its own dedicated test.
        pytest.skip("tested separately in test_drop_duplicates_bool")
    tm.assert_series_equal(tc.duplicated(keep=keep), expected)
    tm.assert_series_equal(tc.drop_duplicates(keep=keep), tc[~expected])
    # inplace=True must produce the same result as the mask-based drop.
    sc = tc.copy()
    sc.drop_duplicates(keep=keep, inplace=True)
    tm.assert_series_equal(sc, tc[~expected])
@pytest.mark.parametrize(
    "keep, expected",
    [
        ("first", Series([False, False, True, True])),
        ("last", Series([True, True, False, False])),
        (False, Series([True, True, True, True])),
    ],
)
def test_drop_duplicates_bool(keep, expected):
    """Bool-dtype duplicated/drop_duplicates for every keep mode."""
    tc = Series([True, False, True, False])
    tm.assert_series_equal(tc.duplicated(keep=keep), expected)
    tm.assert_series_equal(tc.drop_duplicates(keep=keep), tc[~expected])
    # inplace=True must produce the same result as the mask-based drop.
    sc = tc.copy()
    sc.drop_duplicates(keep=keep, inplace=True)
    tm.assert_series_equal(sc, tc[~expected])
@pytest.mark.parametrize(
    "keep, expected",
    [
        ("first", Series([False, False, True, False, True], name="name")),
        ("last", Series([True, True, False, False, False], name="name")),
        (False, Series([True, True, True, False, True], name="name")),
    ],
)
def test_duplicated_keep(keep, expected):
    """duplicated() honors the keep mode and preserves the Series name."""
    s = Series(["a", "b", "b", "c", "a"], name="name")
    result = s.duplicated(keep=keep)
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize(
    "keep, expected",
    [
        ("first", Series([False, False, True, False, True])),
        ("last", Series([True, True, False, False, False])),
        (False, Series([True, True, True, False, True])),
    ],
)
def test_duplicated_nan_none(keep, expected):
    """duplicated() with both np.nan and None present: per the expected
    masks, the two NaNs are duplicates of each other while the single
    None counts as its own first occurrence."""
    s = Series([np.nan, 3, 3, None, np.nan], dtype=object)
    result = s.duplicated(keep=keep)
    tm.assert_series_equal(result, expected)
|
[
"[email protected]"
] | |
c019e47f0ff83cf6dcdb0d544128652acf3ae52c
|
0cf6728548830b42c60e37ea1c38b54d0e019ddd
|
/Learning_MachineLearning/DeepLearningWithPython/5.3.py
|
0f1e218f44d0b1287be5fb399e830a0c97bf75a1
|
[] |
no_license
|
MuSaCN/PythonLearning
|
8efe166f66f2bd020d00b479421878d91f580298
|
507f1d82a9228d0209c416626566cf390e1cf758
|
refs/heads/master
| 2022-11-11T09:13:08.863712 | 2022-11-08T04:20:09 | 2022-11-08T04:20:09 | 299,617,217 | 2 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,734 |
py
|
# Author:Zhang Yuan
from MyPackage import *
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import seaborn as sns
import statsmodels.api as sm
from scipy import stats
#------------------------------------------------------------
# Instantiate the author's private "MyPackage" helper toolkit.  Each object
# wraps one functional area; this script mainly uses ``myKeras`` below.
__mypath__ = MyPath.MyClass_Path("\\DeepLearningWithPython") # path helper rooted at this chapter's folder
myfile = MyFile.MyClass_File() # file operations
myword = MyFile.MyClass_Word() # Word document generation
myexcel = MyFile.MyClass_Excel() # Excel generation
mytime = MyTime.MyClass_Time() # time utilities
myplt = MyPlot.MyClass_Plot() # direct plotting (single figure)
mypltpro = MyPlot.MyClass_PlotPro() # advanced Plot series
myfig = MyPlot.MyClass_Figure(AddFigure=False) # object-style plotting (multiple figures)
myfigpro = MyPlot.MyClass_FigurePro(AddFigure=False) # advanced Figure series
mynp = MyArray.MyClass_NumPy() # N-d array helpers (NumPy wrapper)
mypd = MyArray.MyClass_Pandas() # matrix/array helpers (Pandas wrapper)
mypdpro = MyArray.MyClass_PandasPro() # advanced Pandas helpers
myDA = MyDataAnalysis.MyClass_DataAnalysis() # data analysis
# myMql = MyMql.MyClass_MqlBackups() # MQL backup helper
# myMT5 = MyMql.MyClass_ConnectMT5(connect=False) # MetaTrader5 client connector
# myDefault = MyDefault.MyClass_Default_Matplotlib() # matplotlib default settings
# myBaidu = MyWebCrawler.MyClass_BaiduPan() # Baidu cloud-drive client
# myImage = MyImage.MyClass_ImageProcess() # image processing
myBT = MyBackTest.MyClass_BackTestEvent() # event-driven backtesting
myBTV = MyBackTest.MyClass_BackTestVector() # vectorized backtesting
myML = MyMachineLearning.MyClass_MachineLearning() # machine-learning helpers
mySQL = MyDataBase.MyClass_MySQL(connect=False) # MySQL client
mySQLAPP = MyDataBase.MyClass_SQL_APPIntegration() # database app integration
myWebQD = MyWebCrawler.MyClass_QuotesDownload(tushare=False) # market-quotes downloader
myWebR = MyWebCrawler.MyClass_Requests() # Requests-based crawler
myWebS = MyWebCrawler.MyClass_Selenium(openChrome=False) # Selenium browser automation
myWebAPP = MyWebCrawler.MyClass_Web_APPIntegration() # crawler app integration
myEmail = MyWebCrawler.MyClass_Email() # email client
myReportA = MyQuant.MyClass_ReportAnalysis() # research-report analysis
myFactorD = MyQuant.MyClass_Factor_Detection() # factor detection
myKeras = MyDeepLearning.MyClass_Keras() # Keras helper (used heavily below)
#------------------------------------------------------------
#%%
# Load the VGG16 convolutional base pretrained on ImageNet, without its
# classifier head, sized for 150x150 RGB inputs.
from tensorflow.keras.applications import VGG16
conv_base = VGG16(weights='imagenet',
                  include_top=False,
                  input_shape=(150, 150, 3))
#%%
conv_base.summary()
#%%
import os
import numpy as np
# Dataset directories (Windows layout under %USERPROFILE%\.kaggle).
original_dataset_dir = os.path.expandvars('%USERPROFILE%')+'\\.kaggle\\dogs-vs-cats'
base_dir = original_dataset_dir+'\\cats_and_dogs_small'
train_dir = os.path.join(base_dir, 'train')
validation_dir = os.path.join(base_dir, 'validation')
test_dir = os.path.join(base_dir, 'test')
# Fast feature extraction using the pretrained model
train_features, train_labels = myKeras.extract_features_from_directory(conv_base,train_dir,2000,batch_size=20)
validation_features, validation_labels = myKeras.extract_features_from_directory(conv_base,validation_dir,1000,batch_size=20)
test_features, test_labels = myKeras.extract_features_from_directory(conv_base,test_dir,1000,batch_size=20)
#%%
# Flatten the extracted conv features to 2-D: reshapecount is the product of
# the per-sample feature dims (presumably 4*4*512 for VGG16 at 150x150 —
# TODO confirm against extract_features_from_directory's output shape).
reshapecount = np.array(train_features.shape[1:]).cumprod()[-1]
train_features = np.reshape(train_features, (2000, reshapecount))
validation_features = np.reshape(validation_features, (1000, reshapecount))
test_features = np.reshape(test_features, (1000, reshapecount))
#%%
from tensorflow.keras import models
from tensorflow.keras import layers
from tensorflow.keras import optimizers
# Small dense classifier trained directly on the extracted features.
model = models.Sequential()
model.add(layers.Dense(256, activation='relu', input_dim=4 * 4 * 512))
model.add(layers.Dropout(0.5)) # (note: dropout regularization is required here)
model.add(layers.Dense(1, activation='sigmoid'))
model.compile(optimizer=optimizers.RMSprop(lr=2e-5),
              loss='binary_crossentropy',
              metrics=['acc'])
history = model.fit(train_features, train_labels,
                    epochs=30,
                    batch_size=20,
                    validation_data=(validation_features, validation_labels))
myKeras.plot_acc_loss(history)
#%%
# Second approach: extend conv_base with a dense head and train end-to-end
# (with data augmentation via the myKeras helper).
myKeras.clear_session()
from tensorflow.keras import models
from tensorflow.keras import layers
model = models.Sequential()
model.add(conv_base)
model.add(layers.Flatten())
model.add(layers.Dense(256, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))
#%%
model.summary()
#%%
# Freeze the conv_base network
print('This is the number of trainable weights '
      'before freezing the conv base:', len(model.trainable_weights))
conv_base.trainable = False
print('总共有 4 个权重张量,每层2个(主权重矩阵和偏置向量)。', len(model.trainable_weights))
#%%
model.compile(loss='binary_crossentropy',
              optimizer=optimizers.RMSprop(lr=2e-5),
              metrics=['acc'])
model,history = myKeras.cnn2D_fit_from_directory(model,train_dir,validation_dir,augmentation=True,flow_batch_size=20,epochs=30,plot=True)
#%%
myKeras.plot_acc_loss(history)
model.save(base_dir+'\\cats_and_dogs_small_3.h5')
#%%
conv_base.summary()
#%%
# Fine-tuning: unfreeze conv_base from 'block5_conv1' onwards.
conv_base = myKeras.fine_tune_model(conv_base,'block5_conv1')
#%%
# Recompile with a lower learning rate for fine-tuning, then retrain.
model.compile(loss='binary_crossentropy',
              optimizer=optimizers.RMSprop(lr=1e-5),
              metrics=['acc'])
model,history = myKeras.cnn2D_fit_from_directory(model,train_dir,validation_dir,augmentation=True,flow_batch_size=20,epochs=30,plot=True)
#%%
model.save(base_dir+'\\cats_and_dogs_small_4.h5')
#%%
# Final evaluation on the held-out test directory.
myKeras.cnn2D_evaluate_from_directory(model,test_dir,flow_batch_size=20,steps=50)
|
[
"[email protected]"
] | |
fd7663c74ab7441e0d5e4e98c3e5a02023c432b6
|
48983b88ebd7a81bfeba7abd6f45d6462adc0385
|
/HakerRank/data_structures/trees/tree_top_view.py
|
54610fe4a1f57e64ca716708d368bed09f4c0f84
|
[] |
no_license
|
lozdan/oj
|
c6366f450bb6fed5afbaa5573c7091adffb4fa4f
|
79007879c5a3976da1e4713947312508adef2e89
|
refs/heads/master
| 2018-09-24T01:29:49.447076 | 2018-06-19T14:33:37 | 2018-06-19T14:33:37 | 109,335,964 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 546 |
py
|
# author: Daniel Lozano
# source: HackerRank ( https://www.hackerrank.com )
# problem name: Data Structures: Trees: Top View
# problem url: https://www.hackerrank.com/challenges/tree-top-view/problem
def topView(root):
    """Print the tree's top view: the left spine (outermost node first),
    then the root, then the right spine, space-separated on one line.

    NOTE(review): this only walks the extreme left/right chains, which
    matches the simplified tree used by this HackerRank challenge; it is
    not a general horizontal-distance top view — confirm before reuse.
    """
    instance = root
    if not root:
        # Empty tree: print nothing.
        return
    answer = [instance.data]
    # Collect the left spine (root first), then reverse so the deepest
    # left node comes out first.
    while instance.left:
        answer.append(instance.left.data)
        instance = instance.left
    answer.reverse()
    # Append the right spine in top-down order.
    while root.right:
        answer.append(root.right.data)
        root = root.right
    print " ".join(map(str, answer))  # Python 2 print statement
|
[
"[email protected]"
] | |
263b4e73ca9c63039667b3f9bfd7f5987ff27324
|
56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e
|
/CMGTools/H2TauTau/prod/TauES_test/up/emb/DoubleMuParked/StoreResults-Run2012C_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0_1374851248/HTT_24Jul_newTES_manzoni_Up_Jobs/Job_165/run_cfg.py
|
8c6b3e542ac26649a77e44a538c98aeaa7bee2f0
|
[] |
no_license
|
rmanzoni/HTT
|
18e6b583f04c0a6ca10142d9da3dd4c850cddabc
|
a03b227073b2d4d8a2abe95367c014694588bf98
|
refs/heads/master
| 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 69,050 |
py
|
import FWCore.ParameterSet.Config as cms
import os,sys
sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/TauES_test/up/emb/DoubleMuParked/StoreResults-Run2012C_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0_1374851248/HTT_24Jul_newTES_manzoni_Up_Jobs')
from base_cfg import *
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
lumisToProcess = cms.untracked.VLuminosityBlockRange( ("190645:10-190645:110", "190646:1-190646:111", "190659:33-190659:167", "190679:1-190679:55", "190688:69-190688:249",
"190702:51-190702:53", "190702:55-190702:122", "190702:124-190702:169", "190703:1-190703:252", "190704:1-190704:3",
"190705:1-190705:5", "190705:7-190705:65", "190705:81-190705:336", "190705:338-190705:350", "190705:353-190705:383",
"190706:1-190706:126", "190707:1-190707:237", "190707:239-190707:257", "190708:1-190708:189", "190733:71-190733:96",
"190733:99-190733:389", "190733:392-190733:460", "190736:1-190736:80", "190736:83-190736:185", "190738:1-190738:130",
"190738:133-190738:226", "190738:229-190738:349", "190782:55-190782:181", "190782:184-190782:233", "190782:236-190782:399",
"190782:401-190782:409", "190895:64-190895:202", "190895:210-190895:302", "190895:305-190895:584", "190895:587-190895:948",
"190906:73-190906:256", "190906:259-190906:354", "190906:356-190906:496", "190945:124-190945:207", "190949:1-190949:81",
"191043:45-191043:46", "191046:1-191046:21", "191046:24-191046:82", "191046:84-191046:88", "191046:92-191046:116",
"191046:119-191046:180", "191046:183", "191046:185-191046:239", "191056:1", "191056:4-191056:9",
"191056:16-191056:17", "191056:19", "191057:1", "191057:4-191057:40", "191062:1",
"191062:3", "191062:5-191062:214", "191062:216-191062:541", "191090:1-191090:55", "191201:38-191201:49",
"191201:52-191201:79", "191202:1-191202:64", "191202:66-191202:68", "191202:87-191202:105", "191202:108-191202:118",
"191226:77-191226:78", "191226:81-191226:831", "191226:833-191226:1454", "191226:1456-191226:1466", "191226:1469-191226:1507",
"191226:1510-191226:1686", "191247:1-191247:153", "191247:156-191247:280", "191247:283-191247:606", "191247:608-191247:620",
"191247:622-191247:818", "191247:821-191247:834", "191247:837-191247:1031", "191247:1034-191247:1046", "191247:1049-191247:1140",
"191247:1143-191247:1187", "191247:1190-191247:1214", "191247:1217-191247:1224", "191248:1-191248:103", "191264:59-191264:79",
"191264:82-191264:152", "191264:155-191264:189", "191271:56-191271:223", "191271:225-191271:363", "191276:1-191276:16",
"191277:1-191277:28", "191277:30-191277:164", "191277:167-191277:253", "191277:255-191277:457", "191277:460-191277:535",
"191277:537-191277:576", "191277:579-191277:775", "191277:778-191277:811", "191277:813-191277:849", "191367:1-191367:2",
"191411:1-191411:23", "191695:1", "191718:43-191718:95", "191718:98-191718:207", "191720:1",
"191720:3-191720:15", "191720:17-191720:181", "191721:1", "191721:3-191721:34", "191721:36-191721:183",
"191721:186-191721:189", "191726:1-191726:13", "191810:15", "191810:22-191810:49", "191810:52-191810:92",
"191830:54-191830:242", "191830:245-191830:301", "191830:304-191830:393", "191833:1", "191833:3-191833:103",
"191834:1-191834:30", "191834:33-191834:74", "191834:77-191834:299", "191834:302-191834:352", "191837:1-191837:44",
"191837:47-191837:53", "191837:56-191837:65", "191856:1-191856:133", "191859:1-191859:28", "191859:31-191859:126",
"193093:1-193093:33", "193123:1-193123:27", "193124:1-193124:52", "193192:58-193192:86", "193193:1-193193:6",
"193193:8", "193193:11-193193:83", "193193:86-193193:120", "193193:122-193193:160", "193193:162-193193:274",
"193193:276-193193:495", "193193:497-193193:506", "193207:54-193207:182", "193334:29-193334:172", "193336:1-193336:264",
"193336:267-193336:492", "193336:495-193336:684", "193336:687-193336:729", "193336:732-193336:951", "193541:77-193541:101",
"193541:103-193541:413", "193541:416-193541:575", "193541:578-193541:619", "193556:41-193556:83", "193557:1-193557:84",
"193575:48-193575:173", "193575:176-193575:349", "193575:351-193575:394", "193575:397-193575:415", "193575:417-193575:658",
"193575:660-193575:752", "193621:60-193621:570", "193621:573-193621:769", "193621:772-193621:976", "193621:979-193621:1053",
"193621:1056-193621:1137", "193621:1139-193621:1193", "193621:1195-193621:1371", "193621:1373-193621:1654", "193834:1-193834:35",
"193835:1-193835:20", "193835:22-193835:26", "193836:1-193836:2", "193998:66-193998:113", "193998:115-193998:278",
"193999:1-193999:45", "194027:57-194027:113", "194050:53-194050:113", "194050:116-194050:273", "194050:275-194050:355",
"194050:357-194050:369", "194050:372-194050:391", "194050:394-194050:490", "194050:492-194050:814", "194050:816-194050:1435",
"194050:1437-194050:1735", "194050:1760-194050:1888", "194051:1-194051:12", "194052:1-194052:99", "194052:102-194052:166",
"194075:48-194075:101", "194075:103", "194075:105-194075:107", "194075:109", "194075:111",
"194076:1-194076:9", "194076:11-194076:55", "194076:58-194076:163", "194076:165-194076:228", "194076:230-194076:264",
"194076:267-194076:507", "194076:509-194076:527", "194076:530-194076:538", "194076:541-194076:562", "194076:565-194076:748",
"194108:81-194108:161", "194108:164-194108:264", "194108:266-194108:373", "194108:376-194108:396", "194108:398-194108:433",
"194108:436-194108:452", "194108:454-194108:577", "194108:579-194108:590", "194108:593-194108:668", "194108:671-194108:872",
"194115:66-194115:184", "194115:186-194115:338", "194115:340-194115:346", "194115:348-194115:493", "194115:496-194115:731",
"194115:819-194115:857", "194117:1-194117:38", "194119:1-194119:229", "194119:232-194119:261", "194120:1-194120:162",
"194120:165-194120:406", "194150:42-194150:127", "194150:129-194150:261", "194150:264-194150:311", "194151:47-194151:72",
"194151:75-194151:191", "194151:193-194151:238", "194151:240-194151:617", "194151:619", "194151:621",
"194151:623", "194153:1-194153:115", "194199:96-194199:227", "194199:229-194199:336", "194199:339-194199:402",
"194210:3-194210:195", "194210:198-194210:217", "194210:220-194210:359", "194210:361-194210:555", "194223:61-194223:112",
"194224:1-194224:126", "194224:129-194224:206", "194224:208-194224:250", "194224:253-194224:309", "194224:312-194224:386",
"194224:389-194224:412", "194225:1-194225:23", "194225:26-194225:47", "194225:49-194225:85", "194225:88-194225:149",
"194270:56-194270:68", "194303:56-194303:66", "194303:69-194303:102", "194304:1-194304:43", "194304:46",
"194305:1-194305:84", "194314:52-194314:130", "194314:133-194314:300", "194315:1-194315:10", "194315:13-194315:314",
"194315:317-194315:428", "194315:431-194315:452", "194315:455-194315:467", "194317:1-194317:20", "194424:63-194424:141",
"194424:144-194424:195", "194424:198-194424:266", "194424:268-194424:421", "194424:424-194424:478", "194424:481-194424:531",
"194424:534-194424:553", "194424:556-194424:706", "194424:708", "194428:1-194428:85", "194428:87-194428:122",
"194428:125-194428:294", "194428:296-194428:465", "194429:1-194429:4", "194429:7-194429:54", "194429:57-194429:147",
"194429:150-194429:411", "194429:413-194429:742", "194429:745-194429:986", "194429:988-194429:1019", "194439:46-194439:77",
"194439:79-194439:106", "194455:45-194455:64", "194455:67-194455:140", "194455:142-194455:255", "194455:293-194455:303",
"194464:1-194464:127", "194464:130-194464:142", "194464:145-194464:210", "194479:1-194479:44", "194479:165-194479:232",
"194479:235-194479:262", "194479:265-194479:374", "194479:377-194479:431", "194479:434-194479:489", "194479:492-194479:529",
"194479:531-194479:566", "194480:1-194480:32", "194480:34-194480:205", "194480:207-194480:375", "194480:377-194480:387",
"194480:389-194480:759", "194480:762-194480:956", "194480:959-194480:1402", "194533:46-194533:379", "194533:382-194533:415",
"194533:417-194533:618", "194533:620-194533:872", "194619:31-194619:110", "194631:1-194631:42", "194631:44-194631:100",
"194631:102-194631:169", "194631:171-194631:222", "194643:1-194643:287", "194644:1-194644:168", "194644:171-194644:181",
"194644:184-194644:185", "194644:187-194644:319", "194644:321-194644:421", "194691:61-194691:104", "194691:107-194691:155",
"194691:158-194691:251", "194691:254-194691:268", "194691:271-194691:272", "194691:275-194691:289", "194691:292-194691:313",
"194699:1-194699:30", "194699:32-194699:52", "194699:55-194699:64", "194699:67-194699:71", "194699:73-194699:154",
"194699:157-194699:215", "194699:218-194699:238", "194699:241-194699:259", "194702:1-194702:138", "194702:141-194702:191",
"194704:1-194704:41", "194704:44-194704:545", "194704:548-194704:592", "194711:1-194711:7", "194711:9-194711:619",
"194712:1-194712:56", "194712:61-194712:418", "194712:420-194712:625", "194712:627-194712:759", "194735:44-194735:71",
"194735:74-194735:101", "194735:104-194735:130", "194778:60-194778:118", "194778:120-194778:219", "194789:1-194789:18",
"194789:21-194789:32", "194789:34-194789:80", "194789:82-194789:166", "194789:168-194789:269", "194789:272-194789:405",
"194789:409-194789:414", "194789:417-194789:427", "194789:430-194789:566", "194790:1-194790:45", "194825:72-194825:117",
"194825:120-194825:221", "194896:34-194896:55", "194896:58-194896:79", "194896:82-194896:103", "194897:1-194897:6",
"194897:8-194897:78", "194897:80-194897:96", "194897:98-194897:102", "194912:53-194912:70", "194912:72-194912:96",
"194912:98-194912:444", "194912:446-194912:450", "194912:453-194912:467", "194912:470-194912:561", "194912:564-194912:660",
"194912:663-194912:813", "194912:815-194912:840", "194912:843-194912:864", "194912:866-194912:1004", "194912:1007-194912:1025",
"194912:1027-194912:1067", "194912:1069-194912:1137", "194912:1140-194912:1166", "194912:1168-194912:1249", "194912:1251-194912:1304",
"194912:1307-194912:1444", "194912:1447-194912:1487", "194912:1489-194912:1503", "194912:1506-194912:1662", "194914:1-194914:38",
"194915:1-194915:74", "195013:94-195013:144", "195013:146-195013:185", "195013:187-195013:206", "195013:208-195013:299",
"195013:302-195013:324", "195013:326-195013:366", "195013:369-195013:447", "195013:450-195013:526", "195013:528-195013:541",
"195014:1-195014:6", "195014:9-195014:119", "195014:121-195014:148", "195015:1-195015:13", "195016:1-195016:21",
"195016:23-195016:55", "195016:58-195016:63", "195016:65-195016:174", "195016:177-195016:184", "195016:186-195016:241",
"195016:243-195016:246", "195016:248-195016:251", "195016:254-195016:367", "195016:370-195016:422", "195016:425-195016:560",
"195016:563-195016:569", "195099:70-195099:144", "195099:147-195099:186", "195099:189-195099:208", "195099:211-195099:224",
"195099:227-195099:248", "195109:98-195109:241", "195112:1-195112:12", "195112:15-195112:26", "195113:1-195113:209",
"195113:212-195113:388", "195113:391-195113:403", "195113:406-195113:419", "195113:422-195113:492", "195113:495-195113:579",
"195114:1-195114:69", "195114:72-195114:103", "195115:1-195115:7", "195115:10-195115:22", "195147:132-195147:282",
"195147:285-195147:294", "195147:297-195147:331", "195147:334-195147:363", "195147:366-195147:442", "195147:445-195147:536",
"195147:539-195147:559", "195163:72-195163:138", "195163:140-195163:224", "195163:227-195163:240", "195163:243",
"195163:246-195163:347", "195164:1-195164:64", "195165:1-195165:4", "195165:7-195165:41", "195165:44-195165:54",
"195165:56-195165:153", "195165:156-195165:260", "195165:263-195165:266", "195251:1-195251:131", "195251:134-195251:137",
"195251:140-195251:152", "195251:154-195251:165", "195251:167-195251:242", "195303:109-195303:191", "195303:194-195303:277",
"195303:280-195303:310", "195303:312-195303:316", "195303:318-195303:409", "195304:1-195304:3", "195304:6-195304:22",
"195304:27-195304:80", "195304:83-195304:100", "195304:103-195304:154", "195304:157-195304:341", "195304:344-195304:588",
"195304:590-195304:727", "195304:729-195304:1003", "195304:1006-195304:1079", "195304:1083-195304:1140", "195304:1143-195304:1229",
"195378:90-195378:117", "195378:120-195378:127", "195378:130-195378:185", "195378:187-195378:204", "195378:206-195378:302",
"195378:305-195378:542", "195378:544-195378:565", "195378:567-195378:645", "195378:647-195378:701", "195378:703-195378:734",
"195378:737-195378:1120", "195378:1122-195378:1133", "195390:1", "195390:4-195390:27", "195390:30-195390:145",
"195390:147-195390:183", "195390:186-195390:187", "195390:190-195390:208", "195390:210-195390:213", "195390:215-195390:400",
"195396:49-195396:55", "195396:58-195396:63", "195396:66-195396:131", "195397:1-195397:10", "195397:12-195397:89",
"195397:92-195397:120", "195397:123-195397:141", "195397:143-195397:251", "195397:253", "195397:256-195397:475",
"195397:478-195397:525", "195397:527-195397:608", "195397:611-195397:776", "195397:779-195397:970", "195397:972-195397:1121",
"195397:1123-195397:1181", "195397:1184-195397:1198", "195397:1200-195397:1209", "195398:3-195398:137", "195398:139-195398:494",
"195398:497-195398:585", "195398:587-195398:817", "195398:820-195398:824", "195398:827-195398:1225", "195398:1228-195398:1307",
"195398:1309-195398:1712", "195398:1721-195398:1736", "195398:1741-195398:1752", "195398:1767-195398:1795", "195399:1-195399:192",
"195399:194-195399:382", "195530:1-195530:80", "195530:82-195530:104", "195530:107-195530:156", "195530:159-195530:300",
"195530:302-195530:405", "195540:68-195540:123", "195540:126-195540:137", "195540:140-195540:283", "195540:286-195540:319",
"195551:91-195551:106", "195552:1-195552:21", "195552:23-195552:27", "195552:30-195552:147", "195552:149-195552:155",
"195552:158-195552:182", "195552:185-195552:287", "195552:290-195552:349", "195552:352-195552:469", "195552:472-195552:815",
"195552:818-195552:823", "195552:825-195552:883", "195552:885-195552:1152", "195552:1154-195552:1300", "195552:1303-195552:1789",
"195633:40-195633:42", "195647:1-195647:41", "195649:1-195649:69", "195649:72-195649:151", "195649:154-195649:181",
"195649:183-195649:247", "195655:1-195655:129", "195655:131-195655:184", "195655:186-195655:260", "195655:263-195655:350",
"195655:353-195655:446", "195655:448-195655:483", "195655:485-195655:498", "195656:1-195656:362", "195658:1-195658:37",
"195658:40-195658:362", "195658:364-195658:382", "195658:384-195658:386", "195749:1-195749:8", "195749:10-195749:33",
"195749:36-195749:131", "195757:1-195757:82", "195757:85-195757:115", "195757:118-195757:161", "195757:163-195757:206",
"195758:1-195758:18", "195774:1-195774:13", "195774:16-195774:137", "195774:139-195774:151", "195774:154-195774:162",
"195774:164-195774:256", "195774:258-195774:276", "195774:279-195774:362", "195774:365-195774:466", "195774:469-195774:618",
"195774:620-195774:649", "195774:651-195774:830", "195775:1-195775:57", "195775:60-195775:100", "195775:103-195775:170",
"195776:1-195776:63", "195776:66-195776:283", "195776:286-195776:337", "195776:340-195776:399", "195776:401-195776:409",
"195776:411-195776:477", "195841:74-195841:85", "195868:1-195868:88", "195868:90-195868:107", "195868:110-195868:205",
"195915:1-195915:109", "195915:111-195915:275", "195915:278-195915:390", "195915:393-195915:417", "195915:419-195915:429",
"195915:432-195915:505", "195915:507-195915:747", "195915:749-195915:785", "195915:787-195915:828", "195915:830-195915:850",
"195916:1-195916:16", "195916:19-195916:68", "195916:71-195916:212", "195917:1-195917:4", "195918:1-195918:44",
"195918:46", "195918:49-195918:64", "195919:1-195919:15", "195923:1-195923:14", "195925:1-195925:12",
"195926:1", "195926:3-195926:19", "195926:21-195926:34", "195929:1-195929:29", "195930:1-195930:77",
"195930:80-195930:176", "195930:179-195930:526", "195930:529-195930:596", "195937:1-195937:28", "195937:31-195937:186",
"195937:188-195937:396", "195947:23-195947:62", "195947:64-195947:88", "195948:51-195948:116", "195948:119-195948:144",
"195948:147", "195948:150-195948:352", "195948:355-195948:369", "195948:372-195948:402", "195948:404-195948:500",
"195948:503-195948:540", "195948:543-195948:565", "195948:567-195948:602", "195948:605-195948:615", "195950:1-195950:71",
"195950:73-195950:138", "195950:141-195950:169", "195950:172-195950:332", "195950:335-195950:350", "195950:353-195950:382",
"195950:385-195950:421", "195950:424-195950:450", "195950:453-195950:483", "195950:485-195950:616", "195950:619-195950:715",
"195950:718-195950:787", "195950:789-195950:800", "195950:803-195950:829", "195950:831", "195950:833-195950:1587",
"195963:54-195963:58", "195970:44-195970:49", "195970:51-195970:85", "196019:54-196019:68", "196027:1-196027:55",
"196027:58-196027:119", "196027:121-196027:155", "196027:158-196027:186", "196046:12-196046:40", "196047:1-196047:64",
"196047:70-196047:75", "196048:1-196048:44", "196048:46-196048:48", "196197:58-196197:122", "196197:125-196197:179",
"196197:181-196197:311", "196197:313-196197:516", "196197:519-196197:562", "196199:1-196199:33", "196199:36-196199:83",
"196199:86-196199:118", "196199:121-196199:147", "196199:150-196199:237", "196199:239-196199:285", "196199:287-196199:534",
"196200:1-196200:68", "196202:3-196202:61", "196202:64-196202:108", "196203:1-196203:102", "196203:107-196203:117",
"196218:55-196218:199", "196218:201-196218:224", "196218:226-196218:393", "196218:396-196218:494", "196218:496-196218:741",
"196218:744-196218:752", "196218:754-196218:757", "196218:759-196218:820", "196239:1-196239:59", "196239:62-196239:154",
"196239:157-196239:272", "196239:274-196239:373", "196239:375-196239:432", "196239:435-196239:465", "196239:468-196239:647",
"196239:650-196239:706", "196239:709-196239:1025", "196249:63-196249:77", "196249:80-196249:99", "196250:1-196250:2",
"196250:5-196250:265", "196250:267-196250:426", "196252:1-196252:35", "196334:59-196334:111", "196334:113-196334:123",
"196334:126-196334:132", "196334:135-196334:167", "196334:170-196334:193", "196334:196-196334:257", "196334:259-196334:267",
"196334:270-196334:289", "196334:292-196334:342", "196349:65-196349:84", "196349:86-196349:154", "196349:157-196349:244",
"196349:246-196349:258", "196357:1-196357:4", "196359:1-196359:2", "196362:1-196362:88", "196363:1-196363:8",
"196363:11-196363:34", "196364:1-196364:93", "196364:96-196364:136", "196364:139-196364:365", "196364:368-196364:380",
"196364:382-196364:601", "196364:603-196364:795", "196364:798-196364:884", "196364:887-196364:1196", "196364:1199-196364:1200",
"196364:1203-196364:1299", "196437:1", "196437:3-196437:74", "196437:77-196437:169", "196438:1-196438:181",
"196438:184-196438:699", "196438:701-196438:1269", "196452:82-196452:112", "196452:114-196452:490", "196452:493-196452:586",
"196452:589-196452:618", "196452:622-196452:668", "196452:671-196452:716", "196452:718-196452:726", "196452:728-196452:956",
"196452:958-196452:1004", "196452:1007-196452:1091", "196453:1-196453:74", "196453:77-196453:145", "196453:147-196453:669",
"196453:673-196453:714", "196453:717-196453:799", "196453:802-196453:988", "196453:991-196453:1178", "196453:1180",
"196453:1182-196453:1248", "196453:1250-196453:1528", "196453:1531-196453:1647", "196495:114-196495:180", "196495:182-196495:272",
"196509:1-196509:68", "196531:62-196531:150", "196531:152-196531:253", "196531:256-196531:285", "196531:288-196531:302",
"196531:305-196531:422", "196531:425-196531:440", "198049:1-198049:11", "198049:14-198049:57", "198050:2-198050:155",
"198063:1-198063:37", "198063:40-198063:72", "198063:74-198063:124", "198063:127-198063:294", "198116:36-198116:52",
"198116:54-198116:55", "198116:58-198116:96", "198116:98-198116:112", "198207:1-198207:97", "198208:1-198208:92",
"198208:94-198208:134", "198208:137-198208:147", "198208:150-198208:209", "198210:1-198210:221", "198212:1-198212:574",
"198213:1-198213:107", "198215:1-198215:12", "198230:1-198230:33", "198230:36-198230:57", "198230:60-198230:235",
"198230:237-198230:324", "198230:326-198230:388", "198230:390-198230:459", "198230:462-198230:625", "198230:627-198230:651",
"198230:653-198230:805", "198230:808-198230:811", "198230:814-198230:948", "198230:950-198230:1090", "198230:1093-198230:1103",
"198230:1106-198230:1332", "198230:1335-198230:1380", "198249:1-198249:7", "198269:3-198269:198", "198271:1-198271:91",
"198271:93-198271:170", "198271:173-198271:299", "198271:301-198271:450", "198271:453-198271:513", "198271:516-198271:616",
"198271:619-198271:628", "198271:631-198271:791", "198271:793-198271:797", "198272:1-198272:185", "198272:188-198272:245",
"198272:248-198272:314", "198272:317-198272:433", "198272:436-198272:444", "198272:454-198272:620", "198346:44-198346:47",
"198372:57-198372:110", "198485:68-198485:109", "198485:112-198485:134", "198485:136-198485:181", "198485:184-198485:239",
"198487:1-198487:145", "198487:147-198487:514", "198487:517-198487:668", "198487:671-198487:733", "198487:736-198487:757",
"198487:760-198487:852", "198487:854-198487:994", "198487:997-198487:1434", "198487:1437-198487:1610", "198522:65-198522:144",
"198522:147-198522:208", "198941:102-198941:189", "198941:191-198941:220", "198941:222-198941:241", "198941:243-198941:249",
"198941:252-198941:284", "198954:108-198954:156", "198954:159-198954:277", "198955:1-198955:45", "198955:47-198955:50",
"198955:53-198955:220", "198955:223-198955:269", "198955:271-198955:284", "198955:286-198955:338", "198955:340-198955:580",
"198955:583-198955:742", "198955:744-198955:910", "198955:913-198955:946", "198955:949-198955:1162", "198955:1165-198955:1169",
"198955:1172-198955:1182", "198955:1185-198955:1188", "198955:1190-198955:1246", "198955:1249-198955:1304", "198955:1306-198955:1467",
"198955:1470-198955:1485", "198955:1487-198955:1552", "198969:58-198969:81", "198969:84-198969:247", "198969:249-198969:323",
"198969:325-198969:365", "198969:367-198969:413", "198969:416-198969:466", "198969:468-198969:643", "198969:646-198969:918",
"198969:920-198969:1011", "198969:1013-198969:1175", "198969:1178-198969:1236", "198969:1239-198969:1253", "199008:75-199008:93",
"199008:95-199008:121", "199008:124-199008:208", "199008:211-199008:331", "199008:333-199008:373", "199008:376-199008:482",
"199008:485-199008:605", "199008:608-199008:644", "199011:1-199011:11", "199011:13-199011:24", "199021:59-199021:88",
"199021:91-199021:128", "199021:130-199021:133", "199021:136-199021:309", "199021:311-199021:333", "199021:335-199021:410",
"199021:414-199021:469", "199021:471-199021:533", "199021:535-199021:563", "199021:565-199021:1223", "199021:1226-199021:1479",
"199021:1481-199021:1494", "199318:65-199318:138", "199319:1-199319:7", "199319:9-199319:223", "199319:226-199319:277",
"199319:280-199319:348", "199319:351-199319:358", "199319:360-199319:422", "199319:424-199319:490", "199319:492-199319:493",
"199319:496-199319:612", "199319:615-199319:642", "199319:645-199319:720", "199319:723-199319:728", "199319:730-199319:731",
"199319:734-199319:741", "199319:744-199319:752", "199319:754-199319:943", "199319:945-199319:997", "199336:1-199336:33",
"199336:36-199336:122", "199336:125-199336:231", "199336:234-199336:614", "199336:617-199336:789", "199336:791-199336:977",
"199356:95-199356:121", "199356:123-199356:168", "199356:171-199356:205", "199356:208-199356:231", "199409:25-199409:54",
"199409:56-199409:89", "199409:91-199409:204", "199409:206-199409:290", "199409:293-199409:583", "199409:586-199409:602",
"199409:604-199409:1014", "199409:1016-199409:1300", "199428:61-199428:197", "199428:200-199428:210", "199428:212-199428:382",
"199428:387-199428:414", "199428:417-199428:436", "199428:439-199428:530", "199428:533-199428:648", "199429:1-199429:28",
"199429:30-199429:36", "199429:39-199429:55", "199429:58-199429:101", "199429:103-199429:148", "199429:151-199429:154",
"199435:63-199435:106", "199435:109-199435:261", "199435:263-199435:579", "199435:582-199435:654", "199435:656-199435:696",
"199435:699-199435:1034", "199435:1037-199435:1144", "199435:1147-199435:1327", "199435:1330-199435:1411", "199435:1414-199435:1431",
"199435:1434-199435:1441", "199435:1444-199435:1487", "199435:1489-199435:1610", "199436:1-199436:113", "199436:116-199436:254",
"199436:257-199436:675", "199436:678-199436:748", "199564:1-199564:3", "199569:1-199569:2", "199569:5-199569:136",
"199569:139-199569:367", "199570:1-199570:17", "199571:1-199571:184", "199571:186-199571:360", "199571:363-199571:561",
"199572:1-199572:317", "199573:1-199573:22", "199574:1-199574:53", "199574:56-199574:153", "199574:156-199574:246",
"199608:60-199608:157", "199608:159-199608:209", "199608:211-199608:341", "199608:344-199608:390", "199608:392-199608:461",
"199608:464-199608:800", "199608:802-199608:1064", "199608:1067-199608:1392", "199608:1395-199608:1630", "199608:1633-199608:1904",
"199608:1907-199608:1962", "199608:1965-199608:2252", "199608:2255-199608:2422", "199698:72-199698:94", "199698:96-199698:127",
"199699:1-199699:154", "199699:157-199699:169", "199699:172-199699:410", "199699:412-199699:756", "199703:1-199703:94",
"199703:97-199703:482", "199703:485-199703:529", "199739:66-199739:133", "199751:103-199751:119", "199751:121-199751:127",
"199752:1-199752:141", "199752:144-199752:180", "199752:182-199752:186", "199752:188-199752:211", "199752:214-199752:322",
"199753:1-199753:59", "199754:1-199754:203", "199754:205-199754:325", "199754:328-199754:457", "199754:459-199754:607",
"199754:610-199754:613", "199754:615-199754:806", "199754:808-199754:998", "199804:78-199804:88", "199804:90-199804:181",
"199804:183-199804:235", "199804:238-199804:278", "199804:281-199804:290", "199804:292-199804:519", "199804:522-199804:575",
"199804:577-199804:628", "199804:631-199804:632", "199812:70-199812:141", "199812:144-199812:163", "199812:182-199812:211",
"199812:214-199812:471", "199812:474-199812:505", "199812:508-199812:557", "199812:560-199812:571", "199812:574-199812:623",
"199812:626-199812:751", "199812:754-199812:796", "199832:58-199832:62", "199832:65-199832:118", "199832:121-199832:139",
"199832:142-199832:286", "199833:1-199833:13", "199833:16-199833:103", "199833:105-199833:250", "199833:253-199833:493",
"199833:496-199833:794", "199833:797-199833:1032", "199833:1034-199833:1185", "199833:1188-199833:1239", "199834:1-199834:9",
"199834:11", "199834:14-199834:18", "199834:21-199834:54", "199834:56-199834:57", "199834:62-199834:65",
"199834:69-199834:284", "199834:286-199834:503", "199834:505-199834:942", "199862:59-199862:141", "199864:1-199864:87",
"199864:89", "199864:92-199864:103", "199864:106-199864:372", "199864:374-199864:385", "199864:388-199864:486",
"199867:1-199867:134", "199867:136-199867:172", "199867:174-199867:218", "199867:221-199867:320", "199868:1-199868:21",
"199875:70-199875:150", "199875:152-199875:334", "199876:1-199876:19", "199876:22-199876:95", "199876:97-199876:249",
"199876:252-199876:272", "199876:274-199876:340", "199876:343-199876:362", "199876:365-199876:376", "199877:1-199877:173",
"199877:175-199877:605", "199877:607-199877:701", "199877:703-199877:871", "199960:72-199960:139", "199960:141-199960:197",
"199960:204-199960:232", "199960:235-199960:363", "199960:365-199960:367", "199960:370-199960:380", "199960:383-199960:459",
"199960:461-199960:466", "199960:469-199960:485", "199961:1-199961:211", "199961:213-199961:287", "199967:60-199967:120",
"199967:122-199967:170", "199967:172-199967:198", "199973:73-199973:89", "200041:62-200041:83", "200041:85-200041:157",
"200041:162-200041:274", "200041:277-200041:318", "200041:321-200041:335", "200041:337-200041:386", "200041:388-200041:389",
"200041:392-200041:400", "200041:402-200041:568", "200041:571-200041:593", "200041:595-200041:646", "200041:649-200041:728",
"200041:731-200041:860", "200041:862-200041:930", "200041:932-200041:1096", "200042:1-200042:110", "200042:112-200042:536",
"200049:1-200049:177", "200075:76-200075:139", "200075:142-200075:232", "200075:256-200075:326", "200075:329-200075:422",
"200075:425-200075:431", "200075:434-200075:500", "200075:502-200075:605", "200091:67", "200091:70-200091:151",
"200091:154-200091:172", "200091:174-200091:187", "200091:190-200091:196", "200091:199-200091:201", "200091:204-200091:425",
"200091:428-200091:535", "200091:537-200091:607", "200091:610-200091:879", "200091:881-200091:943", "200091:946-200091:999",
"200091:1001-200091:1025", "200091:1027-200091:1132", "200091:1135-200091:1339", "200091:1341-200091:1433", "200091:1435-200091:1450",
"200091:1453-200091:1523", "200091:1526-200091:1664", "200091:1667-200091:1680", "200091:1683-200091:1710", "200152:74-200152:116",
"200160:52-200160:68", "200161:1-200161:97", "200161:100-200161:112", "200174:81-200174:84", "200177:1-200177:56",
"200178:1-200178:38", "200180:1-200180:18", "200186:1-200186:3", "200186:6-200186:24", "200188:1-200188:24",
"200188:27-200188:28", "200188:31-200188:76", "200188:79-200188:271", "200188:274-200188:352", "200190:1-200190:4",
"200190:6-200190:76", "200190:79-200190:143", "200190:146-200190:159", "200190:162-200190:256", "200190:258-200190:321",
"200190:324-200190:401", "200190:403-200190:453", "200190:456-200190:457", "200190:460-200190:565", "200190:567-200190:588",
"200190:591", "200190:593-200190:595", "200190:597-200190:646", "200190:649-200190:878", "200229:1-200229:33",
"200229:41-200229:219", "200229:222-200229:244", "200229:247-200229:290", "200229:293-200229:624", "200229:627-200229:629",
"200243:69-200243:103", "200243:106-200243:139", "200244:3-200244:304", "200244:307-200244:442", "200244:445-200244:507",
"200244:510-200244:619", "200245:1-200245:103", "200245:105-200245:128", "200245:131-200245:248", "200245:251-200245:357",
"200368:72-200368:180", "200369:1-200369:5", "200369:8-200369:61", "200369:64-200369:360", "200369:363-200369:439",
"200369:441-200369:578", "200369:580-200369:603", "200369:606-200369:684", "200369:686", "200381:8-200381:15",
"200381:18-200381:36", "200381:38-200381:89", "200381:91-200381:195", "200466:134-200466:274", "200473:96-200473:157",
"200473:159-200473:224", "200473:226-200473:304", "200473:306-200473:469", "200473:472-200473:524", "200473:527-200473:542",
"200473:545-200473:619", "200473:622-200473:688", "200473:691-200473:730", "200473:733-200473:738", "200473:740-200473:1324",
"200491:87-200491:107", "200491:110-200491:149", "200491:152-200491:157", "200491:160-200491:197", "200491:199-200491:237",
"200491:240-200491:270", "200491:273", "200491:276-200491:334", "200491:336-200491:360", "200491:363-200491:419",
"200515:97-200515:183", "200519:1-200519:111", "200519:114-200519:126", "200519:129-200519:136", "200519:138-200519:224",
"200519:227-200519:258", "200519:261-200519:350", "200519:353-200519:611", "200519:613-200519:747", "200525:77-200525:149",
"200525:151-200525:164", "200525:166-200525:190", "200525:193-200525:276", "200525:278-200525:311", "200525:314-200525:464",
"200525:467-200525:488", "200525:491-200525:674", "200525:676-200525:704", "200525:707-200525:755", "200525:757-200525:895",
"200525:898-200525:937", "200525:939-200525:990", "200532:1-200532:37", "200599:75-200599:129", "200599:132-200599:137",
"200600:1-200600:183", "200600:186-200600:299", "200600:302-200600:313", "200600:316-200600:324", "200600:327-200600:334",
"200600:336-200600:397", "200600:399-200600:417", "200600:420-200600:526", "200600:529-200600:591", "200600:594-200600:596",
"200600:598-200600:609", "200600:611-200600:660", "200600:663-200600:823", "200600:826-200600:900", "200600:902-200600:943",
"200600:945-200600:1139", "200961:1-200961:115", "200976:94-200976:164", "200990:75-200990:143", "200991:1-200991:42",
"200991:44", "200991:47-200991:80", "200991:83-200991:175", "200991:178-200991:181", "200991:184-200991:252",
"200991:255-200991:632", "200991:635-200991:916", "200991:918-200991:1017", "200991:1019-200991:1048", "200992:1-200992:405",
"200992:408-200992:434", "200992:436-200992:581", "201062:78-201062:268", "201097:83-201097:136", "201097:138-201097:245",
"201097:248-201097:300", "201097:303-201097:370", "201097:372-201097:429", "201097:432-201097:497", "201114:1-201114:14",
"201115:1-201115:73", "201159:70-201159:211", "201164:1-201164:8", "201164:10-201164:94", "201164:96-201164:125",
"201164:128-201164:178", "201164:180-201164:198", "201164:200-201164:271", "201164:274-201164:416", "201164:418",
"201168:1-201168:37", "201168:39-201168:275", "201168:278-201168:481", "201168:483-201168:558", "201168:560-201168:730",
"201173:1-201173:194", "201173:197-201173:586", "201174:1-201174:214", "201174:216-201174:263", "201174:265-201174:339",
"201174:342-201174:451", "201191:75-201191:98", "201191:100-201191:216", "201191:218-201191:389", "201191:392-201191:492",
"201191:494-201191:506", "201191:509-201191:585", "201191:587-201191:594", "201191:597-201191:607", "201191:609-201191:794",
"201191:796-201191:838", "201191:841-201191:974", "201191:977-201191:1105", "201191:1108-201191:1117", "201191:1120-201191:1382",
"201191:1385-201191:1386", "201193:1-201193:19", "201196:1-201196:238", "201196:241-201196:278", "201196:286-201196:299",
"201196:302-201196:338", "201196:341-201196:515", "201196:518-201196:720", "201196:723-201196:789", "201196:803-201196:841",
"201197:1-201197:23", "201202:1-201202:437", "201229:1-201229:5", "201229:8-201229:26", "201229:29-201229:73",
"201278:62-201278:163", "201278:166-201278:229", "201278:232-201278:256", "201278:259-201278:316", "201278:318-201278:595",
"201278:598-201278:938", "201278:942-201278:974", "201278:976-201278:1160", "201278:1163-201278:1304", "201278:1306-201278:1793",
"201278:1796-201278:1802", "201278:1805-201278:1906", "201278:1909-201278:1929", "201278:1932-201278:2174", "201554:70-201554:86",
"201554:88-201554:114", "201554:116-201554:126", "201602:76-201602:81", "201602:83-201602:194", "201602:196-201602:494",
"201602:496-201602:614", "201602:617-201602:635", "201611:87-201611:145", "201611:149-201611:182", "201611:184-201611:186",
"201613:1-201613:42", "201613:44-201613:49", "201613:53-201613:210", "201613:213-201613:215", "201613:218-201613:225",
"201613:228-201613:646", "201624:83-201624:92", "201624:95-201624:240", "201624:270", "201625:211-201625:312",
"201625:315-201625:348", "201625:351-201625:416", "201625:418-201625:588", "201625:591-201625:671", "201625:673-201625:758",
"201625:760-201625:791", "201625:793-201625:944", "201657:77-201657:93", "201657:95-201657:108", "201657:110-201657:118",
"201658:1-201658:19", "201658:21-201658:118", "201658:121-201658:136", "201658:139-201658:288", "201668:78-201668:157",
"201669:1-201669:9", "201669:12-201669:136", "201669:139-201669:141", "201669:143-201669:165", "201671:1-201671:120",
"201671:122-201671:174", "201671:177-201671:462", "201671:464-201671:482", "201671:485-201671:499", "201671:501-201671:545",
"201671:547-201671:571", "201671:574-201671:614", "201671:617-201671:766", "201671:768-201671:896", "201671:899-201671:911",
"201671:914-201671:1007", "201678:1-201678:120", "201679:1-201679:110", "201679:112-201679:241", "201679:244-201679:298",
"201679:302-201679:321", "201679:324-201679:461", "201679:463-201679:483", "201692:78-201692:81", "201692:83-201692:179",
"201705:65-201705:73", "201705:75-201705:109", "201705:111-201705:187", "201706:1-201706:62", "201707:1-201707:23",
"201707:26-201707:42", "201707:45-201707:115", "201707:118-201707:130", "201707:133-201707:160", "201707:163-201707:276",
"201707:279-201707:471", "201707:473-201707:511", "201707:514-201707:545", "201707:547-201707:570", "201707:572-201707:622",
"201707:625-201707:735", "201707:738-201707:806", "201707:809-201707:876", "201707:879-201707:964", "201708:1-201708:79",
"201718:58-201718:108", "201727:67-201727:185", "201729:6-201729:20", "201729:22-201729:75", "201729:77-201729:126",
"201729:129-201729:154", "201729:156-201729:216", "201729:219-201729:244", "201794:58-201794:94", "201802:68-201802:209",
"201802:211-201802:214", "201802:216-201802:220", "201802:223-201802:288", "201802:290-201802:296", "201816:1-201816:72",
"201816:74-201816:105", "201816:107-201816:157", "201817:1-201817:274", "201818:1", "201819:1-201819:94",
"201819:96-201819:241", "201824:1-201824:139", "201824:141-201824:176", "201824:179-201824:286", "201824:289-201824:492",
"202012:98-202012:121", "202012:126-202012:131", "202013:1-202013:2", "202013:5-202013:35", "202013:38-202013:57",
"202014:1-202014:5", "202014:8-202014:14", "202014:16-202014:18", "202014:20-202014:77", "202014:79-202014:102",
"202014:104-202014:174", "202014:177-202014:190", "202014:192-202014:196", "202016:1-202016:48", "202016:51-202016:134",
"202016:137-202016:177", "202016:179-202016:743", "202016:745-202016:831", "202016:834-202016:890", "202016:893-202016:896",
"202016:898-202016:932", "202016:934-202016:1010", "202044:84-202044:101", "202044:104-202044:266", "202044:268-202044:461",
"202044:463-202044:466", "202045:1-202045:30", "202045:33-202045:72", "202045:75-202045:528", "202045:531-202045:601",
"202045:603-202045:785", "202045:788-202045:809", "202045:822-202045:823", "202054:6-202054:266", "202054:268-202054:489",
"202054:492-202054:605", "202054:608-202054:631", "202060:76-202060:142", "202060:144-202060:154", "202060:156-202060:244",
"202060:246-202060:497", "202060:499-202060:642", "202060:644-202060:682", "202060:684-202060:743", "202060:746-202060:936",
"202074:66-202074:174", "202075:1-202075:18", "202075:21-202075:187", "202075:189-202075:214", "202075:217-202075:247",
"202075:250-202075:342", "202075:345-202075:406", "202075:409-202075:497", "202075:500-202075:537", "202075:539",
"202075:542-202075:560", "202075:562-202075:615", "202075:618-202075:628", "202084:83-202084:156", "202084:159-202084:177",
"202084:179-202084:180", "202084:182-202084:239", "202087:1-202087:25", "202087:28-202087:208", "202087:210-202087:357",
"202087:359-202087:652", "202087:655-202087:853", "202087:856-202087:1093", "202088:1-202088:286", "202093:1-202093:104",
"202093:107-202093:320", "202093:322-202093:360", "202116:59-202116:60", "202178:67-202178:78", "202178:80-202178:88",
"202178:91-202178:177", "202178:180-202178:186", "202178:188-202178:337", "202178:340-202178:377", "202178:379-202178:425",
"202178:428-202178:475", "202178:478-202178:548", "202178:551-202178:717", "202178:720-202178:965", "202178:967-202178:1444",
"202178:1447-202178:1505", "202178:1508-202178:1519", "202178:1522-202178:1555", "202205:94-202205:114", "202209:1-202209:48",
"202209:51-202209:142", "202237:39-202237:128", "202237:131", "202237:134-202237:219", "202237:222-202237:235",
"202237:238-202237:275", "202237:277-202237:289", "202237:291-202237:316", "202237:319-202237:419", "202237:422-202237:538",
"202237:540-202237:936", "202237:939-202237:950", "202237:952-202237:976", "202237:979-202237:1079", "202272:76-202272:112",
"202272:115-202272:141", "202272:144-202272:185", "202272:188-202272:205", "202272:208-202272:305", "202272:307-202272:313",
"202272:315-202272:371", "202272:436-202272:480", "202272:483-202272:555", "202272:558-202272:577", "202272:579-202272:683",
"202272:686-202272:705", "202272:707-202272:740", "202272:742-202272:890", "202272:937-202272:1295", "202272:1299-202272:1481",
"202299:68-202299:84", "202299:87-202299:141", "202299:143-202299:193", "202299:196-202299:358", "202299:361-202299:379",
"202299:382-202299:414", "202299:416-202299:452", "202299:455-202299:555", "202305:1-202305:89", "202305:92-202305:130",
"202305:133-202305:323", "202314:67-202314:104", "202314:107-202314:265", "202314:268-202314:278", "202328:46-202328:89",
"202328:92-202328:156", "202328:158-202328:276", "202328:278-202328:291", "202328:294-202328:434", "202328:437-202328:460",
"202328:463-202328:586", "202328:588-202328:610", "202328:612-202328:614", "202333:1-202333:235", "202389:81-202389:182",
"202389:185-202389:190", "202389:192-202389:199", "202469:87-202469:158", "202469:160-202469:174", "202469:177-202469:352",
"202472:1-202472:96", "202472:99-202472:112", "202477:1-202477:129", "202477:131-202477:150", "202478:1-202478:177",
"202478:180-202478:183", "202478:186-202478:219", "202478:222-202478:360", "202478:362-202478:506", "202478:509-202478:531",
"202478:534-202478:718", "202478:720-202478:927", "202478:929-202478:973", "202478:975-202478:1029", "202478:1031-202478:1186",
"202478:1189-202478:1212", "202478:1215-202478:1248", "202504:77-202504:96", "202504:99-202504:133", "202504:135-202504:182",
"202504:184-202504:211", "202504:213-202504:241", "202504:243-202504:392", "202504:395-202504:527", "202504:529-202504:617",
"202504:620-202504:715", "202504:718-202504:763", "202504:766-202504:1172", "202504:1174-202504:1247", "202504:1250-202504:1471",
"202504:1474-202504:1679", "202504:1682-202504:1704", "202972:1-202972:30", "202972:33-202972:184", "202972:186-202972:290",
"202972:292-202972:295", "202972:298-202972:371", "202972:374-202972:429", "202972:431-202972:544", "202973:1-202973:234",
"202973:237-202973:305", "202973:308-202973:437", "202973:439-202973:530", "202973:532-202973:541", "202973:544-202973:552",
"202973:555-202973:851", "202973:853-202973:1408", "203002:77-203002:128", "203002:130-203002:141", "203002:144-203002:207",
"203002:209-203002:267", "203002:270-203002:360", "203002:362-203002:501", "203002:504-203002:641", "203002:643-203002:669",
"203002:671", "203002:674-203002:717", "203002:720-203002:1034", "203002:1037-203002:1070", "203002:1073-203002:1370",
"203002:1372-203002:1392", "203002:1395-203002:1410", "203002:1413-203002:1596", "203709:1-203709:121", "203742:1-203742:29",
"203777:103-203777:113", "203830:82-203830:182", "203832:1-203832:11", "203833:1-203833:70", "203833:73-203833:128",
"203834:1-203834:40", "203835:1-203835:70", "203835:73-203835:358", "203853:122-203853:222", "203894:82-203894:272",
"203894:275-203894:477", "203894:480-203894:902", "203894:905-203894:1319", "203909:79-203909:113", "203909:116-203909:117",
"203909:120-203909:140", "203909:143-203909:382", "203912:1-203912:306", "203912:308-203912:566", "203912:569-203912:609",
"203912:611-203912:698", "203912:701-203912:820", "203912:823-203912:865", "203912:867-203912:1033", "203912:1035-203912:1321",
"203987:1-203987:9", "203987:12-203987:241", "203987:243-203987:339", "203987:342-203987:781", "203987:784-203987:1014",
"203992:1-203992:15", "203994:1-203994:56", "203994:59-203994:136", "203994:139-203994:304", "203994:306-203994:342",
"203994:344-203994:425", "204100:117-204100:139", "204101:1-204101:74", "204113:82-204113:96", "204113:98-204113:102",
"204113:105-204113:127", "204113:129-204113:191", "204113:194-204113:258", "204113:261-204113:327", "204113:329-204113:388",
"204113:390-204113:400", "204113:402-204113:583", "204113:585-204113:690", "204114:1-204114:358", "204238:23-204238:52",
"204238:55", "204250:92-204250:118", "204250:121-204250:177", "204250:179-204250:285", "204250:287-204250:336",
"204250:339-204250:400", "204250:403-204250:521", "204250:524-204250:543", "204250:546-204250:682", "204250:684-204250:801",
"204511:1-204511:56", "204541:5-204541:39", "204541:42", "204541:44-204541:139", "204541:142-204541:149",
"204541:151-204541:204", "204544:1-204544:11", "204544:13-204544:93", "204544:96-204544:195", "204544:197-204544:224",
"204544:226-204544:334", "204544:337-204544:426", "204552:1-204552:9", "204553:1-204553:51", "204553:53-204553:60",
"204553:63-204553:101", "204554:1-204554:5", "204554:7-204554:221", "204554:224-204554:455", "204554:458-204554:470",
"204554:472-204554:481", "204554:483-204554:514", "204555:1-204555:329", "204555:331-204555:334", "204563:91-204563:99",
"204563:102-204563:178", "204563:180-204563:219", "204563:222-204563:229", "204563:231-204563:364", "204563:366",
"204563:369-204563:470", "204563:473-204563:524", "204563:527-204563:571", "204564:1-204564:84", "204564:87-204564:89",
"204564:92-204564:159", "204564:161-204564:187", "204564:190-204564:191", "204564:193-204564:293", "204564:296-204564:315",
"204564:317-204564:340", "204564:343-204564:427", "204564:429-204564:434", "204564:437-204564:735", "204564:737-204564:855",
"204564:858-204564:1206", "204564:1209-204564:1248", "204564:1251-204564:1284", "204565:1-204565:48", "204566:1-204566:12",
"204567:1-204567:38", "204576:49-204576:192", "204576:195-204576:301", "204577:1-204577:46", "204577:49-204577:64",
"204577:67-204577:105", "204577:107-204577:170", "204577:173-204577:181", "204577:183-204577:193", "204577:196-204577:653",
"204577:656-204577:669", "204577:671-204577:740", "204577:742-204577:913", "204577:915-204577:1057", "204577:1059-204577:1115",
"204577:1117-204577:1282", "204599:73-204599:83", "204599:85-204599:94", "204599:97-204599:121", "204599:124-204599:125",
"204599:128-204599:173", "204599:175-204599:240", "204599:243-204599:245", "204599:248-204599:264", "204599:266-204599:292",
"204599:294-204599:334", "204601:1-204601:25", "204601:28-204601:62", "204601:65-204601:80", "204601:83-204601:89",
"204601:92-204601:290", "204601:292-204601:563", "204601:565-204601:591", "204601:593-204601:652", "204601:655-204601:780",
"204601:783-204601:812", "204601:814-204601:892", "204601:894-204601:984", "204601:986-204601:1003", "204601:1006-204601:1038",
"204601:1040-204601:1088", "204601:1091-204601:1102", "204601:1105-204601:1161", "204601:1164-204601:1250", "205086:95-205086:149",
"205111:88-205111:390", "205111:392-205111:441", "205111:444-205111:446", "205158:81-205158:289", "205158:292-205158:313",
"205158:315-205158:473", "205158:476-205158:591", "205158:594-205158:595", "205158:597-205158:612", "205158:615-205158:663",
"205158:665-205158:667", "205158:672-205158:685", "205158:687-205158:733", "205193:80-205193:109", "205193:111-205193:349",
"205193:352-205193:486", "205193:488-205193:650", "205193:652-205193:712", "205193:714-205193:902", "205217:1-205217:12",
"205217:16-205217:111", "205217:113-205217:171", "205217:174-205217:250", "205217:253-205217:318", "205233:94-205233:153",
"205236:1-205236:190", "205236:193-205236:207", "205236:209-205236:260", "205236:263-205236:331", "205236:334-205236:352",
"205238:1-205238:6", "205238:9-205238:199", "205238:202-205238:254", "205238:256-205238:304", "205238:306-205238:355",
"205238:358-205238:381", "205238:384-205238:596", "205238:598-205238:617", "205303:35-205303:54", "205303:90-205303:132",
"205303:135-205303:144", "205310:76-205310:306", "205310:309-205310:313", "205310:316", "205310:319-205310:321",
"205310:324-205310:457", "205310:460-205310:559", "205311:1-205311:85", "205311:88-205311:92", "205311:95-205311:183",
"205311:186-205311:395", "205311:397-205311:592", "205311:595-205311:910", "205311:913-205311:1260", "205339:71-205339:175",
"205339:178-205339:213", "205339:216-205339:230", "205339:233-205339:262", "205339:265-205339:404", "205344:1-205344:83",
"205344:86-205344:104", "205344:106-205344:359", "205344:362-205344:431", "205344:433-205344:949", "205344:951-205344:967",
"205344:969-205344:1127", "205344:1129-205344:1346", "205344:1348-205344:1586", "205515:82-205515:201", "205515:203-205515:216",
"205519:1-205519:47", "205519:50-205519:172", "205519:175-205519:367", "205519:370-205519:386", "205519:389-205519:472",
"205526:1-205526:269", "205526:272-205526:277", "205526:280-205526:332", "205614:1-205614:4", "205614:7-205614:40",
"205617:1-205617:29", "205617:32-205617:102", "205617:105-205617:123", "205617:125-205617:140", "205617:143-205617:264",
"205617:266-205617:448", "205617:451-205617:532", "205617:534-205617:547", "205618:1-205618:12", "205620:1-205620:175",
"205666:60-205666:119", "205666:122-205666:165", "205666:168-205666:259", "205666:261-205666:322", "205666:325-205666:578",
"205666:580-205666:594", "205666:597-205666:721", "205666:724-205666:739", "205667:1-205667:165", "205667:168-205667:282",
"205667:285-205667:318", "205667:321-205667:412", "205667:415-205667:689", "205667:692-205667:751", "205667:754-205667:774",
"205667:777-205667:1109", "205683:76-205683:82", "205683:85-205683:178", "205683:181-205683:198", "205683:201-205683:305",
"205690:1-205690:40", "205694:1-205694:205", "205694:208-205694:230", "205694:233-205694:347", "205694:350-205694:452",
"205694:455-205694:593", "205694:595-205694:890", "205718:49-205718:75", "205718:78-205718:97", "205718:100-205718:103",
"205718:105-205718:176", "205718:178-205718:338", "205718:341-205718:361", "205718:363-205718:524", "205718:527-205718:531",
"205718:534-205718:589", "205718:591-205718:694", "205774:1-205774:80", "205777:1-205777:8", "205781:1-205781:89",
"205781:91-205781:197", "205781:200-205781:502", "205826:80-205826:232", "205826:235-205826:303", "205826:306-205826:468",
"205833:84-205833:86", "205833:89-205833:121", "205833:123-205833:155", "205833:157-205833:165", "205833:167-205833:173",
"205833:176-205833:219", "205833:221-205833:267", "205833:270-205833:312", "205833:315-205833:346", "205833:350-205833:355",
"205833:360-205833:366", "205834:1-205834:12", "205834:14-205834:195", "205908:68-205908:200", "205908:202-205908:209",
"205921:22-205921:73", "205921:76-205921:268", "205921:271-205921:394", "205921:397-205921:401", "205921:410-205921:428",
"205921:431-205921:498", "205921:500-205921:571", "205921:574-205921:779", "205921:782-205921:853", "206066:89-206066:146",
"206088:86-206088:159", "206088:161-206088:178", "206088:181-206088:199", "206088:202-206088:286", "206102:83-206102:116",
"206102:120-206102:130", "206102:133-206102:208", "206102:211-206102:235", "206102:238-206102:246", "206102:249-206102:278",
"206102:281-206102:349", "206187:107-206187:169", "206187:172-206187:242", "206187:245-206187:288", "206187:290-206187:340",
"206187:343-206187:427", "206187:429-206187:435", "206187:437-206187:486", "206187:489-206187:569", "206187:571-206187:647",
"206187:649-206187:662", "206187:664-206187:708", "206188:1-206188:40", "206188:42-206188:55", "206199:1-206199:75",
"206199:77-206199:82", "206199:85-206199:114", "206207:82-206207:130", "206207:132-206207:176", "206207:179-206207:194",
"206207:196-206207:388", "206207:390-206207:419", "206207:422-206207:447", "206207:450-206207:569", "206207:572-206207:690",
"206208:1-206208:470", "206208:472-206208:518", "206210:11-206210:25", "206210:28-206210:275", "206210:277-206210:298",
"206210:300-206210:383", "206210:386-206210:466", "206243:62-206243:169", "206243:172-206243:196", "206243:199-206243:354",
"206243:357-206243:433", "206243:435-206243:448", "206243:451-206243:533", "206243:536-206243:554", "206243:557-206243:723",
"206243:726-206243:905", "206245:1-206245:62", "206246:1-206246:14", "206246:16-206246:237", "206246:240-206246:285",
"206246:288-206246:407", "206246:412-206246:676", "206246:678-206246:704", "206246:706-206246:785", "206246:787-206246:962",
"206246:965-206246:997", "206246:1000-206246:1198", "206246:1201-206246:1290", "206257:1-206257:29", "206258:1-206258:36",
"206258:39-206258:223", "206258:226-206258:249", "206302:1-206302:8", "206302:11-206302:33", "206302:36-206302:44",
"206302:47-206302:82", "206302:84-206302:108", "206302:110-206302:149", "206302:151-206302:186", "206302:189-206302:229",
"206302:231-206302:232", "206302:234-206302:241", "206302:243-206302:276", "206303:1-206303:19", "206303:23-206303:286",
"206304:1-206304:4", "206304:6-206304:62", "206331:91-206331:222", "206331:225-206331:312", "206389:88-206389:185",
"206389:187-206389:249", "206389:252-206389:272", "206389:275-206389:392", "206391:1-206391:55", "206391:57-206391:91",
"206401:69-206401:90", "206401:92-206401:194", "206401:197-206401:210", "206401:212-206401:249", "206401:251-206401:265",
"206401:267-206401:409", "206446:92-206446:141", "206446:143-206446:159", "206446:162-206446:205", "206446:208-206446:301",
"206446:304-206446:442", "206446:445", "206446:448-206446:474", "206446:476-206446:616", "206446:619-206446:872",
"206446:874-206446:910", "206446:912-206446:948", "206446:950-206446:989", "206446:992-206446:1030", "206446:1033-206446:1075",
"206446:1109-206446:1149", "206448:1-206448:143", "206448:145-206448:559", "206448:561-206448:1170", "206448:1173-206448:1231",
"206448:1235-206448:1237", "206466:24-206466:137", "206466:140-206466:277", "206466:280-206466:296", "206466:299-206466:303",
"206466:306-206466:405", "206466:407-206466:419", "206466:422-206466:477", "206466:480-206466:511", "206466:514-206466:676",
"206476:73-206476:129", "206476:133-206476:137", "206476:140-206476:141", "206476:143-206476:219", "206477:1-206477:14",
"206477:16-206477:31", "206477:33-206477:41", "206477:44-206477:51", "206477:53-206477:70", "206477:73-206477:75",
"206477:77-206477:89", "206477:91-206477:94", "206477:97-206477:115", "206477:118-206477:184", "206478:1-206478:27",
"206478:29-206478:136", "206478:139-206478:144", "206484:73-206484:95", "206484:98-206484:133", "206484:136-206484:163",
"206484:166-206484:186", "206484:189-206484:384", "206484:387-206484:463", "206484:465-206484:551", "206484:554",
"206484:556-206484:669", "206512:91-206512:123", "206512:125-206512:133", "206512:136-206512:161", "206512:163-206512:190",
"206512:193-206512:201", "206512:203-206512:212", "206512:214-206512:332", "206512:334-206512:584", "206512:587-206512:604",
"206512:607-206512:1005", "206512:1008-206512:1123", "206512:1126-206512:1163", "206512:1165-206512:1211", "206513:3-206513:39",
"206513:42-206513:188", "206513:191-206513:234", "206513:237-206513:238", "206513:241-206513:323", "206542:1-206542:115",
"206542:117-206542:165", "206542:168-206542:511", "206542:514-206542:547", "206542:550-206542:603", "206542:606-206542:668",
"206542:671-206542:727", "206542:730-206542:739", "206542:741-206542:833", "206550:77-206550:132", "206550:135-206550:144",
"206572:37-206572:47", "206573:2-206573:14", "206574:1-206574:87", "206575:1-206575:7", "206575:10",
"206575:12-206575:69", "206594:72-206594:107", "206594:110-206594:246", "206594:249-206594:281", "206595:1-206595:34",
"206595:37-206595:42", "206595:45-206595:193", "206596:1-206596:13", "206596:15-206596:220", "206596:222-206596:228",
"206596:231-206596:236", "206596:239-206596:292", "206596:295-206596:695", "206596:697-206596:728", "206596:730-206596:810",
"206598:1-206598:81", "206598:83-206598:103", "206598:105-206598:588", "206598:591-206598:657", "206598:659-206598:719",
"206605:1-206605:36", "206605:39-206605:78", "206744:49-206744:157", "206744:160-206744:192", "206744:195-206744:395",
"206744:398-206744:452", "206745:1-206745:81", "206745:84-206745:199", "206745:202-206745:224", "206745:227-206745:237",
"206745:240-206745:304", "206745:306-206745:318", "206745:321-206745:720", "206745:723-206745:796", "206745:799-206745:894",
"206745:897-206745:944", "206745:946-206745:1106", "206745:1108-206745:1524", "206745:1527-206745:1862", "206745:1988-206745:1996",
"206859:79-206859:210", "206859:212-206859:258", "206859:260-206859:323", "206859:325-206859:356", "206859:359-206859:609",
"206859:612-206859:681", "206859:684-206859:732", "206859:734-206859:768", "206859:771-206859:808", "206859:811-206859:827",
"206859:830-206859:848", "206866:1-206866:30", "206866:33-206866:113", "206866:115-206866:274", "206868:1-206868:3",
"206868:10-206868:16", "206869:1-206869:251", "206869:253-206869:271", "206869:274-206869:502", "206869:507-206869:520",
"206869:522-206869:566", "206869:568-206869:752", "206897:1-206897:34", "206897:38-206897:61", "206897:63-206897:102",
"206897:109", "206897:111-206897:112", "206897:114-206897:131", "206897:133-206897:137", "206901:1-206901:98",
"206906:1-206906:31", "206906:38-206906:94", "206906:96-206906:136", "206906:138-206906:139", "206906:142-206906:149",
"206906:151-206906:175", "206906:177-206906:206", "206940:1-206940:151", "206940:153", "206940:155-206940:298",
"206940:301-206940:382", "206940:384-206940:712", "206940:715-206940:803", "206940:805-206940:960", "206940:963-206940:1027",
"207099:83-207099:134", "207099:137-207099:172", "207099:175-207099:213", "207099:216-207099:314", "207099:316-207099:320",
"207099:323-207099:330", "207099:333-207099:367", "207099:370-207099:481", "207099:484-207099:602", "207099:605-207099:755",
"207099:757-207099:1046", "207099:1048-207099:1171", "207100:1-207100:91", "207100:94", "207214:57-207214:112",
"207214:114-207214:177", "207214:179-207214:181", "207214:184-207214:196", "207214:199-207214:220", "207214:223-207214:262",
"207214:265-207214:405", "207214:408-207214:482", "207214:485-207214:640", "207214:643-207214:708", "207214:718-207214:757",
"207214:759-207214:808", "207214:811-207214:829", "207217:1-207217:32", "207219:1-207219:112", "207220:1-207220:160",
"207221:1-207221:102", "207222:1-207222:17", "207222:20-207222:289", "207231:70-207231:84", "207231:86-207231:121",
"207231:123-207231:184", "207231:187-207231:189", "207231:192-207231:303", "207231:306-207231:354", "207231:357-207231:481",
"207231:484-207231:504", "207231:508-207231:549", "207231:552-207231:626", "207231:628-207231:690", "207231:693-207231:875",
"207231:878-207231:1000", "207231:1003-207231:1170", "207231:1173-207231:1187", "207231:1189-207231:1227", "207231:1229-207231:1415",
"207231:1418-207231:1445", "207231:1447-207231:1505", "207233:1-207233:119", "207233:121-207233:148", "207269:80-207269:394",
"207269:397-207269:436", "207269:439-207269:463", "207269:466-207269:551", "207269:568-207269:577", "207273:3-207273:877",
"207279:68-207279:138", "207279:141-207279:149", "207279:151-207279:237", "207279:240-207279:266", "207279:269-207279:307",
"207279:309-207279:416", "207279:498-207279:551", "207279:554-207279:640", "207279:643-207279:961", "207279:963-207279:1095",
"207279:1098-207279:1160", "207320:1-207320:110", "207320:112-207320:350", "207371:72-207371:117", "207371:120-207371:124",
"207372:1-207372:27", "207372:30-207372:113", "207372:116-207372:154", "207372:156-207372:174", "207372:176-207372:478",
"207372:480-207372:496", "207397:32-207397:77", "207397:80-207397:140", "207397:143-207397:179", "207398:1-207398:14",
"207398:16-207398:33", "207454:79-207454:95", "207454:98-207454:123", "207454:126-207454:259", "207454:261-207454:363",
"207454:365-207454:458", "207454:461-207454:498", "207454:501-207454:609", "207454:612-207454:632", "207454:635-207454:781",
"207454:784-207454:866", "207454:869-207454:974", "207454:977-207454:1064", "207454:1067-207454:1079", "207454:1081-207454:1321",
"207454:1323-207454:1464", "207454:1467-207454:1569", "207454:1571-207454:1604", "207454:1607-207454:1712", "207454:1714-207454:1988",
"207469:1-207469:31", "207469:34-207469:45", "207477:76-207477:104", "207477:107-207477:111", "207477:114-207477:147",
"207477:150-207477:295", "207477:298-207477:483", "207477:486-207477:494", "207477:497-207477:527", "207477:530-207477:563",
"207477:565-207477:570", "207487:50-207487:98", "207487:101-207487:311", "207487:313-207487:359", "207487:363-207487:468",
"207487:471-207487:472", "207488:1-207488:63", "207488:66-207488:92", "207488:95-207488:113", "207488:116-207488:198",
"207488:200-207488:250", "207488:252-207488:288", "207488:291-207488:365", "207488:368-207488:377", "207488:379-207488:440",
"207490:1-207490:48", "207490:51-207490:111", "207491:1-207491:176", "207491:179-207491:458", "207492:1-207492:20",
"207492:23-207492:298", "207515:79-207515:109", "207515:112-207515:132", "207515:134-207515:208", "207515:211-207515:225",
"207515:228-207515:320", "207515:322-207515:381", "207515:383-207515:498", "207515:500-207515:730", "207515:733-207515:849",
"207515:851-207515:954", "207515:957-207515:994", "207515:997-207515:1052", "207515:1055-207515:1143", "207515:1145-207515:1211",
"207517:1-207517:12", "207517:15-207517:57", "207518:1-207518:59", "207518:61-207518:83", "207882:22-207882:45",
"207883:1", "207883:3-207883:4", "207883:7-207883:75", "207884:1-207884:106", "207884:108-207884:183",
"207885:1-207885:90", "207886:1-207886:30", "207886:32-207886:90", "207886:92-207886:156", "207886:158-207886:166",
"207886:168-207886:171", "207889:1-207889:43", "207889:47-207889:57", "207889:60-207889:303", "207889:306-207889:442",
"207889:445", "207889:447-207889:551", "207889:553-207889:731", "207889:733-207889:907", "207889:910-207889:945",
"207898:1-207898:33", "207898:36-207898:57", "207898:60-207898:235", "207898:239-207898:257", "207898:260-207898:277",
"207905:75-207905:196", "207905:198-207905:281", "207905:284-207905:329", "207905:331-207905:402", "207905:404-207905:565",
"207905:568-207905:672", "207905:675-207905:805", "207905:807-207905:850", "207905:852-207905:861", "207905:864-207905:884",
"207905:886-207905:1180", "207905:1183-207905:1283", "207905:1285-207905:1331", "207905:1333-207905:1515", "207905:1518-207905:1734",
"207905:1737-207905:1796", "207920:84-207920:146", "207920:149-207920:241", "207920:243-207920:261", "207920:264-207920:291",
"207920:294-207920:486", "207920:489-207920:518", "207920:520-207920:598", "207920:600-207920:708", "207920:710-207920:826",
"207921:1-207921:37", "207921:40-207921:58", "207922:1-207922:69", "207922:71-207922:100", "207922:103-207922:126",
"207922:129-207922:242", "207922:274-207922:291", "207924:1-207924:52", "207924:54-207924:171", "207924:173-207924:178",
"207924:181-207924:339", "208307:2-208307:42", "208307:45", "208307:47-208307:70", "208307:72-208307:147",
"208307:150-208307:252", "208307:256-208307:259", "208307:262-208307:275", "208307:278-208307:342", "208307:345-208307:450",
"208307:453-208307:527", "208307:530-208307:583", "208307:586-208307:605", "208307:608-208307:616", "208307:618-208307:667",
"208307:670-208307:761", "208307:763-208307:798", "208307:800-208307:889", "208307:891-208307:893", "208307:896-208307:1055",
"208307:1057-208307:1205", "208307:1208-208307:1294", "208307:1297-208307:1328", "208339:77-208339:89", "208339:91-208339:122",
"208339:125-208339:208", "208339:211-208339:346", "208339:349-208339:363", "208341:1-208341:84", "208341:87-208341:117",
"208341:120-208341:513", "208341:515-208341:685", "208341:688-208341:693", "208341:695-208341:775", "208341:777-208341:824",
"208351:83-208351:97", "208351:100-208351:356", "208351:359-208351:367", "208351:369", "208352:1-208352:15",
"208352:17", "208352:19", "208353:1-208353:76", "208353:78-208353:269", "208353:271-208353:348",
"208357:1-208357:70", "208357:73-208357:507", "208390:72-208390:128", "208390:130-208390:169", "208391:52-208391:82",
"208391:84-208391:162", "208391:164-208391:216", "208391:219-208391:493", "208391:495-208391:498", "208391:500-208391:523",
"208391:526-208391:533", "208391:535-208391:588", "208391:591-208391:660", "208391:663-208391:869", "208427:49-208427:89",
"208427:92-208427:161", "208427:164", "208427:166-208427:173", "208427:175-208427:268", "208427:271-208427:312",
"208427:315", "208427:317-208427:335", "208427:337-208427:361", "208427:364-208427:402", "208427:404-208427:422",
"208427:425-208427:577", "208427:580-208427:647", "208428:1-208428:58", "208428:61-208428:68", "208428:70-208428:156",
"208428:159-208428:227", "208429:1-208429:56", "208429:59-208429:139", "208429:141-208429:159", "208429:162-208429:237",
"208429:240-208429:440", "208429:442-208429:452", "208429:455-208429:589", "208429:592-208429:712", "208429:715-208429:922",
"208487:2-208487:26", "208487:29-208487:159", "208487:161-208487:307", "208487:309-208487:459", "208487:462-208487:476",
"208487:479-208487:621", "208509:71-208509:232", "208538:2-208538:43", "208540:1-208540:26", "208540:29-208540:98",
"208541:1-208541:57", "208541:59-208541:173", "208541:175-208541:376", "208541:378-208541:413", "208551:119-208551:193",
"208551:195-208551:212", "208551:215-208551:300", "208551:303-208551:354", "208551:356-208551:554", "208551:557-208551:580",
"208686:73-208686:79", "208686:82-208686:181", "208686:183-208686:224", "208686:227-208686:243", "208686:246-208686:311",
"208686:313-208686:459" ) ),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/user/cmgtools/CMG/DoubleMuParked/StoreResults-Run2012C_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0/cmgTuple_542.root',
'/store/cmst3/user/cmgtools/CMG/DoubleMuParked/StoreResults-Run2012C_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0/cmgTuple_543.root',
'/store/cmst3/user/cmgtools/CMG/DoubleMuParked/StoreResults-Run2012C_22Jan2013_v1_PFembedded_trans1_tau132_pthad1_30had2_30_v1-5ef1c0fd428eb740081f19333520fdc8/USER/V5_B/PAT_CMG_V5_16_0/cmgTuple_544.root')
)
|
[
"[email protected]"
] | |
314ea5491f976610601bc93def87970f19fa13e6
|
33e006f5ae711d44d796a0e3ca384caefe1ec299
|
/Wprowadzenie do algorytmow - ksiazka/rozdzial 2/2.1-2.py
|
1919575e88d14a8d51ece544f7292e484a60b267
|
[] |
no_license
|
Cozoob/Algorithms_and_data_structures
|
959b188f8cef3e6b7b1fd2a6c45a5e169d8f41fe
|
f786a397964f71e2938d9fd6268d3428e3ed7992
|
refs/heads/main
| 2023-08-05T02:23:43.565651 | 2021-09-17T10:52:14 | 2021-09-17T10:52:14 | 407,532,105 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 514 |
py
|
# Zmodyfikuj INSERTION_SORT tak zeby sortowala w porzadku nierosnacym
def insertion_sort(A):
    """Sort the list A in place in non-increasing order and return it."""
    for pos in range(1, len(A)):
        current = A[pos]
        # Shift every strictly smaller element of A[0..pos-1] one slot to
        # the right, then drop `current` into the gap (descending order).
        hole = pos
        while hole > 0 and A[hole - 1] < current:
            A[hole] = A[hole - 1]
            hole -= 1
        A[hole] = current
    return A
if __name__ == '__main__':
    # Demo inputs from CLRS chapter 2.
    A = [5,2,4,6,1,3]
    B = [31,41,59,26,41,58]
    print(A)
    # insertion_sort mutates its argument in place (descending order).
    insertion_sort(A)
    insertion_sort(B)
    print(A)
    print(B)
|
[
"[email protected]"
] | |
c62c4a9af1d76050479aa8b61113b12aa938d298
|
9187131d6a06e4a2cd56a0eb6d20604b38ea2359
|
/apitest/tp/mail/test_case/page_object/mail_page.py
|
fd5073f7bbd54dfe0c0487251a04d2b334badf62
|
[] |
no_license
|
hikaruwin/hikaru
|
0dc75843047c01023327854798fbf4999e710f57
|
1675192d4584609bb1f678c2e5a82c06915ab25e
|
refs/heads/master
| 2020-03-27T23:33:14.958007 | 2018-09-04T10:29:40 | 2018-09-04T10:29:40 | 147,327,361 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 358 |
py
|
# coding: utf-8
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from time import sleep
from .base import Base
class MailPage(Base):
    """Page object for the mail site's landing page."""

    # Path of this page relative to the configured base URL.
    url = '/'
    # Locator of the element that displays the logged-in account name.
    login_success_user_loc = (By.ID, 'spnUid')

    def login_success_user(self):
        """Return the text of the logged-in user indicator element."""
        element = self.find_element(*self.login_success_user_loc)
        return element.text
|
[
"your email"
] |
your email
|
b07f99a0807b1964ad81d8b566bd461031dd078d
|
48832d27da16256ee62c364add45f21b968ee669
|
/res/scripts/client/account_helpers/customfilescache.py
|
76a90b18fe88817f3ac8604b079be904324562d0
|
[] |
no_license
|
webiumsk/WOT-0.9.15.1
|
0752d5bbd7c6fafdd7f714af939ae7bcf654faf7
|
17ca3550fef25e430534d079876a14fbbcccb9b4
|
refs/heads/master
| 2021-01-20T18:24:10.349144 | 2016-08-04T18:08:34 | 2016-08-04T18:08:34 | 64,955,694 | 0 | 0 | null | null | null | null |
WINDOWS-1250
|
Python
| false | false | 18,439 |
py
|
# 2016.08.04 19:47:56 Střední Evropa (letní čas)
# Embedded file name: scripts/client/account_helpers/CustomFilesCache.py
import os
import time
import base64
import urllib2
import cPickle
import BigWorld
import binascii
import threading
import BigWorld
from debug_utils import *
from functools import partial
from helpers import getFullClientVersion
from Queue import Queue
import shelve as provider
import random
# Slack (15 min) applied around the Expires check in __get.
_MIN_LIFE_TIME = 15 * 60
# Upper bound (24 h) used to clamp absent or bogus Expires headers.
_MAX_LIFE_TIME = 24 * 60 * 60
# In-memory entries untouched for this long (20 min) are evicted by __idle.
_LIFE_TIME_IN_MEMORY = 20 * 60
# On-disk packet layout version; mismatching entries are discarded on read.
_CACHE_VERSION = 2
# Sent as the HTTP User-Agent header on downloads.
_CLIENT_VERSION = getFullClientVersion()
def _LOG_EXECUTING_TIME(startTime, methodName, deltaTime = 0.1):
finishTime = time.time()
if finishTime - startTime > deltaTime:
LOG_WARNING('Method "%s" takes too much time %s' % (methodName, finishTime - startTime))
def parseHttpTime(t):
    """Convert an HTTP date string to a Unix timestamp via local mktime.

    None passes through as None, integers are returned unchanged, and a
    string that fails to parse yields None (after logging the error).
    Any other type is returned as-is.
    """
    if t is None:
        return
    elif isinstance(t, int):
        return t
    else:
        if isinstance(t, str):
            try:
                fields = t.split()
                day_names = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
                month_names = ['jan', 'feb', 'mar', 'apr', 'may', 'jun',
                               'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
                # Expected layout: "Wkd, DD Mon YYYY HH:MM:SS ..."
                wday = day_names.index(fields[0][:3].lower())
                mday = int(fields[1])
                month = month_names.index(fields[2].lower()) + 1
                year = int(fields[3])
                clock = fields[4].split(':')
                # tm_isdst = -1 lets mktime decide about daylight saving.
                t = int(time.mktime((year,
                                     month,
                                     mday,
                                     int(clock[0]),
                                     int(clock[1]),
                                     int(clock[2]),
                                     wday,
                                     0,
                                     -1)))
            except Exception as exc:
                LOG_ERROR(exc, t)
                t = None
        return t
def makeHttpTime(dt):
    """Format a time.struct_time as an RFC-1123 style GMT date string.

    Returns None (after logging) when dt lacks the expected fields.
    """
    try:
        day_names = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
        month_names = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
                       'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')
        t = '%s, %02d %s %04d %02d:%02d:%02d GMT' % (day_names[dt.tm_wday],
                                                     dt.tm_mday,
                                                     month_names[dt.tm_mon - 1],
                                                     dt.tm_year,
                                                     dt.tm_hour,
                                                     dt.tm_min,
                                                     dt.tm_sec)
    except Exception as exc:
        LOG_ERROR(exc, dt)
        t = None
    return t
def getSafeDstUTCTime():
    """Return the current UTC wall-clock fields run through local mktime,
    with tm_isdst forced to -1 (auto-detect daylight saving)."""
    now = time.gmtime()
    fields = (now.tm_year,
              now.tm_mon,
              now.tm_mday,
              now.tm_hour,
              now.tm_min,
              now.tm_sec,
              now.tm_wday,
              0,
              -1)
    return int(time.mktime(fields))
class NotModifiedHandler(urllib2.BaseHandler):
    """urllib2 handler that converts an HTTP 304 response into a normal
    addinfourl result (with .code set) instead of treating it as an error."""

    def http_error_304(self, req, fp, code, message, headers):
        response = urllib2.addinfourl(fp, headers, req.get_full_url())
        response.code = code
        return response
class CFC_OP_TYPE():
    # Operation codes carried in task['opType'] and dispatched by
    # WorkerThread.run().
    DOWNLOAD = 1  # fetch a file over HTTP (__run_download)
    READ = 2  # read an entry from the shelve db (__run_read)
    WRITE = 3  # store an entry in the shelve db (__run_write)
    CHECK = 4  # test whether an entry exists in the db (__run_check)
class WorkerThread(threading.Thread):
    """Background thread executing cache tasks (download/read/write/check)
    pulled from a bounded queue; results are delivered via per-task
    callbacks of the form callback(file, last_modified, expires)."""

    def __init__(self):
        super(WorkerThread, self).__init__()
        # Bounded queue: add_task fails fast instead of blocking when full.
        self.input_queue = Queue(60)
        self.__terminate = False
        self.isBusy = False

    def add_task(self, task):
        """Enqueue a task dict; when the queue is full the task's callback
        is invoked immediately with (None, None, None)."""
        callback = task['callback']
        try:
            self.input_queue.put(task, block=False)
        except:
            callback(None, None, None)

        return

    def close(self):
        """Request shutdown; the None sentinel wakes the run() loop."""
        self.isBusy = False
        self.__terminate = True
        self.input_queue.put(None)
        return

    def run(self):
        # Main loop: dispatch on task['opType'] until the None sentinel
        # (or the terminate flag) is observed.
        while True:
            task = self.input_queue.get()
            if task is None:
                break
            if self.__terminate:
                break
            try:
                self.isBusy = True
                type = task['opType']
                if type == CFC_OP_TYPE.DOWNLOAD:
                    self.__run_download(**task)
                elif type == CFC_OP_TYPE.READ:
                    self.__run_read(**task)
                elif type == CFC_OP_TYPE.WRITE:
                    self.__run_write(**task)
                elif type == CFC_OP_TYPE.CHECK:
                    self.__run_check(**task)
            except:
                LOG_CURRENT_EXCEPTION()

            self.isBusy = False
            self.input_queue.task_done()

        # Accounts for the sentinel/terminating task consumed above.
        self.input_queue.task_done()
        return

    def __run_download(self, url, modified_time, callback, **params):
        """Fetch url over HTTP, sending If-Modified-Since when
        modified_time is given; invokes callback(file, last_modified,
        expires) with file=None on error or 304."""
        startTime = time.time()
        try:
            fh = file = None
            last_modified = expires = None
            req = urllib2.Request(url)
            req.add_header('User-Agent', _CLIENT_VERSION)
            if modified_time and isinstance(modified_time, str):
                req.add_header('If-Modified-Since', modified_time)
            opener = urllib2.build_opener(NotModifiedHandler())
            fh = opener.open(req, timeout=10)
            headers = fh.info()
            if hasattr(fh, 'code'):
                code = fh.code
                if code in (304, 200):
                    info = fh.info()
                    last_modified = info.getheader('Last-Modified')
                    expires = info.getheader('Expires')
                if code == 200:
                    file = fh.read()
            else:
                # No status code available: retry with a plain opener and
                # read the body unconditionally.
                opener = urllib2.build_opener(urllib2.BaseHandler())
                fh = opener.open(req, timeout=10)
                info = fh.info()
                last_modified = info.getheader('Last-Modified')
                expires = info.getheader('Expires')
                file = fh.read()
            if expires is None:
                expires = makeHttpTime(time.gmtime())
            else:
                # Clamp a bogus Expires header into [now, now + _MAX_LIFE_TIME].
                ctime = getSafeDstUTCTime()
                expiresTmp = parseHttpTime(expires)
                if expiresTmp > ctime + _MAX_LIFE_TIME or expiresTmp < ctime:
                    expires = makeHttpTime(time.gmtime(time.time() + _MAX_LIFE_TIME))
        except urllib2.HTTPError as e:
            LOG_WARNING('Http error. Code: %d, url: %s' % (e.code, url))
        except urllib2.URLError as e:
            LOG_WARNING('Url error. Reason: %s, url: %s' % (str(e.reason), url))
        except Exception as e:
            LOG_ERROR("Client couldn't download file.", e, url)
        finally:
            if fh:
                fh.close()

        _LOG_EXECUTING_TIME(startTime, '__run_download', 10.0)
        callback(file, last_modified, expires)
        return

    def __run_read(self, name, db, callback, **params):
        """Read entry `name` from the shelve db; callback(file, None, None)."""
        file = None
        try:
            startTime = time.time()
            if db is not None and db.has_key(name):
                file = db[name]
            _LOG_EXECUTING_TIME(startTime, '__run_read')
        except Exception as e:
            LOG_WARNING("Client couldn't read file.", e, name)

        callback(file, None, None)
        return

    def __run_write(self, name, data, db, callback, **params):
        """Store data under `name` in the shelve db; callback(None, None, None)."""
        try:
            startTime = time.time()
            if db is not None:
                db[name] = data
            _LOG_EXECUTING_TIME(startTime, '__run_write', 5.0)
        except:
            LOG_CURRENT_EXCEPTION()

        callback(None, None, None)
        return

    def __run_check(self, name, db, callback, **params):
        """Check whether `name` exists in the db; callback(bool, None, None)."""
        res = False
        try:
            startTime = time.time()
            if db is not None:
                res = db.has_key(name)
            _LOG_EXECUTING_TIME(startTime, '__run_check')
        except:
            LOG_CURRENT_EXCEPTION()

        callback(res, None, None)
        return
class ThreadPool():
    """Small pool of WorkerThread objects.  Worker 0 is reserved for the
    database operations (read/write/check); the remaining workers handle
    downloads."""

    def __init__(self, num = 8):
        # Keep at least two workers: one for db ops, one for downloads.
        self.__workers = [WorkerThread() for _ in range(max(2, num))]

    def start(self):
        """Start every worker thread."""
        for worker in self.__workers:
            worker.start()

    def close(self):
        """Shut all workers down and drop the pool."""
        for worker in self.__workers:
            worker.close()
        self.__workers = []

    def add_task(self, task):
        """Route a task: db ops go to worker 0, downloads to the first idle
        downloader, or to a random downloader when all are busy."""
        if not self.__workers:
            return
        db_ops = (CFC_OP_TYPE.WRITE, CFC_OP_TYPE.READ, CFC_OP_TYPE.CHECK)
        if task['opType'] in db_ops:
            self.__workers[0].add_task(task)
            return
        downloaders = self.__workers[1:]
        for worker in downloaders:
            if not worker.isBusy:
                worker.add_task(task)
                return

        random.choice(downloaders).add_task(task)
class CustomFilesCache(object):
    """Two-level (memory + shelve-on-disk) cache for files downloaded over
    HTTP, with Expires/Last-Modified based revalidation.

    Cache entries are packets of the form
    (expires, creation_time, crc, file, _CACHE_VERSION, last_modified),
    keyed by base64.b32encode(url).  All callbacks are delivered through
    BigWorld.callback on the main thread; blocking work runs on a
    ThreadPool.
    """

    def __init__(self):
        # Cache directory lives next to the BigWorld preferences file.
        prefsFilePath = unicode(BigWorld.wg_getPreferencesFilePath(), 'utf-8', errors='ignore')
        self.__cacheDir = os.path.join(os.path.dirname(prefsFilePath), 'custom_data')
        self.__cacheDir = os.path.normpath(self.__cacheDir)
        self.__mutex = threading.RLock()
        # hash -> packet (or None when only present on disk).
        self.__cache = {}
        # hash -> last access time; drives eviction in __idle.
        self.__accessedCache = {}
        # url -> list of pending requester callbacks.
        self.__processedCache = {}
        # hashes with a disk write currently in flight (dedupes writes).
        self.__written_cache = set()
        self.__db = None
        self.__prepareCache()
        self.__worker = ThreadPool()
        self.__worker.start()
        self.__startTimer()
        return

    def close(self):
        """Stop workers and the idle timer, drop state, close the db."""
        self.__worker.close()
        self.__cache = {}
        self.__accessedCache = {}
        self.__processedCache = {}
        self.__written_cache = set()
        if self.__timer is not None:
            BigWorld.cancelCallback(self.__timer)
            self.__timer = None
        if self.__db is not None:
            startTime = time.time()
            try:
                self.__db.close()
            except:
                LOG_CURRENT_EXCEPTION()

            _LOG_EXECUTING_TIME(startTime, 'close')
            self.__db = None
        return

    def __startTimer(self):
        # Re-armed by __idle every 60 s.
        self.__timer = BigWorld.callback(60, self.__idle)

    def get(self, url, callback, showImmediately = False):
        """Request url; callback(url, file) fires when the file is ready.
        Concurrent requests for the same url share one download."""
        if callback is None:
            return
        else:
            # Only the first requester for a url starts the fetch; the rest
            # just queue their callbacks.
            startDownload = True
            if url in self.__processedCache:
                startDownload = False
            self.__processedCache.setdefault(url, []).append(callback)
            if startDownload:
                self.__get(url, showImmediately, False)
            return

    def __get(self, url, showImmediately, checkedInCache):
        """Core lookup: serve from memory, fall back to disk, then to a
        (re)download, depending on entry state and freshness."""
        try:
            ctime = getSafeDstUTCTime()
            hash = base64.b32encode(url)
            self.__mutex.acquire()
            cache = self.__cache
            if hash in cache:
                data = cache[hash]
                if data is None:
                    # Known on disk but not loaded into memory yet.
                    LOG_DEBUG('readLocalFile, there is no file in memory.', url)
                    self.__readLocalFile(url, showImmediately)
                else:
                    self.__accessedCache[hash] = ctime
                    expires, creation_time, _, file, _, last_modified = data
                    expires = parseHttpTime(expires)
                    if expires is None:
                        LOG_ERROR('Unable to parse expires time.', url)
                        self.__postTask(url, None, True)
                        return
                    # Fresh enough (within the slack window): serve as-is.
                    if ctime - _MIN_LIFE_TIME <= expires <= ctime + _MAX_LIFE_TIME + _MIN_LIFE_TIME:
                        LOG_DEBUG('postTask, Sends file to requester.', url, last_modified, data[0])
                        self.__postTask(url, file, True)
                    else:
                        # Stale: optionally serve the old copy now, then
                        # revalidate with If-Modified-Since.
                        if showImmediately:
                            LOG_DEBUG('postTask, Do not release callbacks. Sends file to requester.', url, last_modified, data[0])
                            self.__postTask(url, file, False)
                        LOG_DEBUG('readRemoteFile, there is file in cache, check last_modified field.', url, last_modified, data[0])
                        self.__readRemoteFile(url, last_modified, showImmediately)
            elif checkedInCache:
                # Confirmed absent from disk: plain download.
                LOG_DEBUG('readRemoteFile, there is no file in cache.', url)
                self.__readRemoteFile(url, None, False)
            else:
                LOG_DEBUG('checkFile. Checking file in cache.', url, showImmediately)
                self.__checkFile(url, showImmediately)
        finally:
            self.__mutex.release()

        return

    def __idle(self):
        """Periodic sweep: drop in-memory copies unused for
        _LIFE_TIME_IN_MEMORY (entry stays known via the None marker)."""
        try:
            self.__mutex.acquire()
            cache = self.__cache
            accessed_cache = self.__accessedCache
            ctime = getSafeDstUTCTime()
            for k, v in accessed_cache.items():
                if v and abs(ctime - v) >= _LIFE_TIME_IN_MEMORY:
                    cache[k] = None
                    accessed_cache.pop(k, None)
                    LOG_DEBUG('Idle. Removing old file from memory.', k)

        finally:
            self.__mutex.release()

        self.__startTimer()
        return

    def __readLocalFile(self, url, showImmediately):
        # Async read from the shelve db.
        task = {'opType': CFC_OP_TYPE.READ,
         'db': self.__db,
         'name': base64.b32encode(url),
         'callback': partial(self.__onReadLocalFile, url, showImmediately)}
        self.__worker.add_task(task)

    def __onReadLocalFile(self, url, showImmediately, file, d1, d2):
        """Validate the packet read from disk (crc + version), cache it in
        memory, then re-enter __get."""
        data = file
        try:
            crc, f, ver = data[2:5]
            if crc != binascii.crc32(f) or _CACHE_VERSION != ver:
                LOG_DEBUG('Old file was found.', url)
                raise Exception('Invalid data.')
        except:
            # Corrupt/old/missing entry: forget it and fall through to a
            # fresh download via __get.
            data = None

        try:
            hash = base64.b32encode(url)
            self.__mutex.acquire()
            cache = self.__cache
            if data is not None:
                cache[hash] = data
            else:
                cache.pop(hash, None)
                self.__accessedCache.pop(hash, None)
        finally:
            self.__mutex.release()

        self.__get(url, showImmediately, True)
        return

    def __checkFile(self, url, showImmediately):
        # Async existence test in the shelve db.
        task = {'opType': CFC_OP_TYPE.CHECK,
         'db': self.__db,
         'name': base64.b32encode(url),
         'callback': partial(self.__onCheckFile, url, showImmediately)}
        self.__worker.add_task(task)

    def __onCheckFile(self, url, showImmediately, res, d1, d2):
        """On a hit, mark the entry as known (None = on disk only) and
        re-enter __get; res is None when the check task failed."""
        if res is None:
            self.__postTask(url, None, True)
            return
        else:
            if res:
                try:
                    hash = base64.b32encode(url)
                    self.__mutex.acquire()
                    self.__cache[hash] = None
                finally:
                    self.__mutex.release()

            self.__get(url, showImmediately, True)
            return

    def __readRemoteFile(self, url, modified_time, showImmediately):
        # Async HTTP download (with If-Modified-Since when modified_time set).
        task = {'opType': CFC_OP_TYPE.DOWNLOAD,
         'url': url,
         'modified_time': modified_time,
         'callback': partial(self.__onReadRemoteFile, url, showImmediately)}
        self.__worker.add_task(task)

    def __onReadRemoteFile(self, url, showImmediately, file, last_modified, expires):
        """Handle a download result: build/refresh the cache packet, write
        it to disk, and deliver to waiting callbacks."""
        if file is None and last_modified is None:
            # Download failed outright.
            if showImmediately:
                LOG_DEBUG('__onReadRemoteFile, Error occurred. Release callbacks.', url)
                self.__processedCache.pop(url, None)
            else:
                self.__postTask(url, None, True)
            return
        else:
            hash = base64.b32encode(url)
            ctime = getSafeDstUTCTime()
            fileChanged = False
            try:
                self.__mutex.acquire()
                cache = self.__cache
                if file is None and last_modified is not None:
                    # 304 Not Modified: keep the body already in memory.
                    value = cache.get(hash, None)
                    if value is None:
                        LOG_WARNING('File is expected in cache, but there is no file')
                        self.__postTask(url, None, True)
                        return
                    crc, file = value[2:4]
                else:
                    crc = binascii.crc32(file)
                    fileChanged = True
                packet = (expires,
                 ctime,
                 crc,
                 file,
                 _CACHE_VERSION,
                 last_modified)
                cache[hash] = packet
            finally:
                self.__mutex.release()

            LOG_DEBUG('writeCache', url, last_modified, expires)
            self.__writeCache(hash, packet)
            if showImmediately and not fileChanged:
                # Stale copy was already delivered and is still valid.
                LOG_DEBUG('__onReadRemoteFile, showImmediately = True. Release callbacks.', url)
                self.__processedCache.pop(url, None)
            else:
                self.__get(url, False, True)
            return

    def __prepareCache(self):
        """Create the cache directory and open the shelve database."""
        try:
            cacheDir = self.__cacheDir
            if not os.path.isdir(cacheDir):
                os.makedirs(cacheDir)
            filename = os.path.join(cacheDir, 'icons')
            self.__db = provider.open(filename, flag='c', writeback=True)
        except:
            LOG_CURRENT_EXCEPTION()

    def __writeCache(self, name, packet):
        # Skip if a write for this entry is already in flight.
        if name in self.__written_cache:
            return
        self.__written_cache.add(name)
        task = {'opType': CFC_OP_TYPE.WRITE,
         'db': self.__db,
         'name': name,
         'data': packet,
         'callback': partial(self.__onWriteCache, name)}
        self.__worker.add_task(task)

    def __onWriteCache(self, name, d1, d2, d3):
        self.__written_cache.discard(name)

    def __postTask(self, url, file, invokeAndReleaseCallbacks):
        # Defer delivery to the main thread via BigWorld's callback queue.
        BigWorld.callback(0.001, partial(self.__onPostTask, url, invokeAndReleaseCallbacks, file))

    def __onPostTask(self, url, invokeAndReleaseCallbacks, file):
        # When not releasing, callbacks stay queued for a later final answer.
        if invokeAndReleaseCallbacks:
            cbs = self.__processedCache.pop(url, [])
        else:
            cbs = self.__processedCache.get(url, [])
        for cb in cbs:
            cb(url, file)
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\account_helpers\customfilescache.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.08.04 19:47:57 Střední Evropa (letní čas)
|
[
"[email protected]"
] | |
300105105b624689dfe8a2adcac101be4fe25fd7
|
149489e12a2f209e33a82684518785540b3508b8
|
/configs/dram/low_power_sweep.py
|
9adfcaff0c0faa9eb1e0e129a7edc6b1e1f8ad9c
|
[
"BSD-3-Clause",
"LicenseRef-scancode-proprietary-license",
"LGPL-2.0-or-later",
"MIT"
] |
permissive
|
FPSG-UIUC/SPT
|
8dac03b54e42df285d774bfc2c08be3123ea0dbb
|
34ec7b2911078e36284fa0d35ae1b5551b48ba1b
|
refs/heads/master
| 2023-04-15T07:11:36.092504 | 2022-05-28T21:34:30 | 2022-05-28T21:34:30 | 405,761,413 | 4 | 1 |
BSD-3-Clause
| 2023-04-11T11:44:49 | 2021-09-12T21:54:08 |
C++
|
UTF-8
|
Python
| false | false | 10,445 |
py
|
# Copyright (c) 2014-2015, 2017 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Radhika Jagtap
# Andreas Hansson
import argparse
import m5
from m5.objects import *
from m5.util import addToPath
from m5.stats import periodicStatDump
addToPath(os.getcwd() + '/configs/common')
import MemConfig
# This script aims at triggering low power state transitions in the DRAM
# controller. The traffic generator is used in DRAM mode and traffic
# states target a different levels of bank utilization and strides.
# At the end after sweeping through bank utilization and strides, we go
# through an idle state with no requests to enforce self-refresh.
# NOTE(review): Python 2 gem5 script — `map` returns lists and `print` is a
# statement here; do not run under Python 3 without porting.
parser = argparse.ArgumentParser(
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)

# Use a single-channel DDR4-2400 in 16x4 configuration by default
parser.add_argument("--mem-type", default="DDR4_2400_16x4",
                    choices=MemConfig.mem_names(),
                    help = "type of memory to use")

parser.add_argument("--mem-ranks", "-r", type=int, default=1,
                    help = "Number of ranks to iterate across")

parser.add_argument("--page-policy", "-p",
                    choices=["close_adaptive", "open_adaptive"],
                    default="close_adaptive", help="controller page policy")

parser.add_argument("--itt-list", "-t", default="1 20 100",
                    help="a list of multipliers for the max value of itt, " \
                        "e.g. \"1 20 100\"")

parser.add_argument("--rd-perc", type=int, default=100,
                    help = "Percentage of read commands")

parser.add_argument("--addr-map", type=int, default=1,
                    help = "0: RoCoRaBaCh; 1: RoRaBaCoCh/RoRaBaChCo")

parser.add_argument("--idle-end", type=int, default=50000000,
                    help = "time in ps of an idle period at the end ")

args = parser.parse_args()

# Start with the system itself, using a multi-layer 2.0 GHz
# crossbar, delivering 64 bytes / 3 cycles (one header cycle)
# which amounts to 42.7 GByte/s per layer and thus per port.
system = System(membus = IOXBar(width = 32))
system.clk_domain = SrcClockDomain(clock = '2.0GHz',
                                   voltage_domain =
                                   VoltageDomain(voltage = '1V'))

# We are fine with 256 MB memory for now.
mem_range = AddrRange('256MB')
# Start address is 0
system.mem_ranges = [mem_range]

# Do not worry about reserving space for the backing store
system.mmap_using_noreserve = True

# Force a single channel to match the assumptions in the DRAM traffic
# generator
args.mem_channels = 1
args.external_memory_system = 0
args.tlm_memory = 0
args.elastic_trace_en = 0
MemConfig.config_mem(args, system)

# Sanity check for memory controller class.
if not isinstance(system.mem_ctrls[0], m5.objects.DRAMCtrl):
    fatal("This script assumes the memory is a DRAMCtrl subclass")

# There is no point slowing things down by saving any data.
system.mem_ctrls[0].null = True

# Set the address mapping based on input argument
# Default to RoRaBaCoCh
if args.addr_map == 0:
    system.mem_ctrls[0].addr_mapping = "RoCoRaBaCh"
elif args.addr_map == 1:
    system.mem_ctrls[0].addr_mapping = "RoRaBaCoCh"
else:
    fatal("Did not specify a valid address map argument")

system.mem_ctrls[0].page_policy = args.page_policy

# We create a traffic generator state for each param combination we want to
# test. Each traffic generator state is specified in the config file and the
# generator remains in the state for specific period. This period is 0.25 ms.
# Stats are dumped and reset at the state transition.
period = 250000000

# We specify the states in a config file input to the traffic generator.
cfg_file_name = "configs/dram/lowp_sweep.cfg"
cfg_file = open(cfg_file_name, 'w')

# Get the number of banks
nbr_banks = int(system.mem_ctrls[0].banks_per_rank.value)

# determine the burst size in bytes
burst_size = int((system.mem_ctrls[0].devices_per_rank.value *
                  system.mem_ctrls[0].device_bus_width.value *
                  system.mem_ctrls[0].burst_length.value) / 8)

# next, get the page size in bytes (the rowbuffer size is already in bytes)
page_size = system.mem_ctrls[0].devices_per_rank.value * \
    system.mem_ctrls[0].device_rowbuffer_size.value

# Inter-request delay should be such that we can hit as many transitions
# to/from low power states as possible to. We provide a min and max itt to the
# traffic generator and it randomises in the range. The parameter is in
# seconds and we need it in ticks (ps).
itt_min = system.mem_ctrls[0].tBURST.value * 1000000000000

#The itt value when set to (tRAS + tRP + tCK) covers the case where
# a read command is delayed beyond the delay from ACT to PRE_PDN entry of the
# previous command. For write command followed by precharge, this delay
# between a write and power down entry will be tRCD + tCL + tWR + tRP + tCK.
# As we use this delay as a unit and create multiples of it as bigger delays
# for the sweep, this parameter works for reads, writes and mix of them.
pd_entry_time = (system.mem_ctrls[0].tRAS.value +
                 system.mem_ctrls[0].tRP.value +
                 system.mem_ctrls[0].tCK.value) * 1000000000000

# We sweep itt max using the multipliers specified by the user.
itt_max_str = args.itt_list.strip().split()
itt_max_multiples = map(lambda x : int(x), itt_max_str)
if len(itt_max_multiples) == 0:
    fatal("String for itt-max-list detected empty\n")

itt_max_values = map(lambda m : pd_entry_time * m, itt_max_multiples)

# Generate request addresses in the entire range, assume we start at 0
max_addr = mem_range.end

# For max stride, use min of the page size and 512 bytes as that should be
# more than enough
max_stride = min(512, page_size)
mid_stride = 4 * burst_size
stride_values = [burst_size, mid_stride, max_stride]

# be selective about bank utilization instead of going from 1 to the number of
# banks
bank_util_values = [1, int(nbr_banks/2), nbr_banks]

# Next we create the config file, but first a comment
cfg_file.write("""# STATE state# period mode=DRAM
# read_percent start_addr end_addr req_size min_itt max_itt data_limit
# stride_size page_size #banks #banks_util addr_map #ranks\n""")

# Sweep order: itt_max (outer) x bank utilization x stride (inner); each
# combination becomes one numbered traffic-generator state.
nxt_state = 0
for itt_max in itt_max_values:
    for bank in bank_util_values:
        for stride_size in stride_values:
            cfg_file.write("STATE %d %d %s %d 0 %d %d "
                           "%d %d %d %d %d %d %d %d %d\n" %
                           (nxt_state, period, "DRAM", args.rd_perc, max_addr,
                            burst_size, itt_min, itt_max, 0, stride_size,
                            page_size, nbr_banks, bank, args.addr_map,
                            args.mem_ranks))
            nxt_state = nxt_state + 1

# State for idle period
idle_period = args.idle_end
cfg_file.write("STATE %d %d IDLE\n" % (nxt_state, idle_period))

# Init state is state 0
cfg_file.write("INIT 0\n")

# Go through the states one by one
for state in range(1, nxt_state + 1):
    cfg_file.write("TRANSITION %d %d 1\n" % (state - 1, state))

# Transition from last state to itself to not break the probability math
cfg_file.write("TRANSITION %d %d 1\n" % (nxt_state, nxt_state))
cfg_file.close()

# create a traffic generator, and point it to the file we just created
system.tgen = TrafficGen(config_file = cfg_file_name)

# add a communication monitor
system.monitor = CommMonitor()

# connect the traffic generator to the bus via a communication monitor
system.tgen.port = system.monitor.slave
system.monitor.master = system.membus.slave

# connect the system port even if it is not used in this example
system.system_port = system.membus.slave

# every period, dump and reset all stats
periodicStatDump(period)

root = Root(full_system = False, system = system)
root.system.mem_mode = 'timing'

m5.instantiate()

# Simulate for exactly as long as it takes to go through all the states
# This is why sim exists.
m5.simulate(nxt_state * period + idle_period)

print "--- Done DRAM low power sweep ---"
print "Fixed params - "
print "\tburst: %d, banks: %d, max stride: %d, itt min: %s ns" % \
        (burst_size, nbr_banks, max_stride, itt_min)
print "Swept params - "
print "\titt max multiples input:", itt_max_multiples
print "\titt max values", itt_max_values
print "\tbank utilization values", bank_util_values
print "\tstride values:", stride_values
print "Traffic gen config file:", cfg_file_name
|
[
"[email protected]"
] | |
1c8bcdf2d99bd5630809fedcd85b30f4ca5af1d3
|
b61b0a5333814779669320532233ee75327f039f
|
/xls_proc.py
|
2b62ee064f9f7d001f18b164b612cead6498106d
|
[] |
no_license
|
marine0131/attendance_calc
|
75f6d387e336dfd7ff22fcde5bcb13c96a87c810
|
e991f30ba7ff88474b2135315b12f360d52ee726
|
refs/heads/master
| 2020-03-26T07:52:31.226713 | 2018-08-14T08:37:25 | 2018-08-14T08:37:25 | 144,675,548 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,994 |
py
|
#! /usr/bin/env python
import xlrd
import xlwt
import re
import datetime
import json
# Load scheduling parameters from config.txt (JSON).
with open("config.txt", 'r') as f:
    params = json.load(f)
FILE = params["FILE"]  # path of the source attendance workbook
MONTH = params['MONTH']  # parsed as 'YYYY/MM' by check_weekend
ON_WORK_TIME = params['ON_WORK_TIME']  # shift boundaries, all 'HH:MM' strings
LUNCH_TIME = params['LUNCH_TIME']
REST_TIME = params['REST_TIME']
AFTERNOON_WORK_TIME = params['AFTERNOON_WORK_TIME']
OFF_WORK_TIME = params['OFF_WORK_TIME']
OVER_WORK_TIME = params['OVER_WORK_TIME']
OVER_TIME = params['OVER_TIME']
OVER_TIME = params['OVER_TIME']
def str_to_absmin(t_str):
    """Convert an 'HH:MM' time string to absolute minutes since midnight."""
    parts = t_str.split(':')
    return int(parts[0]) * 60 + int(parts[1])
def duration(start, end):
    """Minutes elapsed from `start` to `end` (both 'HH:MM' strings)."""
    start_min = str_to_absmin(start)
    end_min = str_to_absmin(end)
    return end_min - start_min
def proc_time(time_list, is_weekend=False):
    """Classify one day's punch records.

    time_list holds 'HH:MM' strings; the first is treated as check-in,
    the last as check-out.  Returns (tag, "start~end", work_hours,
    overtime_hours) with hours rounded to one decimal.  Relies on the
    module-level shift-boundary constants loaded from config.txt.
    """
    # Fewer than two punches: no measurable work span.
    if len(time_list) == 0:
        return "", "~", 0, 0
    if len(time_list) == 1:
        return "", time_list[0]+"~", 0, 0
    start = time_list[0]
    end = time_list[-1]
    start_min = str_to_absmin(start)
    end_min = str_to_absmin(end)
    tag = ""
    start_end = start + "~" + end
    work_duration = 0
    over_duration = 0
    if is_weekend:
        # Weekend: the whole span counts as overtime, no regular work.
        over_duration = duration(start, end)
        over_duration = round(over_duration/60.0, 1)  # * 2)/2.0
        return tag, start_end, work_duration, over_duration
    else:
        morning_work_min = duration(ON_WORK_TIME, LUNCH_TIME)
        afternoon_work_min = duration(AFTERNOON_WORK_TIME, OFF_WORK_TIME)
        regular_work_min = morning_work_min + afternoon_work_min
        if start_min <= str_to_absmin(ON_WORK_TIME): # check in regular
            if end_min > str_to_absmin(OVER_TIME): # work over time
                work_duration = regular_work_min
                over_duration = duration(OVER_WORK_TIME, end)
            elif end_min >= str_to_absmin(OFF_WORK_TIME): # regular work
                work_duration = regular_work_min
            elif end_min >= str_to_absmin(AFTERNOON_WORK_TIME): # work over lunch
                work_duration = morning_work_min + duration(AFTERNOON_WORK_TIME, end)
            elif end_min >= str_to_absmin(LUNCH_TIME): # work whole morning
                work_duration = morning_work_min
            else: # work only morning
                work_duration = duration(ON_WORK_TIME, end)
        elif start_min > str_to_absmin(ON_WORK_TIME) and start_min <= str_to_absmin(LUNCH_TIME): # late check in morning
            late = start_min - str_to_absmin(ON_WORK_TIME)
            tag = "late: " + str(late) + "min"
            if late < 30: # late but worktime is full
                # Under 30 min late is forgiven: treat as an on-time start.
                late = 0
                start = ON_WORK_TIME
            if late > 60:
                # Over an hour late is recorded as absence instead.
                tag = "absence: " + str(late) + "min"
            if end_min > str_to_absmin(OVER_TIME): # work over time
                work_duration = regular_work_min - late
                over_duration = duration(OVER_WORK_TIME, end)
            elif end_min > str_to_absmin(OFF_WORK_TIME): # regular work
                work_duration = regular_work_min - late
            elif end_min > str_to_absmin(AFTERNOON_WORK_TIME): # work over lunch
                work_duration = duration(start, LUNCH_TIME) + duration(AFTERNOON_WORK_TIME, end)
            elif end_min >= str_to_absmin(LUNCH_TIME): # work whole morning
                work_duration = duration(start, LUNCH_TIME)
            else: # work only morning
                work_duration = duration(start, end)
        # check in lunchtime
        elif start_min > str_to_absmin(LUNCH_TIME) and start_min < str_to_absmin(AFTERNOON_WORK_TIME):
            tag = "absence: " + str(morning_work_min) + "min"
            if end_min > str_to_absmin(OVER_TIME): # work over time
                work_duration = afternoon_work_min
                over_duration = duration(OVER_WORK_TIME, end)
            elif end_min > str_to_absmin(OFF_WORK_TIME): # regular work
                work_duration = afternoon_work_min
            elif end_min > str_to_absmin(AFTERNOON_WORK_TIME): # work over lunch
                work_duration = duration(start, end)
            else:
                pass
        # check in afternoon
        elif start_min > str_to_absmin(AFTERNOON_WORK_TIME) and start_min <= str_to_absmin(OFF_WORK_TIME): # check in afternoon
            tag = "absence: morning"
            if end_min > str_to_absmin(OVER_TIME): # work over time
                work_duration = duration(start, OFF_WORK_TIME)
                over_duration = duration(OVER_WORK_TIME, end)
            elif end_min > str_to_absmin(OFF_WORK_TIME): # regular work
                work_duration = duration(start, OFF_WORK_TIME)
            else:
                work_duration = duration(start, end)
        else: # check in evening
            if end_min > str_to_absmin(OVER_TIME): # work over time
                over_duration = duration(OVER_WORK_TIME, end)
            else:
                pass
        # Convert minutes to hours, one decimal place.
        work_duration = round(work_duration/60.0, 1)  # * 2)/2.0
        over_duration = round(over_duration/60.0, 1)  # * 2)/2.0
        return tag, start_end, work_duration, over_duration
def check_weekend(day):
    """Return (is_weekend, weekday_abbrev) for `day` of the configured MONTH."""
    weekenum = ["Mon", "Tus", "Wed", "Thu", "Fri", "Sat", "Sun"]
    year_str, month_str = MONTH.split('/')
    date_obj = datetime.date(int(year_str), int(month_str), int(day))
    weekday_idx = date_obj.weekday()
    # Saturday (5) and Sunday (6) count as weekend.
    return (weekday_idx >= 5), weekenum[weekday_idx]
if __name__ == "__main__":
    # Source workbook: sheet index 2 holds the raw punch records.
    src_book = xlrd.open_workbook(FILE)
    src_sheet = src_book.sheets()[2]
    n_rows = src_sheet.nrows
    print("sheet rows:{}".format(n_rows))
    dst_book = xlwt.Workbook()
    dst_sheet = dst_book.add_sheet('Sheet1')
    # copy the head
    row = src_sheet.row_values(2)
    dst_sheet.write(0, 0, row[0])
    dst_sheet.write(0, 1, row[2])
    dst_sheet.write(0, 20, "generate by whj")
    row = src_sheet.row_values(3)
    for i, r in enumerate(row):
        dst_sheet.write(1, i+1, r)
    # copy and calc work time
    # Source rows alternate: even rows carry the person's id/name, odd
    # rows carry the concatenated punch times for each day column.
    ind = 2
    for i in range(4, n_rows):
        row = src_sheet.row_values(i)
        if i%2 == 0:
            dst_sheet.write(ind, 0, row[2] + ":".encode('utf-8') + row[10])
            ind += 1
        else:
            # write title
            dst_sheet.write(ind, 0, "start~end")
            dst_sheet.write(ind+1, 0, "worktime")
            dst_sheet.write(ind+2, 0, "overtime")
            dst_sheet.write(ind+3, 0, "comment")
            for j, r in enumerate(row):
                # Punch times are packed as consecutive 5-char 'HH:MM' chunks.
                time_list = re.findall(r'.{5}', r)
                is_weekend, day_tag = check_weekend(src_sheet.cell_value(3, j))
                tag, start_end, work_duration, over_duration = proc_time(time_list, is_weekend)
                dst_sheet.write(ind, j+1, start_end)
                dst_sheet.write(ind+1, j+1, work_duration)
                dst_sheet.write(ind+2, j+1, over_duration)
                dst_sheet.write(ind+3, j+1, tag)
                if is_weekend:
                    dst_sheet.write(ind-1, j+1, day_tag)
            ind += 4
    dst_book.save("new.xls")
|
[
"[email protected]"
] | |
580d3bab5161c2089c9b1c92b66b2465fd94ddb9
|
3e24611b7315b5ad588b2128570f1341b9c968e8
|
/pacbiolib/thirdparty/pythonpkgs/scipy/scipy_0.9.0+pbi86/lib/python2.7/site-packages/scipy/linalg/interface_gen.py
|
aed22b2164e1399c612a6bd8fd85ad35866e808f
|
[
"BSD-2-Clause"
] |
permissive
|
bioCKO/lpp_Script
|
dc327be88c7d12243e25557f7da68d963917aa90
|
0cb2eedb48d4afa25abc2ed7231eb1fdd9baecc2
|
refs/heads/master
| 2022-02-27T12:35:05.979231 | 2019-08-27T05:56:33 | 2019-08-27T05:56:33 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,791 |
py
|
#! python
import os
import re
from distutils.dir_util import mkpath
def all_subroutines(interface_in):
    """Extract every 'subroutine ... end subroutine' and
    'function ... end function' body from an interface string,
    after stripping C-style comment blocks.  Each result is stripped
    of surrounding whitespace."""
    comment_re = re.compile(r'/\*(?:\s|.)*?\*/')
    sub_re = re.compile(r'subroutine (?:\s|.)*?end subroutine.*')
    func_re = re.compile(r'function (?:\s|.)*?end function.*')
    cleaned = comment_re.sub('', interface_in)
    found = sub_re.findall(cleaned) + func_re.findall(cleaned)
    # Kept as map() to preserve the original return type on each Python.
    return map(lambda item: item.strip(), found)
def real_convert(val_string):
    """Identity conversion: real literals need no rewriting."""
    return val_string
def complex_convert(val_string):
    """Wrap a real literal as a complex one with a zero imaginary part."""
    return '({0},0.)'.format(val_string)
def convert_types(interface_in, converter):
    """Replace every ``<type_convert=VALUE>`` tag with ``converter(VALUE)``.

    Each replacement substitutes all occurrences of the matched tag text;
    the scan repeats until no tag remains.
    """
    tag_re = re.compile(r'<type_convert=(.*?)>')
    interface = interface_in[:]
    match = tag_re.search(interface)
    while match is not None:
        interface = interface.replace(match.group(), converter(match.group(1)))
        match = tag_re.search(interface)
    return interface
def generic_expand(generic_interface,skip_names=[]):
    """Expand a type-generic interface into one concrete routine per type code.

    NOTE(review): ``skip_names`` has a mutable default; it is only read, never
    mutated, so this is benign here.
    """
    # 1. Map each one/two-character type code to a tuple of
    #    (input type, output type, literal converter, real counterpart type).
    generic_types ={'s' :('real', 'real', real_convert,
                          'real'),
                    'd' :('double precision','double precision',real_convert,
                          'double precision'),
                    'c' :('complex', 'complex',complex_convert,
                          'real'),
                    'z' :('double complex', 'double complex',complex_convert,
                          'double precision'),
                    'cs':('complex', 'real',complex_convert,
                          'real'),
                    'zd':('double complex', 'double precision',complex_convert,
                          'double precision'),
                    'sc':('real', 'complex',real_convert,
                          'real'),
                    'dz':('double precision','double complex', real_convert,
                          'double precision')}
    # Fortran type -> C type spellings used in the generated wrappers.
    generic_c_types = {'real':'float',
                       'double precision':'double',
                       'complex':'complex_float',
                       'double complex':'complex_double'}
    # cc_types is specific in ATLAS C BLAS, in particular, for complex arguments
    generic_cc_types = {'real':'float',
                        'double precision':'double',
                        'complex':'void',
                        'double complex':'void'}
    #2. get all subroutines
    subs = all_subroutines(generic_interface)
    print len(subs)
    #loop through the subs
    type_exp = re.compile(r'<tchar=(.*?)>')
    TYPE_EXP = re.compile(r'<TCHAR=(.*?)>')
    routine_name = re.compile(r'(subroutine|function)\s*(?P<name>\w+)\s*\(')
    interface = ''
    for sub in subs:
        #3. Find the typecodes to use:
        m = type_exp.search(sub)
        if m is None:
            # No type tag: the routine is not generic, copy it through as-is.
            interface = interface + '\n\n' + sub
            continue
        type_chars = m.group(1)
        # get rid of spaces
        type_chars = type_chars.replace(' ','')
        # get a list of the characters (or character pairs)
        type_chars = type_chars.split(',')
        # Now get rid of the special tag that contained the types
        sub = re.sub(type_exp,'<tchar>',sub)
        m = TYPE_EXP.search(sub)
        if m is not None:
            sub = re.sub(TYPE_EXP,'<TCHAR>',sub)
        sub_generic = sub.strip()
        # Emit one concrete routine per requested type code.
        for char in type_chars:
            type_in,type_out,converter, rtype_in = generic_types[char]
            sub = convert_types(sub_generic,converter)
            function_def = sub.replace('<tchar>',char)
            function_def = function_def.replace('<TCHAR>',char.upper())
            function_def = function_def.replace('<type_in>',type_in)
            function_def = function_def.replace('<type_in_c>',
                                                generic_c_types[type_in])
            function_def = function_def.replace('<type_in_cc>',
                                                generic_cc_types[type_in])
            function_def = function_def.replace('<rtype_in>',rtype_in)
            function_def = function_def.replace('<rtype_in_c>',
                                                generic_c_types[rtype_in])
            function_def = function_def.replace('<type_out>',type_out)
            function_def = function_def.replace('<type_out_c>',
                                                generic_c_types[type_out])
            m = routine_name.match(function_def)
            if m:
                # Honor the caller's skip list by routine name.
                if m.group('name') in skip_names:
                    print 'Skipping',m.group('name')
                    continue
            else:
                print 'Possible bug: Failed to determine routines name'
            interface = interface + '\n\n' + function_def
    return interface
#def interface_to_module(interface_in,module_name,include_list,sdir='.'):
def interface_to_module(interface_in, module_name):
    """Wrap an interface body in an f2py ``python module`` skeleton."""
    header = (
        "!%f90 -*- f90 -*-\n"
        "\npython module " + module_name + " ! in\n"
        "!usercode '''#include \"cblas.h\"\n"
        "!'''\n"
        " interface \n"
    )
    footer = "\n end interface\nend module " + module_name
    return header + interface_in + footer
def process_includes(interface_in, sdir='.'):
    """Inline every ``<include_file=NAME>`` tag with the contents of NAME.

    Files are resolved relative to ``sdir``; lines whose first non-blank
    character is ``!`` (comments) are not expanded.
    """
    pattern = re.compile(r'\n\s*[^!]\s*<include_file=(.*?)>')
    result = interface_in
    for fname in pattern.findall(result):
        with open(os.path.join(sdir, fname)) as handle:
            result = result.replace('<include_file=%s>' % fname, handle.read())
    return result
def generate_interface(module_name,src_file,target_file,skip_names=[]):
    """Read a generic .pyf source, expand it, and write the concrete module.

    NOTE(review): ``skip_names`` has a mutable default; it is passed through
    read-only, so this is benign.
    """
    print "generating",module_name,"interface"
    f = open(src_file)
    generic_interface = f.read()
    f.close()
    sdir = os.path.dirname(src_file)
    # Inline <include_file=...> tags, then expand type-generic routines.
    generic_interface = process_includes(generic_interface,sdir)
    generic_interface = generic_expand(generic_interface,skip_names)
    module_def = interface_to_module(generic_interface,module_name)
    mkpath(os.path.dirname(target_file))
    f = open(target_file,'w')
    # If a hand-written user-routines file exists, prepend it verbatim.
    user_routines = os.path.join(sdir,module_name+"_user_routines.pyf")
    if os.path.exists(user_routines):
        f2 = open(user_routines)
        f.write(f2.read())
        f2.close()
    f.write(module_def)
    f.close()
def process_all():
    """Regenerate the concrete .pyf interface for every wrapper module."""
    # process the standard files.
    for name in ('fblas', 'cblas', 'clapack', 'flapack'):
        generate_interface(name, 'generic_' + name + '.pyf', name + '.pyf')


if __name__ == "__main__":
    process_all()
|
[
"[email protected]"
] | |
013c1d369981d94c454a38a281f78ed4f54d4b91
|
5f86944bdf1b810a84c63adc6ed01bbb48d2c59a
|
/kubernetes/test/test_settings_api.py
|
e266034720dee9676cdc5fb197e1b837aaa3f470
|
[
"Apache-2.0"
] |
permissive
|
m4ttshaw/client-python
|
384c721ba57b7ccc824d5eca25834d0288b211e2
|
4eac56a8b65d56eb23d738ceb90d3afb6dbd96c1
|
refs/heads/master
| 2021-01-13T06:05:51.564765 | 2017-06-21T08:31:03 | 2017-06-21T08:31:03 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 848 |
py
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.apis.settings_api import SettingsApi
class TestSettingsApi(unittest.TestCase):
    """ SettingsApi unit test stubs """

    def setUp(self):
        # Fresh API client per test; constructing it reads the default
        # kubernetes client configuration.
        self.api = kubernetes.client.apis.settings_api.SettingsApi()

    def tearDown(self):
        # No per-test cleanup required for these stubs.
        pass

    def test_get_api_group(self):
        """
        Test case for get_api_group
        """
        # Auto-generated (swagger-codegen) stub: intentionally empty.
        pass


if __name__ == '__main__':
    unittest.main()
|
[
"[email protected]"
] | |
efa90fcef860a3ef52b4c5a68e10fff81084c425
|
b5bc72861644c274b75e42374201ea8cdb84c1a2
|
/class_examples/class_college.py
|
23c904a627487340213fb1578c4134909be7e295
|
[] |
no_license
|
Aadhya-Solution/PythonExample
|
737c3ddc9ad5e3d0cde24ac9f366ce2de2fa6cfe
|
34bc04570182130ebc13b6c99997c81834ad5f53
|
refs/heads/master
| 2022-12-18T09:54:30.857011 | 2020-08-24T13:53:59 | 2020-08-24T13:53:59 | 288,183,879 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 91 |
py
|
# Demonstration script: instantiate a class defined in a sibling module.
import class_student
# Create a Student with (name, age) and show its attributes.
ps=class_student.Student('Shiva',20)
# Python-2 print statements.
print ps.name
print ps.age
|
[
"[email protected]"
] | |
822de060466784748690413911f1bd522c7cfdc4
|
ebb0f88adedf4d5202185b27fd2b8cecc1e59ebb
|
/pplot/figure.py
|
5f79f1facf388ef88e8cc178d7e5fd4a5acc9fd6
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
pmacosta/pplot
|
c528de8a6fcec83784ed92b84979a4d738444f57
|
ac2e40aa1fc22a3c2aa39d894bc71c29ba33058a
|
refs/heads/master
| 2020-12-31T04:28:02.763633 | 2019-06-11T18:35:59 | 2019-06-11T18:35:59 | 58,674,101 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 45,520 |
py
|
"""
Generate presentation-quality plots.
[[[cog
import os, sys
if sys.hexversion < 0x03000000:
import __builtin__
else:
import builtins as __builtin__
sys.path.append(os.environ['TRACER_DIR'])
import trace_ex_plot_figure
exobj_plot = trace_ex_plot_figure.trace_module(no_print=True)
]]]
[[[end]]]
"""
# figure.py
# Copyright (c) 2013-2019 Pablo Acosta-Serafini
# See LICENSE for details
# pylint: disable=C0111,C0302,C0413,R0201,R0205,R0914,R0915,W0105,W0212
# Standard library imports
from __future__ import print_function
import math
import os
import sys
import warnings
# PyPI imports
if os.environ.get("READTHEDOCS", "") != "True": # pragma: no cover
import PIL
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=RuntimeWarning)
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.transforms import Bbox
import pmisc
import pexdoc.exh
import pexdoc.pcontracts
import peng
# Intra-package imports
from .constants import TITLE_FONT_SIZE
from .panel import Panel
from .functions import _F, _MF, _intelligent_ticks
###
# Global variables
###
INF = sys.float_info.max  # sentinel "infinity" for min/max bounding-box scans
SPACER = 0.2  # in inches
PANEL_SEP = 10 * SPACER  # vertical separation between stacked panels, in inches
###
# Class
###
class Figure(object):
r"""
Generate presentation-quality plots.
:param panels: One or more data panels
:type panels: :py:class:`pplot.Panel` *or list of*
:py:class:`pplot.Panel` *or None*
:param indep_var_label: Independent variable label
:type indep_var_label: string
:param indep_var_units: Independent variable units
:type indep_var_units: string
:param indep_axis_tick_labels: Independent axis tick labels. If not None
overrides ticks automatically generated
or as given by the **indep_axis_ticks**
argument (ignored for figures with a
logarithmic independent axis)
:type indep_axis_tick_labels: list of strings or None
:param indep_axis_ticks: Independent axis tick marks. If not None
overrides automatically generated tick marks if
the axis type is linear. If None automatically
generated tick marks are used for the independent
axis
:type indep_axis_ticks: list, Numpy vector or None
:param fig_width: Hard copy plot width in inches. If None the width is
automatically calculated so that the figure has a 4:3
aspect ratio and there is no horizontal overlap between
any two text elements in the figure
:type fig_width: `PositiveRealNum <https://pexdoc.readthedocs.io/en/
stable/ptypes.html#positiverealnum>`_ or None
:param fig_height: Hard copy plot height in inches. If None the height is
automatically calculated so that the figure has a 4:3
aspect ratio and there is no vertical overlap between
any two text elements in the figure
:type fig_height: `PositiveRealNum <https://pexdoc.readthedocs.io/en/
stable/ptypes.html#positiverealnum>`_ or None
:param title: Plot title
:type title: string
:param log_indep_axis: Flag that indicates whether the independent
axis is linear (False) or logarithmic (True)
:type log_indep_axis: boolean
:param dpi: Dots per inch to be used while showing or displaying figure
:type dpi: positive number
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. pplot.figure.Figure.__init__
:raises:
* RuntimeError (Argument \`dpi\` is not valid)
* RuntimeError (Argument \`fig_height\` is not valid)
* RuntimeError (Argument \`fig_width\` is not valid)
* RuntimeError (Argument \`indep_axis_tick_labels\` is not valid)
* RuntimeError (Argument \`indep_axis_ticks\` is not valid)
* RuntimeError (Argument \`indep_var_label\` is not valid)
* RuntimeError (Argument \`indep_var_units\` is not valid)
* RuntimeError (Argument \`log_indep_axis\` is not valid)
* RuntimeError (Argument \`panels\` is not valid)
* RuntimeError (Argument \`title\` is not valid)
* RuntimeError (Figure size is too small: minimum width *[min_width]*,
minimum height *[min_height]*)
* RuntimeError (Number of tick locations and number of tick labels
mismatch)
* TypeError (Panel *[panel_num]* is not fully specified)
* ValueError (Figure cannot be plotted with a logarithmic independent
axis because panel *[panel_num]*, series *[series_num]* contains
negative independent data points)
.. [[[end]]]
"""
# pylint: disable=R0902,R0913
def __init__(
    self,
    panels=None,
    indep_var_label="",
    indep_var_units="",
    indep_axis_tick_labels=None,
    indep_axis_ticks=None,
    fig_width=None,
    fig_height=None,
    title="",
    log_indep_axis=False,
    dpi=100.0,
):  # noqa
    """Initialize the figure; arguments are documented in the class docstring."""
    # Register argument-validation exceptions up front (pexdoc contracts).
    pexdoc.exh.addai(
        "indep_axis_ticks",
        (indep_axis_ticks is not None)
        and (
            (not isinstance(indep_axis_ticks, list))
            and (not isinstance(indep_axis_ticks, np.ndarray))
        ),
    )
    # Tick labels must be a list and, when explicit ticks are given, match
    # their length one-to-one.
    pexdoc.exh.addai(
        "indep_axis_tick_labels",
        (indep_axis_tick_labels is not None)
        and (
            (not isinstance(indep_axis_tick_labels, list))
            or (
                isinstance(indep_axis_tick_labels, list)
                and (indep_axis_ticks is not None)
                and (len(indep_axis_tick_labels) != len(indep_axis_ticks))
            )
        ),
    )
    # Private attributes
    self._need_redraw = False
    self._min_fig_width = None
    self._min_fig_height = None
    self._size_given = False
    # Public attributes
    self._dpi = None
    self._indep_axis_ticks = None
    self._indep_axis_tick_labels = None
    self._fig = None
    self._panels = None
    self._indep_var_label = None
    self._title = None
    self._log_indep_axis = None
    self._fig_width = None
    self._fig_height = None
    self._indep_var_units = None
    self._indep_var_div = None
    self._axes_list = []
    self._scaling_done = False
    self._indep_axis_dict = None
    self._title_obj = None
    # Assignment of arguments to attributes.
    # NOTE: order matters -- log_indep_axis must be set before the ticks
    # (ticks are ignored on log axes) and panels before the figure size.
    self._set_dpi(dpi)
    self._set_indep_var_label(indep_var_label)
    self._set_indep_var_units(indep_var_units)
    self._set_title(title)
    self._set_log_indep_axis(log_indep_axis)
    self._set_indep_axis_ticks(
        indep_axis_ticks if not self.log_indep_axis else None
    )
    self._set_indep_axis_tick_labels(indep_axis_tick_labels)
    self._set_panels(panels)
    self._set_fig_width(fig_width)
    self._set_fig_height(fig_height)
def __bool__(self):  # pragma: no cover
    """
    Test if the figure has at least a panel associated with it.

    .. note:: This method applies to Python 3.x
    """
    # Only an unset (None) panel list makes the figure falsy; an empty list
    # is still truthy here.
    return self._panels is not None
def __iter__(self):
    r"""
    Return an iterator over the panel object(s) in the figure.

    Panels are yielded in the order in which they are plotted (top panel
    first), so ``for panel in figure`` walks the figure top to bottom.
    """
    return iter(self._panels)
def __nonzero__(self):  # pragma: no cover
    """
    Test if the figure has at least a panel associated with it.

    .. note:: This method applies to Python 2.x
    """
    # Python-2 counterpart of __bool__; same None-vs-set semantics.
    return self._panels is not None
def __str__(self):
    """Return a textual description of the figure: panels, labels, and size."""
    # pylint: disable=C1801
    self._create_figure()
    fig_width, fig_height = self._fig_dims()
    not_spec = lambda value: value if value not in ["", None] else "not specified"
    parts = []
    if not self.panels:
        parts.append("Panels: None\n")
    else:
        for num, panel in enumerate(self.panels):
            parts.append("Panel {0}:\n".format(num))
            indented = "\n".join(
                3 * " " + line for line in str(panel).split("\n")
            )
            parts.append(indented + "\n")
    parts.append(
        "Independent variable label: {0}\n".format(not_spec(self.indep_var_label))
    )
    parts.append(
        "Independent variable units: {0}\n".format(not_spec(self.indep_var_units))
    )
    parts.append(
        "Logarithmic independent axis: {0}\n".format(self.log_indep_axis)
    )
    parts.append("Title: {0}\n".format(not_spec(self.title)))
    parts.append("Figure width: {0}\n".format(fig_width))
    parts.append("Figure height: {0}\n".format(fig_height))
    return "".join(parts)
def _bbox(self, obj):
    """Return bounding box of a matplotlib artist, in figure inches."""
    renderer = self._fig.canvas.get_renderer()
    # Window extent is in display (pixel) units; transform to inches.
    return obj.get_window_extent(renderer=renderer).transformed(
        self._fig.dpi_scale_trans.inverted()
    )
def _calculate_min_figure_size(self):
    """Calculate minimum panel and figure size (inches), caching the result."""
    # Round down to a whole number of pixels, then convert back to inches.
    dround = lambda x: math.floor(x) / self.dpi
    title_width = 0
    if self.title not in [None, ""]:
        title_bbox = self._bbox(self._title_obj)
        title_width = title_bbox.width
    # Minimum width must fit (a) the widest spine/legend plus axis
    # overhangs, (b) the y-axis annotations plus x-label, and (c) the title.
    min_width = max(
        [
            (
                max(panel._left_overhang for panel in self.panels)
                + max(
                    max(panel._min_spine_bbox.width, panel._legend_width)
                    for panel in self.panels
                )
                + max(panel._right_overhang for panel in self.panels)
            ),
            max(
                panel._prim_yaxis_annot
                + panel._indep_label_width
                + panel._sec_yaxis_annot
                for panel in self.panels
            ),
            title_width,
        ]
    )
    self._min_fig_width = dround(min_width * self.dpi)
    npanels = len(self.panels)
    # Height: tallest panel replicated for every panel plus inter-panel gaps.
    self._min_fig_height = dround(
        npanels * max([panel._min_bbox.height * self.dpi for panel in self.panels])
        + ((npanels - 1) * PANEL_SEP)
    )
def _check_figure_spec(self, fig_width=None, fig_height=None):
    """Raise when a requested figure dimension is below the computed minimum."""
    too_small = pexdoc.exh.addex(
        RuntimeError,
        "Figure size is too small: minimum width *[min_width]*, "
        "minimum height *[min_height]*",
    )
    width_bad = bool(fig_width and (fig_width < self._min_fig_width))
    height_bad = bool(fig_height and (fig_height < self._min_fig_height))
    too_small(
        width_bad or height_bad,
        [
            _F("min_width", self._min_fig_width),
            _F("min_height", self._min_fig_height),
        ],
    )
def _create_figure(self, raise_exception=False):
    """Create and resize the figure; return its pixel-exact bounding box."""
    if raise_exception:
        specified_ex = pexdoc.exh.addex(
            RuntimeError, "Figure object is not fully specified"
        )
        specified_ex(raise_exception and (not self._complete))
    if not self._complete:
        # Nothing to draw yet: return an empty bounding box.
        return Bbox([[0, 0], [0, 0]])
    if self._need_redraw:
        self._size_given = (self._fig_width is not None) and (
            self._fig_height is not None
        )
        # First _draw call is to calculate approximate figure size, (until
        # matplotlib actually draws the figure, all the bounding boxes of
        # the elements in the figure are null boxes. The second _draw call
        # is to draw figure with either the calculated minimum dimensions
        # or the user-given dimensions, provided they are equal or greater
        # than the minimum dimensions
        self._draw()
        if not self._size_given:
            self._draw()
        bbox = self._fig_bbox()
        fig_width, fig_height = self._fig_dims()
        self._fig.set_size_inches(fig_width, fig_height, forward=True)
        self._need_redraw = False
        # From https://github.com/matplotlib/matplotlib/issues/7984:
        # When the Figure is drawn, its Axes are sorted based on zorder
        # with a stable sort, and then drawn in that order. Then within
        # each Axes, artists are sorted based on zorder. Therefore you
        # can't interleave the drawing orders of artists from one Axes with
        # those from another.
    else:
        bbox = self._fig_bbox()
        fig_width, fig_height = self._fig_dims()
    # Get figure pixel size exact: split width/height into integer halves so
    # the recentered box covers exactly width x height pixels.
    width = int(round(fig_width * self._dpi))
    lwidth = int(round(width / 2.0))
    rwidth = width - lwidth
    height = int(round(fig_height * self._dpi))
    bheight = int(round(height / 2.0))
    theight = height - bheight
    bbox_xcenter = bbox.xmin + 0.5 * bbox.width
    bbox_ycenter = bbox.ymin + 0.5 * bbox.height
    bbox = Bbox(
        [
            [
                bbox_xcenter - (lwidth / self._dpi),
                bbox_ycenter - (bheight / self._dpi),
            ],
            [
                bbox_xcenter + (rwidth / self._dpi),
                bbox_ycenter + (theight / self._dpi),
            ],
        ]
    )
    return bbox
def _draw(self):
    """Scale all series to a common independent axis and draw the panels."""
    # pylint: disable=C0326,W0612
    num_panels = len(self.panels)
    if not self._scaling_done:
        # Find union of the independent variable data set of all panels
        indep_axis_ticks = self._get_global_xaxis()
        self._indep_var_div = indep_axis_ticks.div
        self._indep_axis_ticks = indep_axis_ticks.locs
        # Scale all panel series
        for panel_obj in self.panels:
            panel_obj._scale_indep_var(self._indep_var_div)
        # User-supplied tick labels (when given) win over generated ones.
        self._indep_axis_tick_labels = (
            self._indep_axis_tick_labels or indep_axis_ticks.labels
        )
        self._indep_axis_dict = {
            "log_indep": self.log_indep_axis,
            "indep_var_min": indep_axis_ticks.min,
            "indep_var_max": indep_axis_ticks.max,
            "indep_var_locs": indep_axis_ticks.locs,
            "indep_var_labels": self._indep_axis_tick_labels,
            "indep_axis_label": self.indep_var_label,
            "indep_axis_units": self.indep_var_units,
            "indep_axis_unit_scale": indep_axis_ticks.unit_scale,
        }
        self._scaling_done = True
    # Create required number of panels
    self._draw_panels()
    # Draw figure otherwise some bounding boxes return NaN
    FigureCanvasAgg(self._fig).draw()
    self._calculate_min_figure_size()
def _draw_panels(self, fbbox=None):
    """Draw all panels; redraw with corrected margins when the size is known."""
    def init_figure(num_panels, fbbox=None):
        # Create a fresh figure with one stacked subplot row per panel.
        fig_width, fig_height = self._fig_dims()
        figsize = (fig_width, fig_height) if fig_width and fig_height else None
        plt.close("all")
        self._fig, axesh = plt.subplots(
            nrows=num_panels, ncols=1, dpi=self.dpi, figsize=figsize
        )
        plt.tight_layout(pad=0, h_pad=2, rect=fbbox)
        # subplots() returns a bare Axes (not a list) when nrows == 1.
        axesh = [axesh] if num_panels == 1 else axesh
        if self.title not in ["", None]:
            self._title_obj = self._fig.suptitle(
                self.title,
                fontsize=TITLE_FONT_SIZE,
                horizontalalignment="center",
                verticalalignment="top",
                multialignment="center",
                y=1.0,
            )
        return axesh, fig_width, fig_height

    num_panels = len(self.panels)
    axesh, fig_width, fig_height = init_figure(num_panels, fbbox)
    self._axes_list = []
    top = right = -INF
    bottom = left = +INF
    # Guarantee at least one panel displays the independent axis.
    if all(not panel.display_indep_axis for panel in self.panels):
        self.panels[-1]._display_indep_axis = True
    for panel, axish in zip(self.panels, axesh):
        disp_indep_axis = (num_panels == 1) or panel.display_indep_axis
        panel._draw(disp_indep_axis, self._indep_axis_dict, axish)
        # Accumulate the union of all panel bounding boxes.
        left = min(left, panel._panel_bbox.xmin)
        bottom = min(bottom, panel._panel_bbox.ymin)
        right = max(right, panel._panel_bbox.xmax)
        top = max(top, panel._panel_bbox.ymax)
    if self._title_obj:
        title_bbox = self._bbox(self._title_obj)
        left = min(title_bbox.xmin, left)
        right = max(title_bbox.xmax, right)
    if fig_width and fig_height:
        # Compute normalized margins so every artist fits the canvas, then
        # rebuild the figure once with the adjusted layout rectangle.
        xdelta_left = -left / fig_width
        ydelta_bot = -bottom / fig_height
        xdelta_right = 1 - ((right - fig_width) / fig_width)
        ydelta_top = (
            title_bbox.ymin / top
            if self._title_obj
            else 1 - ((top - fig_height) / fig_height)
        )
        fbbox = [xdelta_left, ydelta_bot, xdelta_right, ydelta_top]
        axesh, _, _ = init_figure(num_panels, fbbox)
        for panel, axish in zip(self.panels, axesh):
            disp_indep_axis = (num_panels == 1) or panel.display_indep_axis
            panel._draw(disp_indep_axis, self._indep_axis_dict, axish)
def _fig_bbox(self):
    """Return the bounding box enclosing the title and every panel."""
    title_left = title_bottom = +INF
    title_right = title_top = -INF
    if self._title_obj:
        title_box = self._bbox(self._title_obj)
        title_left, title_bottom = title_box.xmin, title_box.ymin
        title_right, title_top = title_box.xmax, title_box.ymax
    return Bbox(
        [
            [
                min(title_left, min(pobj._left for pobj in self.panels)),
                min(title_bottom, min(pobj._bottom for pobj in self.panels)),
            ],
            [
                max(title_right, max(pobj._right for pobj in self.panels)),
                max(title_top, max(pobj._top for pobj in self.panels)),
            ],
        ]
    )
def _fig_dims(self):
    """Return (width, height) in inches: user-given size, else computed minimum."""
    width = self._fig_width if self._fig_width else self._min_fig_width
    height = self._fig_height if self._fig_height else self._min_fig_height
    return width, height
def _get_axes_list(self):
    # Building the figure populates self._axes_list as a side effect.
    self._create_figure()
    return self._axes_list
def _get_complete(self):
    """Return True if figure is fully specified, otherwise returns False."""
    # NOTE(review): returns a truthy int (the length) rather than a strict
    # bool when panels exist; callers use it only in boolean context.
    return (self.panels is not None) and len(self.panels)
def _get_dpi(self):
    # Plain accessor backing the ``dpi`` property.
    return self._dpi
def _get_fig(self):
    # Ensure the matplotlib figure exists before handing it out.
    self._create_figure()
    return self._fig
def _get_fig_height(self):
    # Lazily resolve the height to the computed minimum when not user-given.
    if self._complete and (self._fig_height is None):
        self._create_figure()
        self._fig_height = self._min_fig_height
    return self._fig_height
def _get_fig_width(self):
    # Lazily resolve the width to the computed minimum when not user-given.
    if self._complete and (self._fig_width is None):
        self._create_figure()
        self._fig_width = self._min_fig_width
    return self._fig_width
def _get_global_xaxis(self):
    """Build the common independent-axis tick set across all panels/series."""
    log_ex = pexdoc.exh.addex(
        ValueError,
        "Figure cannot be plotted with a logarithmic "
        "independent axis because panel *[panel_num]*, series "
        "*[series_num]* contains negative independent data points",
    )
    ticks_num_ex = pexdoc.exh.addex(
        RuntimeError, "Number of tick locations and number of tick labels mismatch"
    )
    glob_indep_var = []
    for panel_num, panel_obj in enumerate(self.panels):
        for series_num, series_obj in enumerate(panel_obj.series):
            # Log axes cannot represent negative independent values.
            log_ex(
                bool(self.log_indep_axis and (min(series_obj.indep_var) < 0)),
                edata=_MF("panel_num", panel_num, "series_num", series_num),
            )
            # Merge the (mantissa-rounded) independent data of every series.
            glob_indep_var = np.unique(
                np.append(
                    glob_indep_var,
                    np.array(
                        [
                            peng.round_mantissa(element, 10)
                            for element in series_obj.indep_var
                        ]
                    ),
                )
            )
    indep_axis_ticks = _intelligent_ticks(
        glob_indep_var,
        min(glob_indep_var),
        max(glob_indep_var),
        tight=True,
        log_axis=self.log_indep_axis,
        tick_list=(None if self._log_indep_axis else self._indep_axis_ticks),
    )
    # User-given labels must match the generated tick count one-to-one.
    ticks_num_ex(
        (self._indep_axis_tick_labels is not None)
        and (len(self._indep_axis_tick_labels) != len(indep_axis_ticks.labels))
    )
    return indep_axis_ticks
def _get_indep_axis_scale(self):
    # Drawing computes the independent-variable scale divisor.
    self._create_figure()
    return self._indep_var_div
def _get_indep_axis_ticks(self):
    # Drawing resolves the final tick locations.
    self._create_figure()
    return self._indep_axis_ticks
def _get_indep_axis_tick_labels(self):
    # Drawing resolves the final tick labels (user-given or generated).
    self._create_figure()
    return self._indep_axis_tick_labels
def _get_indep_var_label(self):
    # Plain accessor for the independent-variable label.
    return self._indep_var_label
def _get_indep_var_units(self):
    # Plain accessor for the independent-variable units.
    return self._indep_var_units
def _get_log_indep_axis(self):
    # Plain accessor for the logarithmic-axis flag.
    return self._log_indep_axis
def _get_panels(self):
    # Plain accessor for the panel list (None when unset).
    return self._panels
def _get_title(self):
    # Plain accessor for the figure title.
    return self._title
@pexdoc.pcontracts.contract(dpi="None|positive_real_num")
def _set_dpi(self, dpi):
    # The contract explicitly accepts None, but ``float(None)`` raises a
    # TypeError; guard it so ``fig.dpi = None`` behaves like the other
    # optional setters (stores None) instead of crashing.
    self._dpi = float(dpi) if dpi is not None else None
@pexdoc.pcontracts.contract(fig_height="None|positive_real_num")
def _set_fig_height(self, fig_height):
    # Validate the requested height against the computed minimum (only
    # possible once the figure is fully specified).
    if self._complete:
        self._create_figure()
        self._check_figure_spec(self.fig_width, fig_height)
    self._fig_height = fig_height
    self._need_redraw = True
@pexdoc.pcontracts.contract(fig_width="None|positive_real_num")
def _set_fig_width(self, fig_width):
    # Validate the requested width against the computed minimum (only
    # possible once the figure is fully specified).
    if self._complete:
        self._create_figure()
        self._check_figure_spec(fig_width, self.fig_height)
    self._fig_width = fig_width
    self._need_redraw = True
@pexdoc.pcontracts.contract(indep_axis_ticks="None|increasing_real_numpy_vector")
def _set_indep_axis_ticks(self, indep_axis_ticks):
    # Custom ticks only take effect on linear axes (filtered by the caller).
    self._indep_axis_ticks = indep_axis_ticks
    self._need_redraw = True
@pexdoc.pcontracts.contract(indep_axis_tick_labels="None|list(str)")
def _set_indep_axis_tick_labels(self, indep_axis_tick_labels):
    # Tick labels are ignored for logarithmic independent axes.
    if not self._log_indep_axis:
        self._indep_axis_tick_labels = indep_axis_tick_labels
        self._need_redraw = True
        # Rebuild immediately so the label-count validation runs right away.
        self._create_figure()
@pexdoc.pcontracts.contract(indep_var_label="None|str")
def _set_indep_var_label(self, indep_var_label):
    # Store the label and flag the figure for redraw.
    self._indep_var_label = indep_var_label
    self._need_redraw = True
@pexdoc.pcontracts.contract(indep_var_units="None|str")
def _set_indep_var_units(self, indep_var_units):
    # Store the units string and flag the figure for redraw.
    self._indep_var_units = indep_var_units
    self._need_redraw = True
@pexdoc.pcontracts.contract(log_indep_axis="None|bool")
def _set_log_indep_axis(self, log_indep_axis):
    # Store the log-axis flag and flag the figure for redraw.
    self._log_indep_axis = log_indep_axis
    self._need_redraw = True
@pexdoc.pcontracts.contract(title="None|str")
def _set_title(self, title):
    # Store the title and flag the figure for redraw.
    self._title = title
    self._need_redraw = True
def _set_panels(self, panels):
    # Normalize a single panel to a one-element list; keep None as None.
    self._panels = (
        (panels if isinstance(panels, list) else [panels])
        if panels is not None
        else panels
    )
    if self.panels is not None:
        self._validate_panels()
    self._need_redraw = True
def _validate_panels(self):
    """Verify elements of panel list are of the right type and fully specified."""
    invalid_ex = pexdoc.exh.addai("panels")
    specified_ex = pexdoc.exh.addex(
        TypeError, "Panel *[panel_num]* is not fully specified"
    )
    for num, obj in enumerate(self.panels):
        # Each entry must be a Panel instance with all mandatory fields set.
        invalid_ex(not isinstance(obj, Panel))
        specified_ex(not obj._complete, _F("panel_num", num))
@pexdoc.pcontracts.contract(fname="file_name", ftype="None|str", compress=bool)
def save(self, fname, ftype=None, compress=True):
    r"""
    Save the figure to a file.

    :param fname: File name; its extension is reconciled with **ftype**
    :type fname: `FileName <https://pexdoc.readthedocs.io/en/stable/
                 ptypes.html#filename>`_
    :param ftype: File type, 'PNG', 'EPS' or 'PDF' (case insensitive).
                  If None the type is inferred from the file extension
    :type ftype: string
    :param compress: Flag that indicates whether a PNG hard copy is to be
                     palette-compressed (True) or not (False); ignored for
                     vector output
    :type compress: boolean

    :raises:
     * RuntimeError (Could not determine file type)
     * RuntimeError (Figure object is not fully specified)
     * RuntimeError (Incongruent file type and file extension)
     * RuntimeError (Unsupported file type: *[file_type]*)
    """
    unsupported_ex = pexdoc.exh.addex(
        RuntimeError, "Unsupported file type: *[file_type]*"
    )
    no_ftype_ex = pexdoc.exh.addex(RuntimeError, "Could not determine file type")
    incongruent_ftype = pexdoc.exh.addex(
        RuntimeError, "Incongruent file type and file extension"
    )
    sup_ftypes = ["png", "eps", "pdf"]
    unsupported_ex(
        bool((ftype is not None) and (ftype.lower() not in sup_ftypes)),
        _F("file_type", ftype),
    )
    basename, extension = os.path.splitext(fname)
    extension = extension.lstrip(".")
    no_ftype_ex(bool((ftype is None) and (extension.lower() not in sup_ftypes)))
    incongruent_ftype(
        bool(
            (ftype is not None)
            and extension
            and (ftype.upper() != extension.upper())
        )
    )
    # Reconcile explicit type and extension; fill in whichever is missing.
    ftype = (ftype or extension).upper()
    extension = extension or ftype.lower()
    fname = "{0}.{1}".format(basename, extension)
    bbox = self._create_figure(raise_exception=True)
    # Raster output honors the figure DPI and the pixel-exact bounding box
    # computed by _create_figure; vector output lets matplotlib tighten it.
    dpi = self.dpi if ftype == "PNG" else None
    bbox = bbox if ftype == "PNG" else "tight"
    # Matplotlib seems to have a problem with ~/, expand it to $HOME
    fname = os.path.expanduser(fname)
    pmisc.make_dir(fname)
    self._fig_width, self._fig_height = self._fig_dims()
    # BUG FIX: matplotlib's savefig() has no ``bbox`` keyword -- the computed
    # bounding box must be passed as ``bbox_inches`` (previously the
    # hard-coded string "tight" was passed to a nonexistent parameter and the
    # pixel-exact box above was silently discarded).
    self._fig.savefig(
        fname,
        dpi=dpi,
        bbox_inches=bbox,
        format=ftype,
        bbox_extra_artists=(self._title_obj,),
    )
    plt.close("all")
    if (ftype == "PNG") and compress:
        # Re-encode the PNG: drop the alpha channel and, when at most 256
        # distinct colors are used (getcolors returns None above that), move
        # to an 8-bit adaptive palette to shrink the file.
        img = PIL.Image.open(fname)
        img = img.convert("RGB")
        ncolors = img.getcolors(maxcolors=256)
        if ncolors is not None:
            img = img.convert("P", palette=PIL.Image.ADAPTIVE)
        img.save(fname, quality=100, optimize=True)
    def show(self):
        """
        Display the figure.

        .. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
        .. Auto-generated exceptions documentation for
        .. pplot.figure.Figure.show

        :raises:
         * RuntimeError (Figure object is not fully specified)

         * RuntimeError (Number of tick locations and number of tick labels
           mismatch)

         * ValueError (Figure cannot be plotted with a logarithmic independent
           axis because panel *[panel_num]*, series *[series_num]* contains
           negative independent data points)

        .. [[[end]]]
        """
        # Build the Matplotlib figure first; raise_exception=True surfaces the
        # "object is not fully specified" error before any window is opened
        self._create_figure(raise_exception=True)
        # Cache the final dimensions so later queries see the displayed size
        self._fig_width, self._fig_height = self._fig_dims()
        plt.show()
# Managed attributes
_complete = property(_get_complete)
axes_list = property(_get_axes_list, doc="Matplotlib figure axes handle list")
"""
Get Matplotlib figure axes handle list.
:code:`None` is returned if figure not fully specified. Useful if
annotations or further customizations to the panel(s) are needed. Each
panel has an entry in the list, which is sorted in the order the panels are
plotted (top to bottom). Each panel entry is a dictionary containing the
following key-value pairs:
* **number** (*integer*) -- panel number, panel 0 is the top-most panel
* **primary** (*Matplotlib axis object*) -- axis handle for the primary
axis, None if the figure has not primary axis
* **secondary** (*Matplotlib axis object*) -- axis handle for the
secondary axis, None if the figure has no secondary axis
:type: list
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. pplot.figure.Figure.axes_list
:raises: (when retrieved)
* RuntimeError (Number of tick locations and number of tick labels
mismatch)
* ValueError (Figure cannot be plotted with a logarithmic independent
axis because panel *[panel_num]*, series *[series_num]* contains
negative independent data points)
.. [[[end]]]
"""
dpi = property(_get_dpi, _set_dpi, doc="Figure dots per inch (DPI)")
r"""
Get or set the dots per inch (DPI) of the figure.
:type: `PositiveRealNum <https://pexdoc.readthedocs.io/en/
stable/ptypes.html#positiverealnum>`_ or None
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for pplot.figure.Figure.dpi
:raises: (when assigned) RuntimeError (Argument \`dpi\` is not valid)
.. [[[end]]]
"""
fig = property(_get_fig, doc="Figure handle")
"""
Get the Matplotlib figure handle.
Useful if annotations or further customizations to the figure are needed.
:code:`None` is returned if figure is not fully specified
:type: Matplotlib figure handle or None
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for pplot.figure.Figure.fig
:raises: (when retrieved)
* RuntimeError (Number of tick locations and number of tick labels
mismatch)
* ValueError (Figure cannot be plotted with a logarithmic independent
axis because panel *[panel_num]*, series *[series_num]* contains
negative independent data points)
.. [[[end]]]
"""
fig_height = property(
_get_fig_height, _set_fig_height, doc="height of the hard copy plot"
)
r"""
Get or set the height (in inches) of the hard copy plot.
:code:`None` is returned if figure is not fully specified.
:type: `PositiveRealNum <https://pexdoc.readthedocs.io/en/
stable/ptypes.html#positiverealnum>`_ or None
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. pplot.figure.Figure.fig_height
:raises: (when assigned)
* RuntimeError (Argument \`fig_height\` is not valid)
* RuntimeError (Figure size is too small: minimum width *[min_width]*,
minimum height *[min_height]*)
* RuntimeError (Number of tick locations and number of tick labels
mismatch)
* ValueError (Figure cannot be plotted with a logarithmic independent
axis because panel *[panel_num]*, series *[series_num]* contains
negative independent data points)
.. [[[end]]]
"""
fig_width = property(
_get_fig_width, _set_fig_width, doc="Width of the hard copy plot"
)
r"""
Get or set the width (in inches) of the hard copy plot.
:code:`None` is returned if figure is not fully specified.
:type: `PositiveRealNum <https://pexdoc.readthedocs.io/en/
stable/ptypes.html#positiverealnum>`_ or None
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. pplot.figure.Figure.fig_width
:raises: (when assigned)
* RuntimeError (Argument \`fig_width\` is not valid)
* RuntimeError (Figure size is too small: minimum width *[min_width]*,
minimum height *[min_height]*)
* RuntimeError (Number of tick locations and number of tick labels
mismatch)
* ValueError (Figure cannot be plotted with a logarithmic independent
axis because panel *[panel_num]*, series *[series_num]* contains
negative independent data points)
.. [[[end]]]
"""
indep_axis_scale = property(_get_indep_axis_scale, doc="Independent axis scale")
"""
Get the scale of the figure independent axis.
:code:`None` is returned if figure is not fully specified.
:type: float or None if figure has no panels associated with it
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. pplot.figure.Figure.indep_axis_scale
:raises: (when retrieved)
* RuntimeError (Number of tick locations and number of tick labels
mismatch)
* ValueError (Figure cannot be plotted with a logarithmic independent
axis because panel *[panel_num]*, series *[series_num]* contains
negative independent data points)
.. [[[end]]]
"""
indep_axis_ticks = property(
_get_indep_axis_ticks,
_set_indep_axis_ticks,
doc="Independent axis tick locations",
)
r"""
Get or set the independent axis (scaled) tick locations.
:type: list
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. pplot.figure.Figure.indep_axis_ticks
:raises:
* When assigned
* RuntimeError (Argument \`indep_axis_ticks\` is not valid)
* When retrieved
* RuntimeError (Number of tick locations and number of tick labels
mismatch)
* ValueError (Figure cannot be plotted with a logarithmic
independent axis because panel *[panel_num]*, series *[series_num]*
contains negative independent data points)
.. [[[end]]]
"""
indep_axis_tick_labels = property(
_get_indep_axis_tick_labels,
_set_indep_axis_tick_labels,
doc="Independent axis tick labels",
)
r"""
Get or set the independent axis tick labels.
Labels are ignored for figures with a logarithmic independent axis
:type: list of strings
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. pplot.figure.Figure.indep_axis_tick_labels
:raises:
* When assigned
* RuntimeError (Argument \`indep_axis_tick_labels\` is not valid)
* RuntimeError (Number of tick locations and number of tick labels
mismatch)
* ValueError (Figure cannot be plotted with a logarithmic
independent axis because panel *[panel_num]*, series *[series_num]*
contains negative independent data points)
* When retrieved
* RuntimeError (Number of tick locations and number of tick labels
mismatch)
* ValueError (Figure cannot be plotted with a logarithmic
independent axis because panel *[panel_num]*, series *[series_num]*
contains negative independent data points)
.. [[[end]]]
"""
indep_var_label = property(
_get_indep_var_label, _set_indep_var_label, doc="Figure independent axis label"
)
r"""
Get or set the figure independent variable label
:type: string or None
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. pplot.figure.Figure.indep_var_label
:raises: (when assigned) RuntimeError (Argument \`indep_var_label\` is
not valid)
.. [[[end]]]
"""
indep_var_units = property(
_get_indep_var_units, _set_indep_var_units, doc="Figure independent axis units"
)
r"""
Get or set the figure independent variable units.
:type: string or None
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. pplot.figure.Figure.indep_var_units
:raises: (when assigned) RuntimeError (Argument \`indep_var_units\` is
not valid)
.. [[[end]]]
"""
log_indep_axis = property(
_get_log_indep_axis, _set_log_indep_axis, doc="Figure log_indep_axis"
)
r"""
Get or set the figure logarithmic independent axis flag.
This flag indicates whether the independent axis is linear (False) or
logarithmic (True)
:type: boolean
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. pplot.figure.Figure.log_indep_axis
:raises: (when assigned) RuntimeError (Argument \`log_indep_axis\` is
not valid)
.. [[[end]]]
"""
panels = property(_get_panels, _set_panels, doc="Figure panel(s)")
r"""
Get or set the figure panel(s).
:code:`None` is returned if no panels have been specified
:type: :py:class:`pplot.Panel`, list of
:py:class:`pplot.panel` or None
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. pplot.figure.Figure.panels
:raises: (when assigned)
* RuntimeError (Argument \`panels\` is not valid)
* TypeError (Panel *[panel_num]* is not fully specified)
.. [[[end]]]
"""
title = property(_get_title, _set_title, doc="Figure title")
r"""
Get or set the figure title.
:type: string or None
.. [[[cog cog.out(exobj_plot.get_sphinx_autodoc()) ]]]
.. Auto-generated exceptions documentation for
.. pplot.figure.Figure.title
:raises: (when assigned) RuntimeError (Argument \`title\` is not
valid)
.. [[[end]]]
"""
|
[
"[email protected]"
] | |
6a5f99fc2d8fd1c5ad7da2f097eecb0cf51bf7cf
|
0ba2c3776618b5b8b76f4a23f21e9c6ad3f6e2e1
|
/afterclass/homework1/007_1.py
|
98e2ac33076dbf3ab283e7a973e4e7a0a135d6f8
|
[] |
no_license
|
WangDongDong1234/python_code
|
6dc5ce8210b1dcad7d57320c9e1946fd4b3fe302
|
6a785306a92d328a0d1427446ca773a9803d4cc0
|
refs/heads/master
| 2020-04-15T12:35:03.427589 | 2019-09-16T15:38:25 | 2019-09-16T15:38:25 | 164,681,323 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,826 |
py
|
#list记录以i为分段点的最长增长子序列的个数
#返回最大分段点的坐标
def Max(list, n):
    """Return the index of the largest value among ``list[0:n]``.

    Ties resolve to the earliest index (the comparison is strictly greater),
    and if no element exceeds 0 the function returns 0 — this matches the
    original behavior relied on by the LIS driver, where every entry is >= 1.

    NOTE(review): the parameter name ``list`` shadows the builtin; it is kept
    for interface compatibility with existing callers.
    """
    best = 0   # largest value seen so far (was misleadingly named ``max``)
    index = 0  # index of that value
    for i in range(n):
        if list[i] > best:
            best = list[i]
            index = i
    return index
def LIS(array,len,list,list_increase):
    """Classic O(n^2) longest-increasing-subsequence dynamic program.

    Out-parameters (mutated in place, expected empty/pre-sized by the caller):
      - ``list``: list[i] becomes the length of the longest increasing
        subsequence of ``array`` ending at index i.
      - ``list_increase``: list_increase[i] accumulates the elements that
        make up that subsequence (insertion order, deduplicated).

    Returns the index whose subsequence length is greatest (via ``Max``).

    NOTE(review): the parameters ``len`` and ``list`` shadow the builtins of
    the same name inside this function body.
    """
    # list[i] records the length of the longest increasing subsequence
    # whose split point (last element) is index i
    for i in range(0,len):
        list.append(1)                     # a single element has length 1
        list_increase[i].append(array[i])  # subsequence contains at least array[i]
        for j in range(0,i):
            # extend the best subsequence ending at j when array[i] can follow it
            if (array[i]>array[j])and(list[j]+1>list[i]):
                list[i]=list[j]+1
                # merge j's element set into i's, skipping duplicates
                for item in list_increase[j]:
                    if item not in list_increase[i]:
                        list_increase[i].append(item)
    location=Max(list,len)
    return location
# --- Driver: read a space-separated sequence and print its longest
# --- rise-then-fall ("bitonic") subsequence ---
arr=input()
arr_tmp=arr.strip(" ").split(" ")
# the array as originally entered (array is reversed later, array_0 keeps a copy)
array_0=[]
array=[]
for item in arr_tmp:
    array.append(int(item))
    array_0.append(int(item))
# list1[i]: longest increasing subsequence ending at i (left-to-right pass)
list1=[]
list_increase=[]
for i in range(0,len(array_0)):
    tmp_list=[]
    list_increase.append(tmp_list)
index=LIS(array,len(array),list1,list_increase)
#print(list1)
#print(list_increase)
# reverse and rerun: after un-reversing, list2[i] is the longest decreasing
# subsequence starting at i (right-to-left pass)
array.reverse()
list_reduce=[]
list2=[]
for i in range(0,len(array_0)):
    tmp_list = []
    list_reduce.append(tmp_list)
index2=LIS(array,len(array),list2,list_reduce)
list2.reverse()
list_reduce.reverse()
#print(list2)
#print(list_reduce)
# pick the peak index maximizing rise length + fall length
# (NOTE(review): ``sum`` and ``index`` shadow the builtin / earlier variable)
sum=0
index=0
for i in range(0, len(list1)):
    if sum<(list1[i]+list2[i]):
        sum=list1[i]+list2[i]
        index=i
list_increase[index].sort()
list_reduce[index].sort(reverse=True)
#print(list_increase[index])
#print(list_reduce[index])
# emit the rising part, then the falling part, skipping the repeated peak
print_list=[]
for item in list_increase[index]:
    print_list.append(item)
for i in range(1,len(list_reduce[index])):
    print_list.append(list_reduce[index][i])
for item in print_list:
    print(item,end=" ")
|
[
"[email protected]"
] | |
35b823ee571526aabe931d1cf528fedc446c7de5
|
55e9f3b00fc2e488597bab5225ed321c86efbd4b
|
/sdk/test/test_frequency_response.py
|
8e97d49e14aa132ed3efe4ee80569106b6d29d8d
|
[
"MIT"
] |
permissive
|
bs-yapily/yapily-sdk-python
|
ad9d04c28f3d744830734c3444c1cef8215206fd
|
0bba45e351b674eb655425a51190f539c4e9896f
|
refs/heads/master
| 2020-08-26T17:18:53.156429 | 2019-10-22T11:01:16 | 2019-10-22T11:01:16 | 217,085,936 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 965 |
py
|
# coding: utf-8
"""
Yapily API
To access endpoints that require authentication, use your application key and secret created in the Dashboard (https://dashboard.yapily.com) # noqa: E501
OpenAPI spec version: 0.0.155
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import yapily
from yapily.models.frequency_response import FrequencyResponse # noqa: E501
from yapily.rest import ApiException
class TestFrequencyResponse(unittest.TestCase):
    """FrequencyResponse unit test stubs"""

    # Auto-generated by swagger-codegen; the fixtures below are intentionally
    # empty placeholders until real construction values are filled in.

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testFrequencyResponse(self):
        """Test FrequencyResponse"""
        # FIXME: construct object with mandatory attributes with example values
        # model = yapily.models.frequency_response.FrequencyResponse()  # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
2eb15e7a7809dccc58b91240a1a0bdbde8f2ea7a
|
162e0e4791188bd44f6ce5225ff3b1f0b1aa0b0d
|
/examples/linear_model/plot_logistic_l1_l2_sparsity.py
|
afccba025af1f2bb50d6e3b57e30535232120bfa
|
[] |
no_license
|
testsleeekGithub/trex
|
2af21fa95f9372f153dbe91941a93937480f4e2f
|
9d27a9b44d814ede3996a37365d63814214260ae
|
refs/heads/master
| 2020-08-01T11:47:43.926750 | 2019-11-06T06:47:19 | 2019-11-06T06:47:19 | 210,987,245 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,328 |
py
|
"""
==============================================
L1 Penalty and Sparsity in Logistic Regression
==============================================
Comparison of the sparsity (percentage of zero coefficients) of solutions when
L1, L2 and Elastic-Net penalty are used for different values of C. We can see
that large values of C give more freedom to the model. Conversely, smaller
values of C constrain the model more. In the L1 penalty case, this leads to
sparser solutions. As expected, the Elastic-Net penalty sparsity is between
that of L1 and L2.
We classify 8x8 images of digits into two classes: 0-4 against 5-9.
The visualization shows coefficients of the models for varying C.
"""
print(__doc__)
# Authors: Alexandre Gramfort <[email protected]>
# Mathieu Blondel <[email protected]>
# Andreas Mueller <[email protected]>
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from mrex.linear_model import LogisticRegression
from mrex import datasets
from mrex.preprocessing import StandardScaler
X, y = datasets.load_digits(return_X_y=True)
X = StandardScaler().fit_transform(X)

# classify small against large digits (binary target: 0-4 vs 5-9)
# Bug fix: ``np.int`` was deprecated in NumPy 1.20 and removed in 1.24;
# the builtin ``int`` is the documented replacement.
y = (y > 4).astype(int)

l1_ratio = 0.5  # L1 weight in the Elastic-Net regularization

fig, axes = plt.subplots(3, 3)

# Set regularization parameter: one row of plots per value of C
for i, (C, axes_row) in enumerate(zip((1, 0.1, 0.01), axes)):
    # turn down tolerance for short training time
    clf_l1_LR = LogisticRegression(C=C, penalty='l1', tol=0.01, solver='saga')
    clf_l2_LR = LogisticRegression(C=C, penalty='l2', tol=0.01, solver='saga')
    clf_en_LR = LogisticRegression(C=C, penalty='elasticnet', solver='saga',
                                   l1_ratio=l1_ratio, tol=0.01)
    clf_l1_LR.fit(X, y)
    clf_l2_LR.fit(X, y)
    clf_en_LR.fit(X, y)

    coef_l1_LR = clf_l1_LR.coef_.ravel()
    coef_l2_LR = clf_l2_LR.coef_.ravel()
    coef_en_LR = clf_en_LR.coef_.ravel()

    # percentage of exactly-zero coefficients: the L1 norm induces sparsity
    sparsity_l1_LR = np.mean(coef_l1_LR == 0) * 100
    sparsity_l2_LR = np.mean(coef_l2_LR == 0) * 100
    sparsity_en_LR = np.mean(coef_en_LR == 0) * 100

    print("C=%.2f" % C)
    print("{:<40} {:.2f}%".format("Sparsity with L1 penalty:", sparsity_l1_LR))
    print("{:<40} {:.2f}%".format("Sparsity with Elastic-Net penalty:",
                                  sparsity_en_LR))
    print("{:<40} {:.2f}%".format("Sparsity with L2 penalty:", sparsity_l2_LR))
    print("{:<40} {:.2f}".format("Score with L1 penalty:",
                                 clf_l1_LR.score(X, y)))
    print("{:<40} {:.2f}".format("Score with Elastic-Net penalty:",
                                 clf_en_LR.score(X, y)))
    print("{:<40} {:.2f}".format("Score with L2 penalty:",
                                 clf_l2_LR.score(X, y)))

    if i == 0:
        axes_row[0].set_title("L1 penalty")
        axes_row[1].set_title("Elastic-Net\nl1_ratio = %s" % l1_ratio)
        axes_row[2].set_title("L2 penalty")

    # visualize each coefficient vector as an 8x8 image (one per penalty)
    for ax, coefs in zip(axes_row, [coef_l1_LR, coef_en_LR, coef_l2_LR]):
        ax.imshow(np.abs(coefs.reshape(8, 8)), interpolation='nearest',
                  cmap='binary', vmax=1, vmin=0)
        ax.set_xticks(())
        ax.set_yticks(())

    axes_row[0].set_ylabel('C = %s' % C)

plt.show()
|
[
"[email protected]"
] | |
71e8829afac3e0a0c65027c407736ec43eeb6262
|
0cba5529e387ba0f077b4e8ddeb96f914004f5df
|
/malaya/emotion.py
|
dcd419468d7b3fce6dc88b499f1cc790ea1925c7
|
[
"MIT"
] |
permissive
|
AsyrafAzlan/Malaya
|
dc78398ee6880578f40c5646a48882a5913217ae
|
3d5166173cf74881f7a56fffaaf391813c55d4f1
|
refs/heads/master
| 2021-05-21T22:47:41.863857 | 2020-04-03T15:00:21 | 2020-04-03T15:00:21 | 252,841,526 | 1 | 0 |
MIT
| 2020-04-03T21:04:44 | 2020-04-03T21:04:44 | null |
UTF-8
|
Python
| false | false | 1,861 |
py
|
from malaya.supervised import softmax
from malaya.path import PATH_EMOTION, S3_PATH_EMOTION
from herpetologist import check_type
_emotion_label = ['anger', 'fear', 'joy', 'love', 'sadness', 'surprise']
_availability = [
'bert',
'tiny-bert',
'albert',
'tiny-albert',
'xlnet',
'alxlnet',
]
def available_transformer_model():
    """
    List available transformer emotion analysis models.

    Returns the module-level ``_availability`` list of architecture names
    accepted by :func:`transformer`.
    """
    return _availability
def multinomial(**kwargs):
    """
    Load multinomial emotion model.

    Keyword arguments are forwarded to ``malaya.supervised.softmax.multinomial``.

    Returns
    -------
    BAYES : malaya._models._sklearn_model.BAYES class
    """
    return softmax.multinomial(
        PATH_EMOTION, S3_PATH_EMOTION, 'emotion', _emotion_label, **kwargs
    )
@check_type
def transformer(model: str = 'xlnet', **kwargs):
    """
    Load Transformer emotion model.

    Parameters
    ----------
    model : str, optional (default='xlnet')
        Model architecture supported. Allowed values:

        * ``'bert'`` - BERT architecture from google.
        * ``'tiny-bert'`` - BERT architecture from google with smaller parameters.
        * ``'albert'`` - ALBERT architecture from google.
        * ``'tiny-albert'`` - ALBERT architecture from google with smaller parameters.
        * ``'xlnet'`` - XLNET architecture from google.
        * ``'alxlnet'`` - XLNET architecture from google + Malaya.

    Returns
    -------
    MODEL : Transformer class
    """
    # Bug fix: the previous implementation referenced undefined locals
    # ``size`` and ``validate`` (NameError on every call); extra options now
    # flow through **kwargs, mirroring multinomial() above.
    model = model.lower()
    if model not in _availability:
        raise Exception(
            'model not supported, please check supported models from malaya.emotion.available_transformer_model()'
        )
    return softmax.transformer(
        PATH_EMOTION,
        S3_PATH_EMOTION,
        'emotion',
        _emotion_label,
        model = model,
        **kwargs,
    )
|
[
"[email protected]"
] | |
f68c22a3ebcff8045d9ad3131f9b30a050725a36
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/nouns/_filthiness.py
|
d1e4e34d83d291300555681e0bf38feb72c2e796
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 319 |
py
|
#calss header
class _FILTHINESS():
def __init__(self,):
self.name = "FILTHINESS"
self.definitions = [u'the quality of being very dirty']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
|
[
"[email protected]"
] | |
b033e8f0b13e41e324b11e403739c993c52bbe7e
|
a4a01e251b194f6d3c6654a2947a33fec2c03e80
|
/PythonWeb/Ajax/1809/Day02/1808/AjaxDemo02/run01.py
|
35ac2bfbdbdab18d5da55f05332beae995cd1c85
|
[] |
no_license
|
demo112/1809
|
033019043e2e95ebc637b40eaf11c76bfd089626
|
e22972229e5e7831dce2aae0b53ce19a6e3bb106
|
refs/heads/master
| 2020-04-09T07:10:49.906231 | 2019-02-27T13:08:45 | 2019-02-27T13:08:45 | 160,143,869 | 0 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,861 |
py
|
from flask import Flask, render_template, request
from flask_sqlalchemy import SQLAlchemy
import json
import pymysql
pymysql.install_as_MySQLdb()
app = Flask(__name__)
# NOTE(review): database credentials are hard-coded in source; move them to
# configuration/environment variables before deploying.
app.config['SQLALCHEMY_DATABASE_URI']="mysql://root:123456@localhost:3306/flask"
db = SQLAlchemy(app)
class Login(db.Model):
    """ORM model for the ``login`` table (id, login name/password, user name)."""
    __tablename__ = "login"
    id = db.Column(db.Integer,primary_key=True)
    lname = db.Column(db.String(30))
    lpwd = db.Column(db.String(30))
    uname = db.Column(db.String(30))

    def to_dict(self):
        """Return a JSON-serializable dict of this row's columns."""
        dic = {
            'id':self.id,
            'lname' : self.lname,
            'lpwd' : self.lpwd,
            'uname' : self.uname,
        }
        return dic
@app.route('/00-homework')
def homework():
    """Serve the homework exercise page."""
    return render_template('00-homework.html')
@app.route('/00-server')
def server00():
    """Ajax username-availability check: report taken vs. OK."""
    requested_name = request.args.get('lname')
    existing = Login.query.filter_by(lname=requested_name).first()
    if existing is None:
        return "通过"
    return "用户名称已经存在"
@app.route('/01-post')
def post():
    """Serve the POST-demo form page."""
    return render_template("01-post.html")
@app.route('/01-server',methods=['POST'])
def server01():
    """Echo the POSTed ``uname``/``uage`` form fields back to the client."""
    uname = request.form['uname']
    uage = request.form['uage']
    return "传递过来的uname的值为:%s,传递过来的uage的值为:%s" % (uname,uage)
@app.route('/02-form',methods=['GET','POST'])
def form():
    """GET renders the demo form page; POST echoes the submitted fields."""
    if request.method != 'GET':
        fields = (request.form['uname'], request.form['uage'])
        return "传递过来的uname的值为:%s,传递过来的uage的值为:%s" % fields
    return render_template('02-form.html')
@app.route('/03-getlogin')
def getlogin():
    """Serve the login-list demo page."""
    return render_template('03-getlogin.html')
@app.route('/03-server')
def server03():
    """Return every login row as one concatenated string (Ajax demo).

    Uses ``str.join`` instead of the original repeated ``+=`` concatenation,
    which is quadratic in the worst case.
    """
    logins = Login.query.all()
    return "".join(
        "%s%s%s%s" % (login.id, login.lname, login.lpwd, login.uname)
        for login in logins
    )
@app.route('/04-json')
def json_views():
    """Serve the JSON demo page."""
    return render_template("04-json.html")
@app.route('/04-server')
def server04():
    """Return a hard-coded list of person dicts serialized as JSON.

    Cleanup: removed the commented-out single-dict variant and renamed the
    local from ``list`` (which shadowed the builtin) to ``people``.
    """
    people = [
        {
            "name":"wangwc",
            "age":35,
            "gender":"Male",
        },
        {
            'name':'RapWang',
            'age':40,
            'gender':'Female',
        }
    ]
    return json.dumps(people)
@app.route('/05-json-login')
def json_login():
    """Serve the JSON-login demo page."""
    return render_template('05-json-login.html')
@app.route('/05-server')
def server05():
    """Return the Login row with id 1 serialized as JSON."""
    # fetch the Login record whose id is 1
    login=Login.query.filter_by(id=1).first()
    # NOTE(review): raises AttributeError if no row with id 1 exists
    jsonStr=json.dumps(login.to_dict())
    return jsonStr
if __name__ == "__main__":
app.run(debug=True)
|
[
"[email protected]"
] | |
35e5326d1aad1c103b3e76b9efefdd92864a2926
|
45edff14271724c5bf27e62e96eeb635840eae22
|
/ML/ensemble_learning/util.py
|
d998161fe6c0a48ae7207841cc63d1e0147b0db8
|
[] |
no_license
|
DaiJitao/machine_learning
|
1e41208dc94836a97e57a4b0f5778f8da2bb81d4
|
49e1db9ecbfbf886a11ce416eea402d214cf2049
|
refs/heads/master
| 2021-06-25T23:52:06.066315 | 2021-02-07T16:17:50 | 2021-02-07T16:17:50 | 209,712,507 | 3 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 347 |
py
|
"""
决策树常用的工具类:指标的计算、数据的加载
"""
import numpy as np
def load_data():
    """Build the example dataset from section 8.1.3, chapter 8 of
    *Statistical Learning Methods*.

    :return: numpy integer array of shape (10, 2); each row is (x, label)
             for x = 0..9 with labels +1/-1.
    """
    labels = [1, 1, 1, -1, -1, -1, 1, 1, 1, -1]
    return np.array([[x, y] for x, y in enumerate(labels)])
|
[
"[email protected]"
] | |
bbae3698bee755a86e113f6ff4e7d52fe4f8a1ca
|
7b12eb45c1ea76ad9c186b858b5dfebf2c5b862a
|
/.history/DEBER_20210905000023.py
|
9516b0bda58c56e4e39bbf9f8a08dc9dc32c935e
|
[
"MIT"
] |
permissive
|
Alopezm5/PROYECTO-PARTE-1
|
a1dce04009b24852c1c60e69bdf602ad3af0574b
|
bd7a8594edf08d41c6ca544cf6bac01ea4fcb684
|
refs/heads/main
| 2023-07-25T11:22:17.994770 | 2021-09-07T03:27:34 | 2021-09-07T03:27:34 | 403,670,226 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,447 |
py
|
import os
class Empresa():
    """Company data captured interactively from the console and echoed back."""

    def __init__(self,nom="",ruc=0,dire="",tele=0,ciud="",tipEmpr=""):
        self.nombre=nom           # company name
        self.ruc=ruc              # RUC (company registration number)
        self.direccion=dire       # street address
        self.telefono=tele        # contact phone number
        self.ciudad=ciud          # city
        self.tipoEmpresa=tipEmpr  # 'publica' or 'privada'

    def datosEmpresa(self):#3
        """Prompt on stdin for every company field."""
        self.nombre=input("Ingresar nombre de la empresa: ")
        self.ruc=int(input("Ingresar ruc de la empresa: "))
        self.direccion=input("Ingresar la direccion de la empresa: ")
        self.telefono=int(input("Ingresar el numero de telefono de la empresa: "))
        self.ciudad=input("Ingresar ciudad donde esta la empresa: ")
        self.tipoEmpresa=input("Ingresar tipo de empresa publica o privada: ")

    def mostrarEmpresa(self):
        """Print a formatted summary of the company data."""
        print("")
        print("Empresa")
        print("La empresa de nombre {}\n De RUC #{} \n Está ubicada en {}\n Se puede comunicar al #{}\n Está empresa esta en la ciudad de {}\n Es una entidad {}".format(self.nombre,self.ruc,self.direccion, self.telefono,self.ciudad, self.tipoEmpresa))
class Empleado(Empresa):
    """Employee data captured interactively from the console.

    NOTE(review): inherits from Empresa but never calls super().__init__(),
    so company attributes are not initialized on Empleado instances.
    """

    def __init__(self,nom="",cedu=0,dire="",tele=0,email="",estado="",profe=""):
        self.nombre=nom            # employee name
        self.cedula=cedu           # national id number
        self.direccion=dire        # home address
        self.telefono=tele         # contact phone number
        self.correo=email          # personal email
        self.estadocivil=estado    # marital status (blue-collar form)
        self.profesion=profe       # profession (office form)

    def empleado(self):
        """Prompt for the fields common to every employee."""
        self.nombre=input("Ingresar nombre del empleado: ")
        self.cedula=int(input("Ingresar numero de cedula del empleado: "))
        self.direccion=input("Ingresar la direccion del empleado: ")
        self.telefono=int(input("Ingresar numero de contacto del empleado: "))
        self.correo=input("Ingresar correo personal del empleado: ")

    def empleadoObrero(self):
        """Prompt for the blue-collar-specific field (marital status)."""
        self.estadocivil=input("Ingresar estado civil del empleado: ")

    def empleadoOficina(self):
        """Prompt for the office-specific field (profession)."""
        self.profesion=input("Ingresar profesion del empleado: ")

    def mostrarempleado(self):
        """Print a formatted summary of the employee data."""
        print("El empleado: {} con # de C.I. {} \n Con direccion {}, y numero de contacto{}\n Y correo {}".format(self.nombre,self.cedula,self.direccion,self.telefono,self.correo))
class Departamento(Empleado):
    """Department assignment for an employee.

    NOTE(review): does not call super().__init__(), so inherited Empleado
    attributes are not initialized on Departamento instances.
    """

    def __init__(self,dep=""):
        self.departamento=dep  # department name

    def departa(self):
        """Prompt for the employee's department."""
        self.departamento=input("Ingresar el departamento al que pertenece el empleado: ")

    def mostrarDeparta(self):
        """Print the employee's department."""
        print("El empleado pertenece al departamento de: {}".format(self.departamento))
class Pagos(Empleado):
    """Payroll computation for one employee (salary, overtime, deductions)."""

    def __init__(self, desper=0,valhora=0,hotraba=0,extra=0,suel=0,hrecar=0,hextra=0,pres=0,mcou=0,valho=0,sobtiem=0,comofi=0,antobre=0,iemple=0,cuopres=0,tot=0,liquid=0,cuota=0,anti=0,comi=0,fNomina="",fIngreso="",iess=0):
        self.permisos=desper
        self.valorhora=valhora
        self.horastrabajadas=hotraba
        self.valextra=extra
        self.sueldo= suel                 # base monthly salary
        self.horasRecargo= hrecar         # 50%-surcharge hours
        self.horasExtraordinarias=hextra  # double-rate hours
        self.prestamo= pres               # outstanding loan amount
        self.mesCuota= mcou               # months to pay off the loan
        self.valor_hora= valho
        self.sobretiempo=sobtiem
        self.comEmpOficina = comofi       # office commission
        self.antiEmpObrero = antobre      # seniority bonus
        self.iessEmpleado = iemple        # social-security deduction
        self.cuotaPrestamo=cuopres
        self.totdes = tot                 # total deductions
        self.toting = 0                   # total income (bug fix: previously only
                                          # created inside calculoSueldo)
        self.liquidoRecibir = liquid      # net pay
        self.mesCuota=cuota               # NOTE(review): overwrites the mcou value above
        self.antiguedad=anti
        self.comision=comi
        self.fechaNomina=fNomina
        self.fechaIngreso=fIngreso
        self.iess=iess

    def pagoNormal(self):
        """Prompt for salary, loan, commission, seniority and IESS rate."""
        self.sueldo=float(input("Ingresar sueldo del trabajador: $ "))
        self.prestamo=float(input("Ingresar monto del prestamo que ha generado el empleado: $ "))
        self.mesCuota=int(input("Ingresar meses a diferir el prestamo: "))
        self.comision=float(input("Ingresar valor de la comsion: "))
        self.antiguedad=int(input("Ingresar antiguedad: "))
        self.iess=float(input("Ingresar valor del iees recordar que debe ser porcentuado Ejemplo si quiere decir 20% debe ingresar 0.20"))

    def pagoExtra(self):
        """Prompt for overtime hours and the two payroll dates.

        Bug fix: the dates were previously converted with ``float()``, which
        cannot parse an 'año-mes-dia' string and crashed; they are now parsed
        as ISO dates.
        """
        from datetime import date
        self.horasRecargo=int(input("Ingresar horas de recargo: "))
        self.horasExtraordinarias=int(input("Ingresar horas extraordinarias: "))
        self.fechaNomina=date.fromisoformat(input("Ingresar fecha de nomida (formato año-mes-dia): "))
        self.fechaIngreso=date.fromisoformat(input("Ingresar fecha de ingreso (formato año-mes-dia): "))

    def calculoSueldo(self):
        """Compute gross income, deductions and net pay from the inputs."""
        self.valor_hora=self.sueldo/240
        self.sobretiempo= self.valor_hora * (self.horasRecargo*0.50+self.horasExtraordinarias*2)
        self.comEmpOficina = self.comision*self.sueldo
        # seniority bonus: factor * years of service * salary
        # (bug fix: date subtraction now yields a timedelta; use .days)
        self.antiEmpObrero = self.antiguedad*((self.fechaNomina - self.fechaIngreso).days/365)*self.sueldo
        self.iessEmpleado = self.iess*(self.sueldo+self.sobretiempo)
        self.cuotaPrestamo=self.prestamo/self.mesCuota
        self.toting = self.sueldo+self.sobretiempo+ self.comEmpOficina + self.antiEmpObrero
        self.totdes = self.iessEmpleado + self.prestamo
        self.liquidoRecibir = self.toting - self.totdes

    def mostrarSueldo(self):
        """Print the employee's salary.

        Bug fix: the format placeholder was never filled in, so the literal
        '${}' was printed. The base salary is shown to match the message;
        ``self.liquidoRecibir`` holds the computed net pay if needed.
        """
        print("El empleado tiene un sueldo de ${}".format(self.sueldo))
# --- Interactive driver: gather all data, compute payroll, print report ---
emp=Empresa()
emp.datosEmpresa()
os.system ("cls")   # clear console between input sections (Windows-only)
emple=Empleado()
emple.empleado()
os.system ("cls")
emple.empleadoObrero()
emple.empleadoOficina()
os.system ("cls")
depa=Departamento()
depa.departa()
pag=Pagos()
pag.pagoNormal()
pag.pagoExtra()
pag.calculoSueldo()
os.system ("cls")
# final report
emp.mostrarEmpresa()
print("")
emple.mostrarempleado()
print("")
pag.mostrarSueldo()
|
[
"[email protected]"
] | |
c2154d3a5fe4c8670860e1c2b5ea7301a892ea20
|
780b6cca690a213ac908b1cd5faef5366a18dc4e
|
/314_print_names_to_columns/save1_nopass.py
|
8cb6c53bb39aa700c4f9bc48b51e4735762b74ba
|
[] |
no_license
|
katkaypettitt/pybites-all
|
899180a588e460b343c00529c6a742527e4ea1bc
|
391c07ecac0d92d5dc7c537bcf92eb6c1fdda896
|
refs/heads/main
| 2023-08-22T16:33:11.171732 | 2021-10-24T17:29:44 | 2021-10-24T17:29:44 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 330 |
py
|
from typing import List # not needed when we upgrade to 3.9
def print_names_to_columns(names: List[str], cols: int = 2) -> None:
    """Print *names* arranged in *cols* columns.

    Each cell is rendered as ``| <name>`` with the name left-padded to a
    field width of 9; rows are separated by newlines.
    """
    cells = [f'| {name:{9}}' for name in names]
    rows = (' '.join(cells[i:i + cols]) for i in range(0, len(cells), cols))
    print(''.join(row + '\n' for row in rows))
|
[
"[email protected]"
] | |
d3e323c429533162b102744f30b393fd5c2f8081
|
0951cb62572e75a8e8a7ef1f98092110bb73d20a
|
/pandas/tests/categorical/test_operators.py
|
09a0607b67a88f0f3b238c65434191cfa6e3562f
|
[
"BSD-3-Clause"
] |
permissive
|
ActiveState/pandas
|
452de0fe049412f273caf6ebc86b8d0ffa0c68e6
|
106a04f14e0c090f95784c311f3d07c35e6ef276
|
refs/heads/master
| 2023-08-30T09:05:13.587536 | 2018-01-04T15:25:01 | 2018-01-04T15:25:01 | 112,227,117 | 1 | 4 |
BSD-3-Clause
| 2023-07-28T17:52:11 | 2017-11-27T17:32:22 |
Python
|
UTF-8
|
Python
| false | false | 11,023 |
py
|
# -*- coding: utf-8 -*-
import pytest
import pandas as pd
import numpy as np
import pandas.util.testing as tm
from pandas import Categorical, Series, DataFrame, date_range
from pandas.tests.categorical.common import TestCategorical
class TestCategoricalOpsWithFactor(TestCategorical):
    """Comparison-operator tests that use the shared ``self.factor`` fixture."""

    def test_categories_none_comparisons(self):
        # Building an ordered Categorical without explicit categories must
        # infer the same categories as the shared fixture.
        factor = Categorical(['a', 'b', 'b', 'a',
                              'a', 'c', 'c', 'c'], ordered=True)
        tm.assert_categorical_equal(factor, self.factor)

    def test_comparisons(self):
        # Boolean masks from comparisons against a scalar must match the
        # equivalent comparison on the underlying ndarray.
        result = self.factor[self.factor == 'a']
        expected = self.factor[np.asarray(self.factor) == 'a']
        tm.assert_categorical_equal(result, expected)

        result = self.factor[self.factor != 'a']
        expected = self.factor[np.asarray(self.factor) != 'a']
        tm.assert_categorical_equal(result, expected)

        result = self.factor[self.factor < 'c']
        expected = self.factor[np.asarray(self.factor) < 'c']
        tm.assert_categorical_equal(result, expected)

        result = self.factor[self.factor > 'a']
        expected = self.factor[np.asarray(self.factor) > 'a']
        tm.assert_categorical_equal(result, expected)

        result = self.factor[self.factor >= 'b']
        expected = self.factor[np.asarray(self.factor) >= 'b']
        tm.assert_categorical_equal(result, expected)

        result = self.factor[self.factor <= 'b']
        expected = self.factor[np.asarray(self.factor) <= 'b']
        tm.assert_categorical_equal(result, expected)

        n = len(self.factor)
        other = self.factor[np.random.permutation(n)]
        result = self.factor == other
        expected = np.asarray(self.factor) == np.asarray(other)
        tm.assert_numpy_array_equal(result, expected)

        result = self.factor == 'd'
        expected = np.repeat(False, len(self.factor))
        tm.assert_numpy_array_equal(result, expected)

        # comparisons with categoricals
        cat_rev = Categorical(
            ["a", "b", "c"], categories=["c", "b", "a"], ordered=True)
        cat_rev_base = Categorical(
            ["b", "b", "b"], categories=["c", "b", "a"], ordered=True)
        cat = Categorical(["a", "b", "c"], ordered=True)
        cat_base = Categorical(
            ["b", "b", "b"], categories=cat.categories, ordered=True)

        # comparisons need to take categories ordering into account
        res_rev = cat_rev > cat_rev_base
        exp_rev = np.array([True, False, False])
        tm.assert_numpy_array_equal(res_rev, exp_rev)

        res_rev = cat_rev < cat_rev_base
        exp_rev = np.array([False, False, True])
        tm.assert_numpy_array_equal(res_rev, exp_rev)

        res = cat > cat_base
        exp = np.array([False, False, True])
        tm.assert_numpy_array_equal(res, exp)

        # Only categoricals with the same categories can be compared.
        # Idiom fix: pytest.raises as a context manager replaces the old
        # throwaway ``def f(): ...`` / lambda callback pattern throughout.
        with pytest.raises(TypeError):
            cat > cat_rev

        cat_rev_base2 = Categorical(
            ["b", "b", "b"], categories=["c", "b", "a", "d"])

        with pytest.raises(TypeError):
            cat_rev > cat_rev_base2

        # Only categoricals with the same ordering information can be compared
        cat_unorderd = cat.set_ordered(False)
        assert not (cat > cat).any()

        with pytest.raises(TypeError):
            cat > cat_unorderd

        # comparison (in both directions) with Series will raise
        s = Series(["b", "b", "b"])
        with pytest.raises(TypeError):
            cat > s
        with pytest.raises(TypeError):
            cat_rev > s
        with pytest.raises(TypeError):
            s < cat
        with pytest.raises(TypeError):
            s < cat_rev

        # comparison with numpy.array will raise in both direction, but only
        # on newer numpy versions
        a = np.array(["b", "b", "b"])
        with pytest.raises(TypeError):
            cat > a
        with pytest.raises(TypeError):
            cat_rev > a

        # Make sure that unequal comparison take the categories order in
        # account
        cat_rev = Categorical(
            list("abc"), categories=list("cba"), ordered=True)
        exp = np.array([True, False, False])
        res = cat_rev > "b"
        tm.assert_numpy_array_equal(res, exp)
class TestCategoricalOps(object):
def test_datetime_categorical_comparison(self):
dt_cat = Categorical(date_range('2014-01-01', periods=3), ordered=True)
tm.assert_numpy_array_equal(dt_cat > dt_cat[0],
np.array([False, True, True]))
tm.assert_numpy_array_equal(dt_cat[0] < dt_cat,
np.array([False, True, True]))
def test_reflected_comparison_with_scalars(self):
# GH8658
cat = Categorical([1, 2, 3], ordered=True)
tm.assert_numpy_array_equal(cat > cat[0],
np.array([False, True, True]))
tm.assert_numpy_array_equal(cat[0] < cat,
np.array([False, True, True]))
def test_comparison_with_unknown_scalars(self):
# https://github.com/pandas-dev/pandas/issues/9836#issuecomment-92123057
# and following comparisons with scalars not in categories should raise
# for unequal comps, but not for equal/not equal
cat = Categorical([1, 2, 3], ordered=True)
pytest.raises(TypeError, lambda: cat < 4)
pytest.raises(TypeError, lambda: cat > 4)
pytest.raises(TypeError, lambda: 4 < cat)
pytest.raises(TypeError, lambda: 4 > cat)
tm.assert_numpy_array_equal(cat == 4,
np.array([False, False, False]))
tm.assert_numpy_array_equal(cat != 4,
np.array([True, True, True]))
@pytest.mark.parametrize('data,reverse,base', [
(list("abc"), list("cba"), list("bbb")),
([1, 2, 3], [3, 2, 1], [2, 2, 2])]
)
def test_comparisons(self, data, reverse, base):
cat_rev = Series(
Categorical(data, categories=reverse, ordered=True))
cat_rev_base = Series(
Categorical(base, categories=reverse, ordered=True))
cat = Series(Categorical(data, ordered=True))
cat_base = Series(
Categorical(base, categories=cat.cat.categories, ordered=True))
s = Series(base)
a = np.array(base)
# comparisons need to take categories ordering into account
res_rev = cat_rev > cat_rev_base
exp_rev = Series([True, False, False])
tm.assert_series_equal(res_rev, exp_rev)
res_rev = cat_rev < cat_rev_base
exp_rev = Series([False, False, True])
tm.assert_series_equal(res_rev, exp_rev)
res = cat > cat_base
exp = Series([False, False, True])
tm.assert_series_equal(res, exp)
scalar = base[1]
res = cat > scalar
exp = Series([False, False, True])
exp2 = cat.values > scalar
tm.assert_series_equal(res, exp)
tm.assert_numpy_array_equal(res.values, exp2)
res_rev = cat_rev > scalar
exp_rev = Series([True, False, False])
exp_rev2 = cat_rev.values > scalar
tm.assert_series_equal(res_rev, exp_rev)
tm.assert_numpy_array_equal(res_rev.values, exp_rev2)
# Only categories with same categories can be compared
def f():
cat > cat_rev
pytest.raises(TypeError, f)
# categorical cannot be compared to Series or numpy array, and also
# not the other way around
pytest.raises(TypeError, lambda: cat > s)
pytest.raises(TypeError, lambda: cat_rev > s)
pytest.raises(TypeError, lambda: cat > a)
pytest.raises(TypeError, lambda: cat_rev > a)
pytest.raises(TypeError, lambda: s < cat)
pytest.raises(TypeError, lambda: s < cat_rev)
pytest.raises(TypeError, lambda: a < cat)
pytest.raises(TypeError, lambda: a < cat_rev)
@pytest.mark.parametrize('ctor', [
lambda *args, **kwargs: Categorical(*args, **kwargs),
lambda *args, **kwargs: Series(Categorical(*args, **kwargs)),
])
def test_unordered_different_order_equal(self, ctor):
# https://github.com/pandas-dev/pandas/issues/16014
c1 = ctor(['a', 'b'], categories=['a', 'b'], ordered=False)
c2 = ctor(['a', 'b'], categories=['b', 'a'], ordered=False)
assert (c1 == c2).all()
c1 = ctor(['a', 'b'], categories=['a', 'b'], ordered=False)
c2 = ctor(['b', 'a'], categories=['b', 'a'], ordered=False)
assert (c1 != c2).all()
c1 = ctor(['a', 'a'], categories=['a', 'b'], ordered=False)
c2 = ctor(['b', 'b'], categories=['b', 'a'], ordered=False)
assert (c1 != c2).all()
c1 = ctor(['a', 'a'], categories=['a', 'b'], ordered=False)
c2 = ctor(['a', 'b'], categories=['b', 'a'], ordered=False)
result = c1 == c2
tm.assert_numpy_array_equal(np.array(result), np.array([True, False]))
def test_unordered_different_categories_raises(self):
c1 = Categorical(['a', 'b'], categories=['a', 'b'], ordered=False)
c2 = Categorical(['a', 'c'], categories=['c', 'a'], ordered=False)
with tm.assert_raises_regex(TypeError,
"Categoricals can only be compared"):
c1 == c2
def test_compare_different_lengths(self):
c1 = Categorical([], categories=['a', 'b'])
c2 = Categorical([], categories=['a'])
msg = "Categories are different lengths"
with tm.assert_raises_regex(TypeError, msg):
c1 == c2
def test_numeric_like_ops(self):
df = DataFrame({'value': np.random.randint(0, 10000, 100)})
labels = ["{0} - {1}".format(i, i + 499) for i in range(0, 10000, 500)]
cat_labels = Categorical(labels, labels)
df = df.sort_values(by=['value'], ascending=True)
df['value_group'] = pd.cut(df.value, range(0, 10500, 500),
right=False, labels=cat_labels)
# numeric ops should not succeed
for op in ['__add__', '__sub__', '__mul__', '__truediv__']:
pytest.raises(TypeError,
lambda: getattr(df, op)(df))
# reduction ops should not succeed (unless specifically defined, e.g.
# min/max)
s = df['value_group']
for op in ['kurt', 'skew', 'var', 'std', 'mean', 'sum', 'median']:
pytest.raises(TypeError,
lambda: getattr(s, op)(numeric_only=False))
# mad technically works because it takes always the numeric data
# numpy ops
s = Series(Categorical([1, 2, 3, 4]))
pytest.raises(TypeError, lambda: np.sum(s))
# numeric ops on a Series
for op in ['__add__', '__sub__', '__mul__', '__truediv__']:
pytest.raises(TypeError, lambda: getattr(s, op)(2))
# invalid ufunc
pytest.raises(TypeError, lambda: np.log(s))
|
[
"[email protected]"
] | |
010885dad083a7b1ec9ebb80c5c3d64b92989605
|
37930870719caede967fdf6905c032e22d086e8b
|
/scripts/imaging/chaining/slam/light_parametric__mass_light_dark__source_parametric.py
|
80e4df39df68667dc5cd365fcf51cfac21c6f9f0
|
[] |
no_license
|
Cywtim/autolens_workspace
|
cbede944c0f85ee95cd7362fee957ef77e701280
|
da40cafee8dc26e5d8b1041888fb280598e74a5e
|
refs/heads/master
| 2023-04-05T14:22:06.091992 | 2021-04-15T20:29:28 | 2021-04-15T20:29:28 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,711 |
py
|
"""
SLaM (Source, Light and Mass): Light Parametric + Mass Total + Source Parametric
================================================================================
SLaM pipelines break the analysis down into multiple pipelines which focus on modeling a specific aspect of the strong
lens, first the Source, then the (lens) Light and finally the Mass. Each of these pipelines has it own inputs which
which customize the model and analysis in that pipeline.
The models fitted in earlier pipelines determine the model used in later pipelines. For example, if the SOURCE PIPELINE
uses a parametric `EllSersic` profile for the bulge, this will be used in the subsequent MASS LIGHT DARK PIPELINE.
Using a SOURCE PARAMETRIC PIPELINE, LIGHT PIPELINE and a MASS LIGHT DARK PIPELINE this SLaM script fits `Imaging` of
a strong lens system, where in the final model:
- The lens galaxy's light is a bulge `EllSersic`.
- The lens galaxy's stellar mass distribution is a bulge tied to the light model above.
- The lens galaxy's dark matter mass distribution is modeled as a `EllNFWMCRLudlow`.
- The source galaxy's light is a parametric `EllSersic`.
This runner uses the SLaM pipelines:
`source_parametric/source_parametric__with_lens_light`
`light_parametric/with_lens_light`
`mass_total/mass_light_dark`
Check them out for a detailed description of the analysis!
"""
# %matplotlib inline
# from pyprojroot import here
# workspace_path = str(here())
# %cd $workspace_path
# print(f"Working Directory has been set to `{workspace_path}`")
import os
import sys
from os import path
import autofit as af
import autolens as al
import autolens.plot as aplt
sys.path.insert(0, os.getcwd())
import slam
"""
__Dataset__
Load the `Imaging` data, define the `Mask2D` and plot them.
"""
dataset_name = "light_sersic__mass_mlr_nfw__source_sersic"
dataset_path = path.join("dataset", "imaging", "with_lens_light", dataset_name)
imaging = al.Imaging.from_fits(
image_path=path.join(dataset_path, "image.fits"),
noise_map_path=path.join(dataset_path, "noise_map.fits"),
psf_path=path.join(dataset_path, "psf.fits"),
pixel_scales=0.1,
)
mask = al.Mask2D.circular(
shape_native=imaging.shape_native, pixel_scales=imaging.pixel_scales, radius=3.0
)
imaging = imaging.apply_mask(mask=mask)
imaging_plotter = aplt.ImagingPlotter(imaging=imaging)
imaging_plotter.subplot_imaging()
"""
__Paths__
The path the results of all chained searches are output:
"""
path_prefix = path.join("imaging", "slam", dataset_name)
"""
__Redshifts__
The redshifts of the lens and source galaxies, which are used to perform unit converions of the model and data (e.g.
from arc-seconds to kiloparsecs, masses to solar masses, etc.).
"""
redshift_lens = 0.5
redshift_source = 1.0
"""
__HYPER SETUP__
The `SetupHyper` determines which hyper-mode features are used during the model-fit.
"""
setup_hyper = al.SetupHyper(
hyper_galaxies_lens=False,
hyper_galaxies_source=False,
hyper_image_sky=None,
hyper_background_noise=None,
)
"""
__SOURCE PARAMETRIC PIPELINE (with lens light)__
The SOURCE PARAMETRIC PIPELINE (with lens light) uses three searches to initialize a robust model for the
source galaxy's light, which in this example:
- Uses a parametric `EllSersic` bulge.
- Uses an `EllIsothermal` model for the lens's total mass distribution with an `ExternalShear`.
__Settings__:
- Mass Centre: Fix the mass profile centre to (0.0, 0.0) (this assumption will be relaxed in the MASS LIGHT DARK
PIPELINE).
"""
analysis = al.AnalysisImaging(dataset=imaging)
bulge = af.Model(al.lp.EllSersic)
bulge.centre = (0.0, 0.0)
source_parametric_results = slam.source_parametric.with_lens_light(
path_prefix=path_prefix,
analysis=analysis,
setup_hyper=setup_hyper,
lens_bulge=bulge,
lens_disk=None,
mass=af.Model(al.mp.EllIsothermal),
shear=af.Model(al.mp.ExternalShear),
source_bulge=af.Model(al.lp.EllSersic),
mass_centre=(0.0, 0.0),
redshift_lens=redshift_lens,
redshift_source=redshift_source,
)
"""
__LIGHT PARAMETRIC PIPELINE__
The LIGHT PARAMETRIC PIPELINE uses one search to fit a complex lens light model to a high level of accuracy, using the
lens mass model and source light model fixed to the maximum log likelihood result of the SOURCE PARAMETRIC PIPELINE.
In this example it:
- Uses a parametric `EllSersic` bulge [Do not use the results of the SOURCE PARAMETRIC PIPELINE to initialize priors].
- Uses an `EllIsothermal` model for the lens's total mass distribution [fixed from SOURCE PARAMETRIC PIPELINE].
- Uses the `EllSersic` model representing a bulge for the source's light [fixed from SOURCE PARAMETRIC PIPELINE].
- Carries the lens redshift, source redshift and `ExternalShear` of the SOURCE PIPELINE through to the MASS
PIPELINE [fixed values].
"""
bulge = af.Model(al.lp.EllSersic)
light_results = slam.light_parametric.with_lens_light(
path_prefix=path_prefix,
analysis=analysis,
setup_hyper=setup_hyper,
source_results=source_parametric_results,
lens_bulge=bulge,
lens_disk=None,
)
"""
__MASS LIGHT DARK PIPELINE (with lens light)__
The MASS LIGHT DARK PIPELINE (with lens light) uses one search to fits a complex lens mass model to a high level of
accuracy, using the source model of the SOURCE PIPELINE and the lens light model of the LIGHT PARAMETRIC PIPELINE to
initialize the model priors . In this example it:
- Uses a parametric `EllSersic` bulge for the lens galaxy's light and its stellar mass [12 parameters: fixed from
LIGHT PARAMETRIC PIPELINE].
- The lens galaxy's dark matter mass distribution is a `EllNFWMCRLudlow` whose centre is aligned with bulge of
the light and stellar mass mdoel above [5 parameters].
- Uses the `EllSersic` model representing a bulge for the source's light [priors initialized from SOURCE
PARAMETRIC PIPELINE].
- Carries the lens redshift, source redshift and `ExternalShear` of the SOURCE PARAMETRIC PIPELINE through to the MASS
LIGHT DARK PIPELINE.
"""
analysis = al.AnalysisImaging(dataset=imaging)
lens_bulge = af.Model(al.lmp.EllSersic)
dark = af.Model(al.mp.EllNFWMCRLudlow)
dark.centre = lens_bulge.centre
mass_results = slam.mass_light_dark.with_lens_light(
path_prefix=path_prefix,
analysis=analysis,
setup_hyper=setup_hyper,
source_results=source_parametric_results,
light_results=light_results,
lens_bulge=lens_bulge,
lens_disk=None,
lens_envelope=None,
dark=dark,
)
"""
Finish.
"""
|
[
"[email protected]"
] | |
d1564abb5583ba7d937b0d846491cf7aa40a1cb2
|
00ef8e1eb57b73427508b20aadf0266da6b1f900
|
/rlf/exp_mgr/viz_utils.py
|
f323dee2afc60a42bb37336d3b28e50fe18fb7b4
|
[] |
no_license
|
amy12xx/rl-toolkit
|
f4643935cc8afd960356bfeae74c233d2596dea9
|
8254df8346752ea0226ae2064cc1eabc839567b0
|
refs/heads/master
| 2023-08-14T00:56:52.270642 | 2021-09-28T15:59:32 | 2021-09-28T15:59:32 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,503 |
py
|
"""
Utilities for manipulating images, rendering images, and rendering videos.
"""
import os
import os.path as osp
from argparse import Namespace
from typing import List, Optional, Union
import cv2
import matplotlib.pyplot as plt
import numpy as np
import rlf.rl.utils as rutils
try:
import wandb
except:
pass
def append_text_to_image(
image: np.ndarray, lines: List[str], from_bottom: bool = False
) -> np.ndarray:
"""
Args:
image: The NxMx3 frame to add the text to.
lines: The list of strings (new line separated) to add to the image.
Returns:
image: (np.array): The modified image with the text appended.
"""
h, w, c = image.shape
font_size = 0.5
font_thickness = 1
font = cv2.FONT_HERSHEY_SIMPLEX
blank_image = np.zeros(image.shape, dtype=np.uint8)
if from_bottom:
y = image.shape[0]
else:
y = 0
for line in lines:
textsize = cv2.getTextSize(line, font, font_size, font_thickness)[0]
if from_bottom:
y -= textsize[1] + 10
else:
y += textsize[1] + 10
x = 10
cv2.putText(
blank_image,
line,
(x, y),
font,
font_size,
(255, 255, 255),
font_thickness,
lineType=cv2.LINE_AA,
)
final = image + blank_image
return final
def save_agent_obs(frames, imdim, vid_dir, name):
use_dir = osp.join(vid_dir, name + "_frames")
if not osp.exists(use_dir):
os.makedirs(use_dir)
if imdim != 1:
raise ValueError("Only gray scale is supported right now")
for i in range(frames.shape[0]):
for frame_j in range(frames.shape[1]):
fname = f"{i}_{frame_j}.jpg"
frame = frames[i, frame_j].cpu().numpy()
cv2.imwrite(osp.join(use_dir, fname), frame)
print(f"Wrote observation sequence to {use_dir}")
def save_mp4(frames, vid_dir, name, fps=60.0, no_frame_drop=False, should_print=True):
frames = np.array(frames)
if len(frames[0].shape) == 4:
new_frames = frames[0]
for i in range(len(frames) - 1):
new_frames = np.concatenate([new_frames, frames[i + 1]])
frames = new_frames
if not osp.exists(vid_dir):
os.makedirs(vid_dir)
vid_file = osp.join(vid_dir, name + ".mp4")
if osp.exists(vid_file):
os.remove(vid_file)
w, h = frames[0].shape[:-1]
videodims = (h, w)
fourcc = cv2.VideoWriter_fourcc("m", "p", "4", "v")
video = cv2.VideoWriter(vid_file, fourcc, fps, videodims)
for frame in frames:
frame = frame[..., 0:3][..., ::-1]
video.write(frame)
video.release()
if should_print:
print(f"Rendered to {vid_file}")
def plot_traj_data(
pred: np.ndarray,
real: np.ndarray,
save_name: str,
log_name: str,
save_path_info: Union[Namespace, str],
step: int,
y_axis_name: str = "State %i",
no_wb: Optional[bool] = None,
title: str = "",
ylim=None,
):
"""
Plots each state dimension of a trajectory comparing a predicted and real trajectory.
:param pred: Shape [H, D] for a trajectory of length H and state dimension D.
D plots will be created.
:param real: Shape [H, D].
:param save_name: Appended to log_name. This should likely be unique so
files on the disk are not overriden. Include file extension.
:param log_name: Has %i in the name to dynamically insert the state dimension.
Should NOT be unique so the log key is updated.
:param save_path_info: The save path will either be extracted from the args or the
path passed as a string.
:param y_axis_name: string with %i to dynamically insert state dimension.
"""
save_name = log_name + "_" + save_name
if isinstance(save_path_info, str):
save_path = osp.join(save_path_info, save_name)
else:
save_path = osp.join(rutils.get_save_dir(save_path_info), save_name)
if no_wb is None:
if not isinstance(save_path_info, Namespace) and "no_wb" not in vars(
save_path_info
):
raise ValueError(
f"Could not find property `no_wb` in the passed `save_path_info`"
)
no_wb = save_path_info.no_wb
per_state_mse = np.mean((pred - real) ** 2, axis=0)
per_state_sqrt_mse = np.sqrt(per_state_mse)
H, state_dim = real.shape
for state_i in range(state_dim):
use_save_path = save_path % state_i
plt.plot(np.arange(H), real[:, state_i], label="Real")
plt.plot(np.arange(H), pred[:, state_i], label="Pred")
plt.grid(b=True, which="major", color="lightgray", linestyle="--")
plt.xlabel("t")
plt.ylabel(y_axis_name % state_i)
if ylim is not None:
plt.ylim(ylim)
if isinstance(title, list):
use_title = title[state_i]
else:
use_title = title
if len(use_title) != 0:
use_title += "\n"
use_title += "MSE %.4f, SQRT MSE %.4f" % (
per_state_mse[state_i],
per_state_sqrt_mse[state_i],
)
plt.title(use_title)
plt.legend()
rutils.plt_save(use_save_path)
if not no_wb:
use_full_log_name = log_name % state_i
wandb.log(
{use_full_log_name: [wandb.Image(use_save_path)]},
step=step,
)
return np.mean(per_state_mse)
|
[
"[email protected]"
] | |
2aeb217b02dbe82cdc5445f4bec4aafb01b07802
|
68049b03dbbd9a3d778571794472e07c05fb00ad
|
/python/courses/jose_portilla/flask/sandbox/10_databases/10_1_flask_and_databases_practice/setupdatabase.py
|
e2f38694c1a0eb91547cf484e4e8aa594a19934b
|
[] |
no_license
|
tjkhara/notes
|
c9e96ecea6efed860c521eb7df562c5715091aea
|
5602a25ba23104e4154700108f1b8a3a0144f712
|
refs/heads/master
| 2023-01-20T07:42:47.129359 | 2020-11-24T06:43:24 | 2020-11-24T06:43:24 | 285,811,022 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 497 |
py
|
from basic import db, Puppy
# creates all the tables
# takes classes and converts them into tables
db.create_all()
sam = Puppy('Sammy', 3)
frank = Puppy('Frankie', 4)
miles = Puppy('Miles', 10)
# These will say none because they are not in the database yet
# They don't have any ids
print(sam.id)
print(frank.id)
print(miles.id)
# Add these two objects to the database
db.session.add_all([sam, frank, miles])
# commit changes
db.session.commit()
print(sam.id)
print(frank.id)
print(miles.id)
|
[
"[email protected]"
] | |
3a9884fb534bd51716b75014723d49e7b5590761
|
59c55725576bbf0e2f6617507ba2f1db639abb3f
|
/analytic_billing_plan/wizard/analytic_billing_plan_line_make_sale.py
|
921890470694b17358282339a41cfc55af455bcf
|
[] |
no_license
|
bmya/eficent-odoo-addons
|
e3426ebaf1f59e52726253fc1dd36a09d9363059
|
5d8ddfa384ab4417f42bda103b71d926848035f6
|
refs/heads/7.0
| 2021-01-21T16:48:55.312452 | 2015-11-04T14:11:19 | 2015-11-04T14:11:19 | 45,649,141 | 1 | 3 | null | 2015-11-06T00:35:17 | 2015-11-06T00:35:17 | null |
UTF-8
|
Python
| false | false | 11,703 |
py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Eficent (<http://www.eficent.com/>)
# <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime
from openerp.osv import fields, osv, orm
from openerp.tools.translate import _
class analytic_billing_plan_line_make_sale(orm.TransientModel):
_name = "analytic.billing.plan.line.make.sale"
_description = "Analytic billing plan line make sale"
def _get_order_lines(self, cr, uid, context=None):
"""
Returns the order lines associated to the analytic accounts selected.
"""
if context is None:
context = {}
record_ids = context and context.get('active_ids', False)
if record_ids:
order_line_ids = []
line_plan_obj = self.pool.get('analytic.billing.plan.line')
for line in line_plan_obj.browse(cr, uid, record_ids,
context=context):
for order_line in line.order_line_ids:
order_line_id = order_line and order_line.id
order_line_ids.extend([order_line_id])
if order_line_ids:
return order_line_ids
return False
def _get_default_shop(self, cr, uid, context=None):
company_id = self.pool.get('res.users').browse(
cr, uid, uid, context=context).company_id.id
shop_ids = self.pool.get('sale.shop').search(
cr, uid, [('company_id', '=', company_id)], context=context)
if not shop_ids:
raise osv.except_osv(_('Error!'),
_('There is no default shop '
'for the current user\'s company!'))
return shop_ids[0]
_columns = {
'order_line_ids': fields.many2many('sale.order.line',
'make_sale_order_line_rel',
'order_line_id',
'make_sale_order_id'),
'shop_id': fields.many2one('sale.shop', 'Shop', required=True),
'invoice_quantity': fields.selection([('order',
'Ordered Quantities')],
'Invoice on',
help="The sales order will "
"automatically create the "
"invoice proposition "
"(draft invoice).",
required=True),
'order_policy': fields.selection([('manual', 'On Demand')],
'Create Invoice',
help="""This field controls how
invoice and delivery
operations are synchronized.""",
required=True),
}
_defaults = {
'order_line_ids': _get_order_lines,
'shop_id': _get_default_shop,
'order_policy': 'manual',
'invoice_quantity': 'order',
}
def make_sales_orders(self, cr, uid, ids, context=None):
"""
To make sales.
@param self: The object pointer.
@param cr: A database cursor
@param uid: ID of the user currently logged in
@param ids: the ID or list of IDs
@param context: A standard dictionary
@return: A dictionary which of fields with values.
"""
if context is None:
context = {}
record_ids = context and context.get('active_ids', False)
make_order = self.browse(cr, uid, ids[0], context=context)
res = []
if record_ids:
billing_plan_obj = self.pool.get('analytic.billing.plan.line')
order_obj = self.pool.get('sale.order')
order_line_obj = self.pool.get('sale.order.line')
partner_obj = self.pool.get('res.partner')
acc_pos_obj = self.pool.get('account.fiscal.position')
list_line = []
customer_data = False
company_id = False
sale_id = False
account_id = False
for line in billing_plan_obj.browse(cr, uid, record_ids,
context=context):
uom_id = line.product_uom_id
if not line.customer_id:
raise osv.except_osv(
_('Could not create sale order !'),
_('You have to enter a customer.'))
if customer_data is not False \
and line.customer_id != customer_data:
raise osv.except_osv(
_('Could not create sale order !'),
_('You have to select lines '
'from the same customer.'))
else:
customer_data = line.customer_id
partner_addr = partner_obj.address_get(
cr, uid, [customer_data.id], ['default',
'invoice',
'delivery',
'contact'])
newdate = datetime.today()
partner = customer_data
pricelist_id = partner.property_product_pricelist \
and partner.property_product_pricelist.id \
or False
price_unit = line.price_unit
line_company_id = line.company_id \
and line.company_id.id \
or False
if company_id is not False \
and line_company_id != company_id:
raise osv.except_osv(
_('Could not create sale order !'),
_('You have to select lines '
'from the same company.'))
else:
company_id = line_company_id
shop_id = make_order.shop_id \
and make_order.shop_id.id \
or False
line_account_id = line.account_id \
and line.account_id.id \
or False
if account_id is not False \
and line_account_id != account_id:
raise osv.except_osv(
_('Could not create billing request!'),
_('You have to select lines from the '
'same analytic account.'))
else:
account_id = line_account_id
sale_order_line = {
'name': line.name,
'product_uom_qty': line.unit_amount,
'product_id': line.product_id.id,
'product_uom': uom_id.id,
'price_unit': price_unit,
'notes': line.notes,
}
taxes = False
if line.product_id:
taxes_ids = line.product_id.product_tmpl_id.taxes_id
taxes = acc_pos_obj.map_tax(
cr, uid, partner.property_account_position,
taxes_ids)
if taxes:
sale_order_line.update({
'tax_id': [(6, 0, taxes)]
})
list_line.append(sale_order_line)
if sale_id is False:
sale_id = order_obj.create(cr, uid, {
'origin': '',
'shop_id': shop_id,
'partner_id': customer_data.id,
'pricelist_id': pricelist_id,
'partner_invoice_id': partner_addr['invoice'],
'partner_order_id': partner_addr['contact'],
'partner_shipping_id': partner_addr['delivery'],
'date_order':
newdate.strftime('%Y-%m-%d %H:%M:%S'),
'fiscal_position':
partner.property_account_position and
partner.property_account_position.id or False,
'company_id': company_id,
'payment_term':
partner.property_payment_term and
partner.property_payment_term.id or False,
'project_id': account_id,
'invoice_quantity': make_order.invoice_quantity,
'order_policy': make_order.order_policy,
}, context=context)
if line.account_id.user_id:
order_obj.message_subscribe_users(
cr, uid, [sale_id],
user_ids=[line.account_id.user_id.id])
sale_order_line.update({
'order_id': sale_id
})
order_line_id = order_line_obj.create(cr, uid,
sale_order_line,
context=context)
values = {
'order_line_ids': [(4, order_line_id)]
}
billing_plan_obj.write(cr, uid, [line.id], values,
context=context)
res.append(order_line_id)
return {
'domain': "[('id','in', ["+','.join(map(str, res))+"])]",
'name': _('Billing request lines'),
'view_type': 'form',
'view_mode': 'tree,form',
'res_model': 'sale.order.line',
'view_id': False,
'context': False,
'type': 'ir.actions.act_window'
}
analytic_billing_plan_line_make_sale()
|
[
"[email protected]"
] | |
e4fd0b88f086e8155bee37b5546c0096f7760d3e
|
e78154abbb8bacf5afccda9da371684cbeabad36
|
/envs/ALPHA-POPEGO/lib/python2.5/site-packages/ipython-0.8.2-py2.5.egg/IPython/Release.py
|
c22250cf389d6cc8e86540e756de11ec217a66b1
|
[
"BSD-3-Clause"
] |
permissive
|
enterstudio/popego
|
1a196fabc374c0f45764e5c74bd7752236424040
|
2d09e793d9d2f297139edb325b8a70ddda9b2705
|
refs/heads/master
| 2021-04-09T16:39:40.781634 | 2016-10-14T16:53:47 | 2016-10-14T16:53:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,806 |
py
|
# -*- coding: utf-8 -*-
"""Release data for the IPython project.
$Id: Release.py 2855 2007-11-06 06:53:49Z vivainio $"""
#*****************************************************************************
# Copyright (C) 2001-2006 Fernando Perez <[email protected]>
#
# Copyright (c) 2001 Janko Hauser <[email protected]> and Nathaniel Gray
# <[email protected]>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#*****************************************************************************
# Name of the package for release purposes. This is the name which labels
# the tarballs and RPMs made by distutils, so it's best to lowercase it.
name = 'ipython'
# For versions with substrings (like 0.6.16.svn), use an extra . to separate
# the new substring. We have to avoid using either dashes or underscores,
# because bdist_rpm does not accept dashes (an RPM) convention, and
# bdist_deb does not accept underscores (a Debian convention).
revision = '2876M'
#version = '0.8.2.svn.r' + revision.rstrip('M')
version = '0.8.2'
description = "An enhanced interactive Python shell."
long_description = \
"""
IPython provides a replacement for the interactive Python interpreter with
extra functionality.
Main features:
* Comprehensive object introspection.
* Input history, persistent across sessions.
* Caching of output results during a session with automatically generated
references.
* Readline based name completion.
* Extensible system of 'magic' commands for controlling the environment and
performing many tasks related either to IPython or the operating system.
* Configuration system with easy switching between different setups (simpler
than changing $PYTHONSTARTUP environment variables every time).
* Session logging and reloading.
* Extensible syntax processing for special purpose situations.
* Access to the system shell with user-extensible alias system.
* Easily embeddable in other Python programs.
* Integrated access to the pdb debugger and the Python profiler.
The latest development version is always available at the IPython subversion
repository_.
.. _repository: http://ipython.scipy.org/svn/ipython/ipython/trunk#egg=ipython-dev
"""
license = 'BSD'
authors = {'Fernando' : ('Fernando Perez','[email protected]'),
'Janko' : ('Janko Hauser','[email protected]'),
'Nathan' : ('Nathaniel Gray','[email protected]'),
'Ville' : ('Ville Vainio','[email protected]')
}
url = 'http://ipython.scipy.org'
download_url = 'http://ipython.scipy.org/dist'
platforms = ['Linux','Mac OSX','Windows XP/2000/NT','Windows 95/98/ME']
keywords = ['Interactive','Interpreter','Shell']
|
[
"[email protected]"
] | |
31298541903089b84d357150a735501103053981
|
0a57f05221d425119cb2994c5686a95e01b33d46
|
/ex21.py
|
67a0f965521d5f8cce8027401d93c01786fc9214
|
[] |
no_license
|
luroto/lpthw
|
371ad2de422e7656b9f18461808d28847d17971f
|
e89329477d0c5c5b34d7998832b395c05385876b
|
refs/heads/master
| 2022-06-02T17:56:01.873932 | 2020-05-02T17:52:11 | 2020-05-02T17:52:11 | 260,742,781 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 668 |
py
|
def add(a,b):
print(f"ADDING {a} + {b}")
return a + b
def substract(a, b):
print(f"SUBSRACTING {a} - {b}")
return a - b
def multiply(a, b):
print(f"MULTIPLYING {a} * {b}")
return a * b
def divide(a, b):
print(f"DIVIDING {a} / {b}")
return (a / b)
print("Let's do some math with just functions")
age = add(30, 5)
height = substract(78, 4)
weight = multiply(90, 2)
iq = divide(100, 2)
print(f"Age: {age}, Height: {height}, Weight {weight}, IQ {iq}")
# A puzzle for the extra credit, type it in anyway
print("Here's a puzzle")
what = add(age, substract(height, multiply(weight, divide(iq, 2))))
print("That becomes: ", what, "Can you do it by hand?")
|
[
"[email protected]"
] | |
6a6ebe3550b44d0e3ce445ed0151ed8f95c18ec0
|
7889f7f0532db6a7f81e6f8630e399c90438b2b9
|
/2.1.2/_downloads/boxplot_demo1.py
|
aac441baa4f86269d657f3d8b96bfebf095017f7
|
[] |
no_license
|
matplotlib/matplotlib.github.com
|
ef5d23a5bf77cb5af675f1a8273d641e410b2560
|
2a60d39490941a524e5385670d488c86083a032c
|
refs/heads/main
| 2023-08-16T18:46:58.934777 | 2023-08-10T05:07:57 | 2023-08-10T05:08:30 | 1,385,150 | 25 | 59 | null | 2023-08-30T15:59:50 | 2011-02-19T03:27:35 | null |
UTF-8
|
Python
| false | false | 7,720 |
py
|
"""
========
Boxplots
========
Visualizing boxplots with matplotlib.
The following examples show off how to visualize boxplots with
Matplotlib. There are many options to control their appearance and
the statistics that they use to summarize the data.
"""
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.patches import Polygon
# Fixing random state for reproducibility
np.random.seed(19680801)
# fake up some data
spread = np.random.rand(50) * 100
center = np.ones(25) * 50
flier_high = np.random.rand(10) * 100 + 100
flier_low = np.random.rand(10) * -100
data = np.concatenate((spread, center, flier_high, flier_low), 0)
fig, axs = plt.subplots(2, 3)
# basic plot
axs[0, 0].boxplot(data)
axs[0, 0].set_title('basic plot')
# notched plot
axs[0, 1].boxplot(data, 1)
axs[0, 1].set_title('notched plot')
# change outlier point symbols
axs[0, 2].boxplot(data, 0, 'gD')
axs[0, 2].set_title('change outlier\npoint symbols')
# don't show outlier points
axs[1, 0].boxplot(data, 0, '')
axs[1, 0].set_title("don't show\noutlier points")
# horizontal boxes
axs[1, 1].boxplot(data, 0, 'rs', 0)
axs[1, 1].set_title('horizontal boxes')
# change whisker length
axs[1, 2].boxplot(data, 0, 'rs', 0, 0.75)
axs[1, 2].set_title('change whisker length')
fig.subplots_adjust(left=0.08, right=0.98, bottom=0.05, top=0.9,
hspace=0.4, wspace=0.3)
# fake up some more data
spread = np.random.rand(50) * 100
center = np.ones(25) * 40
flier_high = np.random.rand(10) * 100 + 100
flier_low = np.random.rand(10) * -100
d2 = np.concatenate((spread, center, flier_high, flier_low), 0)
data.shape = (-1, 1)
d2.shape = (-1, 1)
# Making a 2-D array only works if all the columns are the
# same length. If they are not, then use a list instead.
# This is actually more efficient because boxplot converts
# a 2-D array into a list of vectors internally anyway.
data = [data, d2, d2[::2, 0]]
# Multiple box plots on one Axes
fig, ax = plt.subplots()
ax.boxplot(data)
plt.show()
###############################################################################
# Below we'll generate data from five different probability distributions,
# each with different characteristics. We want to play with how an IID
# bootstrap resample of the data preserves the distributional
# properties of the original sample, and a boxplot is one visual tool
# to make this assessment
# Five reference distributions plus an IID bootstrap resample of each.
numDists = 5
randomDists = ['Normal(1,1)', ' Lognormal(1,1)', 'Exp(1)', 'Gumbel(6,4)',
               'Triangular(2,9,11)']
N = 500
norm = np.random.normal(1, 1, N)
logn = np.random.lognormal(1, 1, N)
expo = np.random.exponential(1, N)
gumb = np.random.gumbel(6, 4, N)
tria = np.random.triangular(2, 9, 11, N)
# Generate some random indices that we'll use to resample the original data
# arrays. For code brevity, just use the same random indices for each array.
# Fix: np.random.random_integers was deprecated in NumPy 1.11 and removed in
# 1.17; randint samples the same range because its upper bound is exclusive.
bootstrapIndices = np.random.randint(0, N, N)
normBoot = norm[bootstrapIndices]
expoBoot = expo[bootstrapIndices]
gumbBoot = gumb[bootstrapIndices]
lognBoot = logn[bootstrapIndices]
triaBoot = tria[bootstrapIndices]
data = [norm, normBoot, logn, lognBoot, expo, expoBoot, gumb, gumbBoot,
        tria, triaBoot]
fig, ax1 = plt.subplots(figsize=(10, 6))
# Fix: set_window_title lives on the figure manager; the canvas alias was
# removed in Matplotlib 3.6.
fig.canvas.manager.set_window_title('A Boxplot Example')
fig.subplots_adjust(left=0.075, right=0.95, top=0.9, bottom=0.25)
bp = ax1.boxplot(data, notch=0, sym='+', vert=1, whis=1.5)
plt.setp(bp['boxes'], color='black')
plt.setp(bp['whiskers'], color='black')
plt.setp(bp['fliers'], color='red', marker='+')
# Add a horizontal grid to the plot, but make it very light in color
# so we can use it for reading data values but not be distracting
ax1.yaxis.grid(True, linestyle='-', which='major', color='lightgrey',
               alpha=0.5)
# Hide these grid behind plot objects
ax1.set_axisbelow(True)
ax1.set_title('Comparison of IID Bootstrap Resampling Across Five Distributions')
ax1.set_xlabel('Distribution')
ax1.set_ylabel('Value')
# Now fill the boxes with desired colors
boxColors = ['darkkhaki', 'royalblue']
numBoxes = numDists*2
medians = list(range(numBoxes))
for i in range(numBoxes):
    box = bp['boxes'][i]
    # Collect the box outline's vertices so it can be filled as a Polygon
    boxX = []
    boxY = []
    for j in range(5):
        boxX.append(box.get_xdata()[j])
        boxY.append(box.get_ydata()[j])
    boxCoords = list(zip(boxX, boxY))
    # Alternate between Dark Khaki and Royal Blue
    k = i % 2
    boxPolygon = Polygon(boxCoords, facecolor=boxColors[k])
    ax1.add_patch(boxPolygon)
    # Now draw the median lines back over what we just filled in
    med = bp['medians'][i]
    medianX = []
    medianY = []
    for j in range(2):
        medianX.append(med.get_xdata()[j])
        medianY.append(med.get_ydata()[j])
        ax1.plot(medianX, medianY, 'k')
        medians[i] = medianY[0]
    # Finally, overplot the sample averages, with horizontal alignment
    # in the center of each box
    ax1.plot([np.average(med.get_xdata())], [np.average(data[i])],
             color='w', marker='*', markeredgecolor='k')
# Set the axes ranges and axes labels
ax1.set_xlim(0.5, numBoxes + 0.5)
top = 40
bottom = -5
ax1.set_ylim(bottom, top)
ax1.set_xticklabels(np.repeat(randomDists, 2),
                    rotation=45, fontsize=8)
# Due to the Y-axis scale being different across samples, it can be
# hard to compare differences in medians across the samples. Add upper
# X-axis tick labels with the sample medians to aid in comparison
# (just use two decimal places of precision)
pos = np.arange(numBoxes) + 1
upperLabels = [str(np.round(s, 2)) for s in medians]
weights = ['bold', 'semibold']
for tick, label in zip(range(numBoxes), ax1.get_xticklabels()):
    k = tick % 2
    ax1.text(pos[tick], top - (top*0.05), upperLabels[tick],
             horizontalalignment='center', size='x-small', weight=weights[k],
             color=boxColors[k])
# Finally, add a basic legend
fig.text(0.80, 0.08, str(N) + ' Random Numbers',
         backgroundcolor=boxColors[0], color='black', weight='roman',
         size='x-small')
fig.text(0.80, 0.045, 'IID Bootstrap Resample',
         backgroundcolor=boxColors[1],
         color='white', weight='roman', size='x-small')
fig.text(0.80, 0.015, '*', color='white', backgroundcolor='silver',
         weight='roman', size='medium')
fig.text(0.815, 0.013, ' Average Value', color='black', weight='roman',
         size='x-small')
plt.show()
###############################################################################
# Here we write a custom function to bootstrap confidence intervals.
# We can then use the boxplot along with this function to show these intervals.
def fakeBootStrapper(n):
    """Stand-in for a real bootstrap of the median and its confidence interval.

    Returns a hard-coded (median, (lo, hi)) tuple: one fixed pair when
    n == 1 and another for every other value of n.
    """
    if n == 1:
        return 0.1, (-0.25, 0.25)
    return 0.2, (-0.35, 0.50)
# Four synthetic "treatments" with gradually increasing standard deviation
inc = 0.1
e1 = np.random.normal(0, 1, size=(500,))
e2 = np.random.normal(0, 1, size=(500,))
e3 = np.random.normal(0, 1 + inc, size=(500,))
e4 = np.random.normal(0, 1 + 2*inc, size=(500,))
treatments = [e1, e2, e3, e4]
# Pre-computed medians/CIs override boxplot's own statistics for the last
# two boxes; None entries let boxplot compute its own values
med1, CI1 = fakeBootStrapper(1)
med2, CI2 = fakeBootStrapper(2)
medians = [None, None, med1, med2]
conf_intervals = [None, None, CI1, CI2]
fig, ax = plt.subplots()
pos = np.array(range(len(treatments))) + 1
bp = ax.boxplot(treatments, sym='k+', positions=pos,
                notch=1, bootstrap=5000,
                usermedians=medians,
                conf_intervals=conf_intervals)
ax.set_xlabel('treatment')
ax.set_ylabel('response')
plt.setp(bp['whiskers'], color='k', linestyle='-')
plt.setp(bp['fliers'], markersize=3.0)
plt.show()
|
[
"[email protected]"
] | |
c8c91b8f93916d59a46e5052ed5bf42d766b5c99
|
e914da03391c81b69ae47c3dfaabb119259eb66f
|
/aon_decoder.py
|
4f5b351c967cae4c175b18aac6ed5d606fc3f548
|
[] |
no_license
|
koder-ua/python_practice
|
25f67e7c2333c0f96a2a711947e87951769570db
|
a68b8fc9c12e841b7355c745db6d104205ea568f
|
refs/heads/master
| 2021-01-22T04:22:58.642582 | 2015-12-15T14:16:40 | 2015-12-15T14:16:40 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,930 |
py
|
#!/usr/bin/env python
# -*- coding:utf8 -*-
"""
Homework for Automatic Number Identification (ANI)
https://github.com/koder-ua/python-classes/blob/master/slides/pdf/FF_tasks.pdf
Slide #7
"""
def decode(string):
    """
    ANI decoder.
    - runs of a repeated character collapse to that single character
    - characters that appear only once are dropped
    - '#' repeats the character immediately before it; a leading '#'
      with nothing after it makes the input undecodable
    :param string: raw key-press string
    :return: decoded string, or None when the input cannot be decoded
    """
    # Break the input into maximal runs of identical characters.
    runs = []
    for char in string:
        if runs and runs[-1][-1] == char:
            runs[-1] += char
        else:
            runs.append(char)
    # Collapse each run to one character, dropping length-one runs.
    collapsed = "".join(run[0] for run in runs if len(run) > 1)
    decoded = ""
    for index, char in enumerate(collapsed):
        if char != "#":
            decoded += char
        elif index == 0:
            if len(collapsed) == 1:
                # A lone '#' has nothing to repeat: undecodable.
                return None
            # A leading '#' followed by more input is simply skipped.
        else:
            # '#' repeats the character that precedes it in the
            # collapsed string.
            decoded += collapsed[index - 1]
    return decoded
def test_decode():
    """Exercise decode() on the reference inputs from the task statement."""
    cases = [
        ("", ""),
        ("1", ""),
        ("11111", "1"),
        ("11#", "1"),
        ("11##", "11"),
        ("11122234###55", "1225"),
        ("##", None),
        ("12345##", None),
        ("221133444##", "21344"),
        ("###33###22##", "3322"),
        ("###33###22##1#", "3322"),
    ]
    for raw, expected in cases:
        assert decode(raw) == expected
    print("Passed successfully")
def main():
    """Entry point: run the self-test; returns 0 to signal success."""
    test_decode()
    return 0
if __name__ == "__main__":
exit(main())
|
[
"[email protected]"
] | |
8cc39834a3986a41c0b6c4717eda289d67aa0f2a
|
7d3cb9e6ac0f2a0f217fb8ad77076fd4f719a437
|
/xen_signature/apps/pdf_to_image/migrations/0003_auto_20181020_1658.py
|
75cf4f4498115f51b134898cac32d0c1bc38dea3
|
[] |
no_license
|
FlashBanistan/django-xen-signature
|
b88b0698b00390e019ebb419d74043f1e36777ba
|
b390e9aa069c89021e63e41a554489ccf9d685a5
|
refs/heads/master
| 2020-04-02T06:11:24.486660 | 2018-10-26T17:17:20 | 2018-10-26T17:17:20 | 154,135,107 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 741 |
py
|
# Generated by Django 2.1.2 on 2018-10-20 16:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Rename DocumentImage's cached dimension fields and rewire ImageField.

    Drops the ``image_`` prefix from the height/width columns and points the
    ``image`` field's ``height_field``/``width_field`` at the new names.
    """
    dependencies = [
        ('pdf_to_image', '0002_auto_20181020_1657'),
    ]
    operations = [
        # image_height -> height
        migrations.RenameField(
            model_name='documentimage',
            old_name='image_height',
            new_name='height',
        ),
        # image_width -> width
        migrations.RenameField(
            model_name='documentimage',
            old_name='image_width',
            new_name='width',
        ),
        # Re-declare the ImageField so it caches dimensions in the renamed columns.
        migrations.AlterField(
            model_name='documentimage',
            name='image',
            field=models.ImageField(height_field='height', upload_to='', width_field='width'),
        ),
    ]
|
[
"[email protected]"
] | |
29aa7eefb7323c5953972bcecbf05797b238b684
|
e42cce21fbb3c4fe3f271c2029d9659270a968ab
|
/vmrunapi/vmrunapi.py
|
cde0c05a165dbfc2cd3c7b87f6803f601bfd2453
|
[] |
no_license
|
cloudbase/maas-hacks
|
d086a91338e45121dafb33734ba4977e31851dbc
|
0e2cc5537ff64376505c1e9e77dcdf3657fc4d78
|
refs/heads/master
| 2016-09-06T13:02:15.808249 | 2014-04-30T00:24:58 | 2014-04-30T00:24:58 | 17,869,386 | 5 | 0 | null | 2014-05-06T01:23:22 | 2014-03-18T14:43:58 |
Python
|
UTF-8
|
Python
| false | false | 3,400 |
py
|
#!/usr/bin/python
import flask
import os
import re
import subprocess
import sys
if sys.platform == 'win32':
from win32com.shell import shell
from win32com.shell import shellcon
# Flask application exposing the vmrun HTTP endpoints defined below.
app = flask.Flask(__name__)
# Response bodies returned by the VM lifecycle endpoints.
STARTED = "started"
STOPPED = "stopped"
def _get_matching_vmx_path(path, mac_address):
mac_address_re = re.compile(r'^ethernet(\d+)\.address(\s*)=(\s*)\"%s\"$' %
mac_address.upper())
for root, dirs, file_names in os.walk(path):
for file_name in file_names:
if os.path.splitext(file_name)[1].lower() == '.vmx':
vmx_path = os.path.join(root, file_name)
with open(vmx_path, 'rb') as f:
for l in f:
if mac_address_re.match(l):
return vmx_path
def _get_vmx_base_path():
if sys.platform == 'darwin':
return os.path.expanduser("~/Documents/Virtual Machines")
elif sys.platform == 'win32':
documents_dir = shell.SHGetFolderPath(0, shellcon.CSIDL_PERSONAL,
None, 0)
return os.path.join(documents_dir, "Virtual Machines")
else:
return os.path.expanduser("~/vmware")
def _get_vmrun():
if sys.platform == 'darwin':
return ("/Applications/VMware Fusion.app/Contents/Library/vmrun",
"fusion")
else:
# Make sure to have vmrun in the PATH
return ("vmrun", "ws")
def _execute_process(args):
p = subprocess.Popen(args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=False)
(out, err) = p.communicate()
return (out, err, p.returncode)
def _exec_vmrun_cmd(cmd, vmx_path=None):
    """Invoke ``vmrun -T <type> <cmd> [vmx_path]`` and return its stdout.

    Raises Exception embedding vmrun's output when it exits non-zero.
    """
    vmrun_path, vmrun_type = _get_vmrun()
    args = [vmrun_path, "-T", vmrun_type, cmd]
    args += [vmx_path] if vmx_path else []
    out, err, exit_code = _execute_process(args)
    if exit_code:
        raise Exception("vmrun failed: %s" % out)
    return out
@app.route('/vmrun/vm/find_by_mac_address/<string:mac_address>',
           methods = ['GET'])
def get_vmx_path_bymac_address(mac_address):
    """GET handler: resolve a MAC address to the matching .vmx file path."""
    vmx_path = _get_matching_vmx_path(_get_vmx_base_path(), mac_address)
    if vmx_path:
        return vmx_path
    # No VM on this host uses that MAC address.
    flask.abort(404)
def _get_json_vmx_path():
    """Extract and validate 'vmx_path' from the request's JSON body.

    Aborts with 400 when the body or key is missing/empty and with 404
    when the path does not exist on disk.
    """
    payload = flask.request.json
    if not payload:
        flask.abort(400)
    vmx_path = payload.get('vmx_path')
    if not vmx_path:
        flask.abort(400)
    if not os.path.exists(vmx_path):
        flask.abort(404)
    return vmx_path
@app.route('/vmrun/vm/start', methods = ['POST'])
def start_vm():
    """POST handler: power on the VM named by 'vmx_path' in the JSON body."""
    _exec_vmrun_cmd("start", _get_json_vmx_path())
    return STARTED
@app.route('/vmrun/vm/stop', methods = ['POST'])
def stop_vm():
    """POST handler: power off the VM named by 'vmx_path' in the JSON body."""
    vmx_path = _get_json_vmx_path()
    _exec_vmrun_cmd("stop", vmx_path)
    # Bug fix: this endpoint previously returned STARTED after stopping.
    return STOPPED
@app.route('/vmrun/vm/status', methods = ['POST'])
def get_vm_status():
    """POST handler: report whether the VM named by 'vmx_path' is running.

    The slice drops the first line of ``vmrun list`` output (a header
    preceding the running vmx paths) and the empty chunk after the final
    newline.
    """
    vmx_path = _get_json_vmx_path()
    running_paths = _exec_vmrun_cmd("list").split("\n")[1:-1]
    return STARTED if vmx_path in running_paths else STOPPED
# Listen on all interfaces on port 6000; debug=True enables the reloader
# and interactive debugger and must not be used in production.
if __name__ == '__main__':
    app.run(host="0.0.0.0", port=6000, debug = True)
|
[
"[email protected]"
] | |
57c5f0267b758e4eb4c42389e10c758178243ed3
|
c703b8ac3b5545857f6c95efa2d61eaf7a664021
|
/iPERCore/models/networks/discriminators/patch_dis.py
|
83491547d5a6977f9e719ae8d16041dd07558ae4
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license",
"Apache-2.0",
"BSD-2-Clause"
] |
permissive
|
iPERDance/iPERCore
|
d29681d229b3098b3517b1abf4f7ea65f579de73
|
fcf9a18ffd66bf3fdd3eea4153a3bc4785131848
|
refs/heads/main
| 2023-07-30T15:04:15.835396 | 2023-04-12T14:21:23 | 2023-04-12T14:21:23 | 313,664,064 | 2,520 | 339 |
Apache-2.0
| 2023-05-12T03:26:52 | 2020-11-17T15:36:25 |
Python
|
UTF-8
|
Python
| false | false | 2,757 |
py
|
# Copyright (c) 2020-2021 impersonator.org authors (Wen Liu and Zhixin Piao). All rights reserved.
import torch
import torch.nn as nn
import functools
class PatchDiscriminator(nn.Module):
    """Defines a PatchGAN discriminator: maps an image to a 1-channel map of
    real/fake scores, one per receptive-field patch."""
    def __init__(self, input_nc, ndf=32, n_layers=3, max_nf_mult=8,
                 norm_type="batch", use_sigmoid=False):
        """Construct a PatchGAN discriminator
        Parameters:
            input_nc (int)     -- the number of channels in input images
            ndf (int)          -- the number of filters in the first conv layer
            n_layers (int)     -- the number of strided (downsampling) conv layers
            max_nf_mult (int)  -- cap on the channel multiplier applied to ndf
            norm_type (str)    -- "batch", "instance" or "batchnorm2d"
            use_sigmoid (bool) -- if True, append a Sigmoid so the output map
                                  lies in (0, 1) instead of raw logits
        """
        super(PatchDiscriminator, self).__init__()
        norm_layer = self._get_norm_layer(norm_type)
        if type(norm_layer) == functools.partial: # no need to use bias as BatchNorm2d has affine parameters
            use_bias = norm_layer.func != nn.BatchNorm2d
        else:
            use_bias = norm_layer != nn.BatchNorm2d
        # 4x4 kernels with padding 1 throughout.
        kw = 4
        padw = 1
        # First conv: no normalization, per the PatchGAN convention.
        sequence = [nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), nn.LeakyReLU(0.2, True)]
        nf_mult = 1
        nf_mult_prev = 1
        for n in range(1, n_layers): # gradually increase the number of filters
            nf_mult_prev = nf_mult
            nf_mult = min(2 ** n, max_nf_mult)
            sequence += [
                nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, kernel_size=kw, stride=2, padding=padw, bias=use_bias),
                norm_layer(ndf * nf_mult),
                nn.LeakyReLU(0.2, True)
            ]
        # One extra stride-1 conv block before the prediction head.
        nf_mult_prev = nf_mult
        nf_mult = min(2 ** n_layers, max_nf_mult)
        sequence += [
            nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, kernel_size=kw, stride=1, padding=padw, bias=use_bias),
            norm_layer(ndf * nf_mult),
            nn.LeakyReLU(0.2, True)
        ]
        sequence += [nn.Conv2d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw)] # output 1 channel prediction map
        if use_sigmoid:
            sequence += [nn.Sigmoid()]
        self.model = nn.Sequential(*sequence)
    def _get_norm_layer(self, norm_type="batch"):
        # Map a norm name to a constructor that takes the channel count.
        # "batch" wraps BatchNorm2d in a partial with affine=True, while
        # "batchnorm2d" returns the raw class (library default affine).
        if norm_type == "batch":
            norm_layer = functools.partial(nn.BatchNorm2d, affine=True)
        elif norm_type == "instance":
            norm_layer = functools.partial(nn.InstanceNorm2d, affine=False)
        elif norm_type == "batchnorm2d":
            norm_layer = nn.BatchNorm2d
        else:
            raise NotImplementedError(f"normalization layer [{norm_type}] is not found")
        return norm_layer
    def forward(self, input):
        """Standard forward."""
        return self.model(input)
|
[
"[email protected]"
] | |
e0a7315e974496146f931f1dccb8aff89ce1264d
|
1ca94f20401cc0bd33a7a935dea2f3c66776dbe4
|
/users/models.py
|
b8680d9a1d986368544da5d9676214693646fa7a
|
[] |
no_license
|
liangsongyou/news-18
|
468d06a854e3bf6b5389e6efbb2b1a812d45fef6
|
45619e32d7f950d75949912ee8c570903f6c39f3
|
refs/heads/master
| 2020-04-11T15:59:26.136085 | 2018-12-15T13:54:16 | 2018-12-15T13:54:16 | 161,909,795 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 171 |
py
|
from django.contrib.auth.models import AbstractUser
from django.db import models
class CustomUser(AbstractUser):
    """Project user model: Django's AbstractUser plus an age field."""
    # Non-negative integer; defaults to 0 when not supplied.
    age = models.PositiveIntegerField(default=0)
|
[
"[email protected]"
] | |
06a59e32f037c215fdd5e541b97856291bb4a2c7
|
fded1e6c9cdd64bd96f0bad1d2877a65a0202631
|
/src/simulacra/cluster/__init__.py
|
4c576ebc5c4b160c199211499b0c315b87a4a462
|
[
"MIT"
] |
permissive
|
johnvonlzf/simulacra
|
46100c33be6992b1f45d7272884689579c28bd37
|
b89fd0abf59debf077a4ce4cc46d5e0c58f53b4d
|
refs/heads/master
| 2022-01-23T23:04:08.191552 | 2018-10-14T15:53:23 | 2018-10-14T15:53:23 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 79 |
py
|
from .interface import *
from .job_creation import *
from .processing import *
|
[
"[email protected]"
] | |
2cb549fab7ccf5db93a112f7980fa14fbc3ffbd0
|
8e7e51ff8b9c1103d10aa86c3d1cb446cfb25e4c
|
/djeniesecurity/djeniesecurity/urls.py
|
c409e1093e267c2e36d190bdc95028974c4ec905
|
[] |
no_license
|
huogerac/modulo4
|
b2c6e07f5e2928182a03edac503d0a4468736007
|
b30e056fb5a4703255982a349ed184beaea010fd
|
refs/heads/master
| 2021-01-17T21:25:03.926382 | 2013-09-23T10:21:51 | 2013-09-23T10:21:51 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 572 |
py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
# Register ModelAdmin classes from every installed app's admin module.
admin.autodiscover()
# NOTE(review): patterns() is the Django 1.x URL API (removed in 1.10);
# keep as-is for this project's Django version.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'djeniesecurity.views.home', name='home'),
    # url(r'^djeniesecurity/', include('djeniesecurity.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    url(r'^admin/', include(admin.site.urls)),
)
# Catch-all routes: cms first, then sms (order matters for r'' matches).
urlpatterns += patterns('',
    url(r'', include('cms.urls')),
    url(r'', include('sms.urls')),
)
|
[
"[email protected]"
] | |
b12c0fb45f697b54880348bc5234ea5e8967228d
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/benchmark/startCirq1197.py
|
1ebb841cfc54a0fc26e0f2bd3522d7dfdaa63405
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,855 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=5
# total number=51
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
    """Build the benchmark circuit on *input_qubit* and measure all qubits.

    NOTE(review): the *n* parameter is unused by the body; the circuit acts
    on whatever qubits are passed in -- confirm whether n was meant to
    control the loop below.
    """
    c = cirq.Circuit()  # circuit begin
    c.append(cirq.H.on(input_qubit[0])) # number=3
    c.append(cirq.H.on(input_qubit[1])) # number=4
    c.append(cirq.H.on(input_qubit[2])) # number=5
    c.append(cirq.H.on(input_qubit[3])) # number=6
    c.append(cirq.H.on(input_qubit[4])) # number=21
    # Two identical rounds of the generated gate sequence.
    for i in range(2):
        c.append(cirq.H.on(input_qubit[0])) # number=1
        c.append(cirq.H.on(input_qubit[1])) # number=2
        c.append(cirq.H.on(input_qubit[2])) # number=7
        c.append(cirq.H.on(input_qubit[3])) # number=8
        c.append(cirq.H.on(input_qubit[0])) # number=17
        c.append(cirq.H.on(input_qubit[1])) # number=18
        c.append(cirq.H.on(input_qubit[2])) # number=19
        c.append(cirq.H.on(input_qubit[3])) # number=20
        c.append(cirq.H.on(input_qubit[0])) # number=31
        c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=32
        c.append(cirq.H.on(input_qubit[0])) # number=33
        c.append(cirq.H.on(input_qubit[1])) # number=44
        c.append(cirq.CZ.on(input_qubit[0],input_qubit[1])) # number=45
        c.append(cirq.H.on(input_qubit[1])) # number=46
        c.append(cirq.X.on(input_qubit[1])) # number=41
        c.append(cirq.H.on(input_qubit[1])) # number=48
        c.append(cirq.CZ.on(input_qubit[0],input_qubit[1])) # number=49
        c.append(cirq.H.on(input_qubit[1])) # number=50
        c.append(cirq.X.on(input_qubit[0])) # number=26
        c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=27
        c.append(cirq.H.on(input_qubit[1])) # number=37
        c.append(cirq.CZ.on(input_qubit[0],input_qubit[1])) # number=38
        c.append(cirq.H.on(input_qubit[1])) # number=39
        c.append(cirq.X.on(input_qubit[1])) # number=35
        c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=36
        c.append(cirq.X.on(input_qubit[2])) # number=11
        c.append(cirq.X.on(input_qubit[3])) # number=12
        c.append(cirq.CNOT.on(input_qubit[3],input_qubit[2])) # number=43
        c.append(cirq.CNOT.on(input_qubit[3],input_qubit[2])) # number=47
        c.append(cirq.X.on(input_qubit[0])) # number=13
        c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=22
        c.append(cirq.X.on(input_qubit[1])) # number=23
        c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=24
        c.append(cirq.X.on(input_qubit[2])) # number=15
        c.append(cirq.X.on(input_qubit[1])) # number=29
        c.append(cirq.Y.on(input_qubit[4])) # number=28
        c.append(cirq.X.on(input_qubit[3])) # number=16
    # circuit end
    c.append(cirq.measure(*input_qubit, key='result'))
    return c
def bitstring(bits):
    """Collapse an iterable of bit-like values into a '0101...' string."""
    digits = [str(int(bit)) for bit in bits]
    return "".join(digits)
if __name__ == '__main__':
    qubit_count = 5
    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
    circuit = make_circuit(qubit_count,input_qubits)
    # Compile the circuit to the Sycamore gate set before simulating.
    circuit = cg.optimized_for_sycmore(circuit, optimizer_type='sqrt_iswap') if False else cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
    circuit_sample_count =2000
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)
    # Histogram of measured bitstrings, keyed by their '0'/'1' string form.
    frequencies = result.histogram(key='result', fold_func=bitstring)
    # Dump histogram, gate count and the circuit diagram to the CSV path.
    writefile = open("../data/startCirq1197.csv","w+")
    print(format(frequencies),file=writefile)
    print("results end", file=writefile)
    print(circuit.__len__(), file=writefile)
    print(circuit,file=writefile)
    writefile.close()
|
[
"[email protected]"
] | |
e1241643f1fdabd9675e8ec25ea0a5b2350349a4
|
62d6a37e1fb1b224b53e14a1cf151ef0571aa20f
|
/tests/fixtures/tests.py
|
abc94a63d35f2bfe008a3e1bdcf4d4b144ec1bb5
|
[] |
no_license
|
katrid/orun
|
4fa0f291a1ef43f16bc1857a170fc0b2e5e06739
|
bfc6dae06182124ba75b1f3761d81ba8ca387dea
|
refs/heads/master
| 2023-08-30T03:58:34.570527 | 2023-08-09T04:05:30 | 2023-08-09T04:05:30 | 66,562,767 | 14 | 4 | null | 2023-01-06T22:29:37 | 2016-08-25T14:01:44 |
Python
|
UTF-8
|
Python
| false | false | 2,024 |
py
|
from orun.test import TestCase
from orun.apps import apps
from orun.db import connection
class FixturesTest(TestCase):
    """Exercises fixture loading from csv/tsv/xml/sql files for the fixtures app."""
    # Fixture files loaded per app label; %(db_vendor)s selects the SQL file
    # matching the active database backend.
    fixtures = {
        'fixtures': [
            'fixtures.author.csv', 'fixtures.author.tsv', 'data.xml', 'fixtures.book.tsv', 'fixtures.book.csv',
            'metadata.%(db_vendor)s.sql',
        ],
    }
    def test_load_data(self):
        # Authors/books loaded from csv/tsv/xml are present and linked.
        Author = apps['fixtures.author']
        Book = apps['fixtures.book']
        objs = list(Author.objects.all())
        self.assertEqual(len(objs), 9)
        book = Book.objects.get(pk=1)
        self.assertEqual(book.author.name, 'Xml Author 1')
        book = Book.objects.get(pk=2)
        self.assertEqual(book.author.name, 'Author 2')
    def test_xml_objects(self):
        # Named objects declared in the XML fixture resolve to their targets.
        Object = apps['ir.object']
        obj1 = Object.objects.get_object('fixtures/xml/author/1')
        self.assertEqual(obj1.name, 'fixtures/xml/author/1')
        author1 = obj1.content_object
        self.assertEqual(author1.name, 'Xml Author 1')
        self.assertEqual(obj1.name, 'fixtures/xml/author/1')
        obj2 = Object.objects.get_object('fixtures/xml/author/2')
        author2 = obj2.content_object
        self.assertEqual(obj2.name, 'fixtures/xml/author/2')
        self.assertEqual(author2.name, 'Xml Author 2')
        # test deleted
        with self.assertRaises(Object.DoesNotExist):
            Object.objects.get_object('fixtures/xml/author/4/delete')
        Author = apps['fixtures.author']
        with self.assertRaises(Author.DoesNotExist):
            Author.objects.get(name='Xml Author 4')
    def test_sql_fixtures(self):
        # The vendor-specific SQL fixture created the 'books' view with 2 rows.
        with connection.cursor() as cursor:
            # Testing created view
            cursor.execute('''select * from books order by id''')
            books = cursor.fetchall()
            self.assertEqual(len(books), 2)
            self.assertEqual(books[0][0], 1)
            self.assertEqual(books[1][0], 2)
    def test_web_fixtures(self):
        # NOTE(review): no assertions here -- this only checks the query does
        # not raise; confirm whether the test was left unfinished.
        View = apps['ui.view']
        views = View.objects.all()
[
"[email protected]"
] | |
e29b795b791a39c16bac4e8f849e8d67b243c733
|
18dca9a552f5aa9303536613ec39f19cebf6647c
|
/CreateTrainingFiles/ArxivMAG/prepare_arxiv_hd2v_file.py
|
b73e1d6597b04b0f270151a1756b21c7527be5e2
|
[
"MIT"
] |
permissive
|
ashwath92/MastersThesis
|
9a39ed7eec825ed559d09507721c21bd12e2ab9c
|
f74755dc0c32f316da3c860dd5dbfa4c9cad97b3
|
refs/heads/master
| 2021-08-16T12:01:33.282459 | 2020-06-27T16:00:16 | 2020-06-27T16:00:16 | 197,282,312 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 14,315 |
py
|
""" Prepapres a file from Arxiv data (as well as additional mag contexts) as per the hyperdoc2vec format.
All the citation markers in the output file are MAG Ids (for the ACL papers, the mapping from ACL to
MAG can be found in /home/ashwath/Programs/ArxivCS/SQLITEDB/arxivcs_mag_mapping.sqlite3 -- table name: arxivcs_mag).
Adjacent citations are not comma-separated, but instead just placed next to each other.
The input files have citation markers with UUIDs. These UUIDs, defined in /vol2/unarXive/arxiv-txt-data/metadata.db
and mapped to mag ids in the bibitemmagidmap table, have been preprocessed in read_bibitemmagidmap_into_pickle.py
and inserted into a dictionary in a pickle."""
import os
import re
import csv
import pickle
import sqlite3
import psycopg2
import psycopg2.extras
from time import time
from gensim.parsing import preprocessing
from gensim.utils import to_unicode
import contractions
import pandas as pd
from tqdm import tqdm
import concurrent.futures
from multiprocessing import Pool, cpu_count
basepath = '/home/ashwath/Programs'
dbpath = os.path.join(basepath, 'ArxivCS', 'SQLITEDB', 'arxivcs_mag_mapping.sqlite3')


def db_connect(set_params=False, path=dbpath):
    """Open a sqlite3 connection to *path* (the arxiv/MAG mapping db by default).

    timeout=10 avoids 'database is locked' errors between concurrent inserts
    and detect_types enables datetime/date column types. With set_params=True
    a few PRAGMAs are issued to speed up bulk insertion; that flag is only
    used while the database is first being built.
    """
    connection = sqlite3.connect(
        path, timeout=10, detect_types=sqlite3.PARSE_DECLTYPES)
    if set_params is True:
        for pragma in ('PRAGMA main.journal_mode=WAL;',
                       'PRAGMA main.cache_size=10000;',
                       'PRAGMA main.locking_mode=EXCLUSIVE;'):
            connection.execute(pragma)
    return connection
# GLOBALS
# Hyperdoc2vec markers for citations
docid_prefix='=-='
docid_suffix='-=-'
# IMPORTANT: I need a set of mag ids which are cited so that i can use it to add extra mag content.
allmagpaperids = set()
# mag arxiv mapping db connection
sconn = db_connect()
scur = sconn.cursor()
# NOT BEING USED, I have now pre-loaded this into a Pandas series in a pickle
# context connection: for getting the mag id of the CITED papers
#meta_db_path = '/vol2/unarXive/arxiv-txt-data/metadata.db'
#cconn = db_connect(path=meta_db_path)
#ccur = cconn.cursor()
# Get the uuid_mag_id dict which has been precomputed into a pickle file (from the sqlite3 db)
with open('Pickles/uuid_magid_dict.pickle', 'rb') as picc:
    uuid_magid_dict = pickle.load(picc)
# Some arxiv ids are mapped to 2 magids, keep only 1 (data problem)
# 72246 rows in the results (out of 72315): 69 duplicates
# Training set is all years until 2016 (2017 is the test set)
# Training set: 62296 papers
# Test set: 9954 papers
trainingquery = """select arxiv_id, mag_id
                   from arxivcs_mag
                   where arxiv_id not like '17%'
                   group by mag_id;
                """
# Write test set
testsetquery = """select arxiv_id, mag_id
                   from arxivcs_mag
                   where arxiv_id like '17%'
                   group by mag_id;
                """
# shape: (18642, 2)
testresdf = pd.read_sql_query(testsetquery, sconn)
testresdf.to_csv('AdditionalOutputs/test_ids.tsv', index=False, sep='\t')
# shape: (53614, 2)
trainresdf = pd.read_sql_query(trainingquery, sconn)
trainresdf.to_csv('AdditionalOutputs/training_ids.tsv', index=False, sep='\t')
# Get a Series of mag ids for which we have full text
mag_id_series = trainresdf['mag_id']
# IMP: There seems to be some problem with the data?? Multiple arxiv ids are mapped to the same mag id
# Doing select mag_id from arxivcs_mag, and read_sql_query, then
# df[df.isin(df[df.duplicated()])] gives 69 records.
# Get a set of mag ids (mapped from arxiv of course) which have full text
inarxiv_papers_set = set(mag_id_series.tolist())
# POSTGRES connection obj and cursor
# NOTE(review): database credentials are hard-coded here; consider moving
# them to configuration or environment variables.
pconn = psycopg2.connect("dbname=MAG19 user=mag password=1maG$ host=shetland.informatik.uni-freiburg.de")
pcur = pconn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
# POSTGRES QUERY
# Joins a paper's title/abstract with all of its citation contexts and
# reference ids, both aggregated per citing paper.
magonly_query = """
SELECT titleandabstract.paperid, papertitle, abstract, contexts, referenceids
FROM
(
SELECT papers.paperid, papertitle, abstract FROM papers INNER JOIN paperabstracts
ON papers.paperid=paperabstracts.paperid
WHERE papers.paperid=%s) AS titleandabstract INNER JOIN
(
 SELECT paperid, string_agg(paperreferenceid::character varying, ',') AS referenceids,
 string_agg(citationcontext, ' ||--|| ') AS contexts
 FROM papercitationcontexts
 WHERE paperid=%s
 GROUP BY paperid
) AS listofcontexts
ON titleandabstract.paperid=listofcontexts.paperid;"""
# Arxiv citing, cited list based on mag ids
arxiv_citing_cited_file = open('AdditionalOutputs/arxivmag_references.tsv', 'w')
fieldnames = ['citing_mag_id', 'cited_mag_id']
writer = csv.DictWriter(arxiv_citing_cited_file, delimiter="\t", fieldnames=fieldnames)
writer.writeheader()
# {{cite:<uuid>}} markers in the raw arxiv text...
citation_pattern = re.compile(r'(\{\{cite:)([a-zA-z0-9-]+)(\}\})')
# ...and the =-=<magid>-=- markers they are rewritten to.
replaced_citation_pattern = re.compile(r'(=-=)([0-9]+)(-=-)')
def get_mag_from_uuid(matchobject):
    """re.sub callback: map a citation UUID (match group 2) to a hd2v marker.

    Unknown UUIDs degrade to the plain word 'citation'; known ones are
    recorded in allmagpaperids and wrapped in the docid prefix/suffix.
    """
    mag_id = uuid_magid_dict.get(matchobject.group(2))
    if mag_id is None:
        # UUID has no MAG mapping: emit a generic token instead of a marker.
        return 'citation'
    allmagpaperids.add(mag_id)
    return '{}{}{}'.format(docid_prefix, mag_id, docid_suffix)
def read_arxiv_addmagids(arxivfilename_plus_mag):
    """ Read arxiv full text, replace citations with mag id
    arxivfilename_plus_mag is a list of lists with the filename (arxiv name+ path.txt)
    and the correspondingly mapped mag id in each list"""
    print(arxivfilename_plus_mag, 'here')
    arxiv_filepath = arxivfilename_plus_mag[0]
    mag_id = arxivfilename_plus_mag[1]
    # Record the citing paper itself so no extra MAG content is fetched for it.
    allmagpaperids.add(mag_id)
    with open(arxiv_filepath, 'r') as arxivfile:
        content = arxivfile.read().replace('\n', ' ')
        # Replace all {{cite:ac7d7c84-d6e0-461d-a1fc-36f7ee323c07}}, i.e. \{\}cite:.*\}\}
        # Get all the word indices which need to be replaced and put it in a dict with
        # the corresponding mag id from the db.
        # Do the replacements in the words list
        content = citation_pattern.sub(get_mag_from_uuid, content)
        # Make sure to add the citing paper mag id as the first word in the line
        content = '{} {}\n'.format(mag_id, content)
        # Write to refs file:
        write_refs_file(content, mag_id)
        return content
def write_refs_file(content, mag_id):
    """Append one (citing, cited) row per citation marker found in *content*."""
    for marker in replaced_citation_pattern.finditer(content):
        # Group 2 of the marker regex is the cited paper's MAG id.
        writer.writerow({'citing_mag_id': mag_id,
                         'cited_mag_id': marker.group(2)})
def clean_text(text):
    """Normalize a title or content string.

    Flattens newlines, lower-cases, expands contractions ("you're" ->
    "you are"), strips punctuation and collapses whitespace runs.
    Stop words are deliberately kept.
    """
    flattened = text.replace('\n', ' ').lower()
    expanded = contractions.fix(flattened)
    # Strip punctuation first, then collapse the whitespace it leaves behind.
    without_punct = preprocessing.strip_punctuation(expanded)
    return preprocessing.strip_multiple_whitespaces(without_punct)
def add_additional_papers(outfile):
    """Append MAG-only papers (title + abstract + contexts) to outfile.

    Adds text for papers whose full text is not available from Arxiv.
    Care is taken that references added for THESE papers stay inside the
    set stored in allmagpaperids (otherwise the concatenated contexts
    would mention papers absent from the training text).
    NOTE: allmagpaperids contains every paper which cites or is cited so
    far, and inarxiv_papers_set the papers present in arxiv (citing), so
    the set difference is exactly the papers needing additional text.
    """
    # FIX: removed a leftover debug print of each paper title.
    additional_mag_ids = allmagpaperids - inarxiv_papers_set
    for paperid in tqdm(additional_mag_ids):
        pcur.execute(magonly_query, (paperid, paperid))
        # Each row is a dict with keys:
        # dict_keys(['paperid', 'papertitle', 'abstract', 'contexts', 'referenceids'])
        for row in pcur:
            paperid = row.get('paperid')
            # Contexts/reference ids use delimiters set in the pSQL query.
            contexts = row.get('contexts').replace('\n', ' ')
            referenceids = row.get('referenceids')
            title = clean_text(row.get('papertitle'))
            abstract = clean_text(row.get('abstract'))
            # Build a single string holding all contexts with refs inserted.
            if contexts is not None and referenceids is not None:
                contexts = contexts.split(' ||--|| ')
                referenceids = referenceids.split(',')
                contexts_with_refs = []
                # Go through context, refid pairs, one at a time.
                for context, referenceid in zip(contexts, referenceids):
                    # VERY IMPORTANT: if the referenceid is not in the
                    # allmagpaperids set, IGNORE IT.  DESIGN DECISION: the
                    # alternative would require a LOT of extra passes.
                    if referenceid in allmagpaperids:
                        writer.writerow({'citing_mag_id': paperid, 'cited_mag_id': referenceid})
                        contextlist = clean_text(context).split()
                        # Insert the reference id as the MIDDLE word of the
                        # context.  NOTE: when MAG lists multiple reference
                        # ids, only one is inserted, e.g.
                        # "... weight space probabilities =-=nips05_0451-=-".
                        index_to_insert = len(contextlist) // 2
                        value_to_insert = docid_prefix + referenceid + docid_suffix
                        contextlist.insert(index_to_insert, value_to_insert)
                        contexts_with_refs.append(' '.join(contextlist))
                    # else: do nothing, next iteration
                # After all the contexts are iterated over, join them.
                contexts_concatenated = ' '.join(contexts_with_refs)
            else:
                contexts_concatenated = ''
            # Concatenate the paperid, title, abstract and the contexts.
            content = "{} {} {} {}\n".format(paperid, title, abstract, contexts_concatenated)
            content = to_unicode(content)
            if content.strip() != '':
                outfile.write(content)
                print("Written file for {}".format(paperid))
def run_multiprocessing_pool():
    """Concurrently process all arxiv files into one consolidated training file.

    Reads every arxiv file, adds mag ids, writes the result to
    'arxiv_hd2v_training.txt', then appends additional MAG
    contexts+abstracts at the end.
    VERY IMPORTANT: a ThreadPoolExecutor is used (not a process pool) so
    that the concurrent workers share and update the global
    allmagpaperids set; child processes would not share that state.
    """
    workers = cpu_count()
    # Create a list of lists with [[arxivid, magid], [arxivid, magid], ...]
    arxiv_filepath = '/vol2/unarXive/arxiv-txt-data'
    trainresdf['arxiv_id'] = trainresdf['arxiv_id'].apply(lambda x: '{}/{}.txt'.format(arxiv_filepath, x))
    arxivmag_list = trainresdf.values.tolist()
    # FIX: context manager guarantees the output file is closed even if a
    # worker raises; previously the handle leaked on error.
    with open('arxiv_hd2v_training.txt', 'w') as output_file:
        with concurrent.futures.ThreadPoolExecutor(max_workers=64) as executor:
            # map() yields results in the same order as the input iterable.
            content = executor.map(read_arxiv_addmagids, arxivmag_list, chunksize=len(arxivmag_list)//workers)
            for text in content:
                output_file.write(text)
        # Add additional content: abstract + title + concatenated contexts
        # from MAG.  The citation marker (cited paper id) is always placed
        # in the centre of each context.
        add_additional_papers(output_file)
def main():
    """Drive the full run: build the training file, pickle id sets, clean up."""
    t_start = time()
    run_multiprocessing_pool()
    # Persist the id sets so additional MAG contexts can be added later.
    with open('Pickles/inarxiv_papers_set.pickle', 'wb') as arxiv_pickle:
        pickle.dump(inarxiv_papers_set, arxiv_pickle)
    with open('Pickles/allmagpapers_en_magcontexts.pickle', 'wb') as mag_pickle:
        pickle.dump(allmagpaperids, mag_pickle)
    # Release the refs file handle and both database connections.
    arxiv_citing_cited_file.close()
    sconn.close()
    pconn.close()
    print("Time taken:{}".format(time() - t_start))
if __name__ == '__main__':
    # Entry point: build the training file and pickle the id sets.
    main()
|
[
"[email protected]"
] | |
1e6895e6f359a03fff2e6129c7a5e162e1c1d48a
|
4ad53199feb82d911bd2edbe0b5713da8c1909c1
|
/pytablewriter/style/__init__.py
|
6be6ff1844a0928139b11cb5ac086bac8216c4f9
|
[
"MIT"
] |
permissive
|
thombashi/pytablewriter
|
9bf8b73da0eb18dba835e951021fd581958a4d12
|
49f9da777625a5b920c2c87c5e086d33d19a80d4
|
refs/heads/master
| 2023-08-19T05:13:15.333317 | 2023-07-01T08:03:47 | 2023-07-01T08:03:47 | 59,484,958 | 609 | 43 |
MIT
| 2021-09-20T15:26:45 | 2016-05-23T13:25:53 |
Python
|
UTF-8
|
Python
| false | false | 1,006 |
py
|
from dataproperty import Align, Format
from ._cell import Cell
from ._font import FontSize, FontStyle, FontWeight
from ._style import DecorationLine, Style, ThousandSeparator, VerticalAlign
from ._styler import (
GFMarkdownStyler,
HtmlStyler,
LatexStyler,
MarkdownStyler,
NullStyler,
ReStructuredTextStyler,
TextStyler,
get_align_char,
)
from ._styler_interface import StylerInterface
from ._theme import ColSeparatorStyleFilterFunc, StyleFilterFunc, Theme, fetch_theme, list_themes
# Public API of pytablewriter.style, re-exported from the private submodules.
__all__ = (
    "Align",
    "Format",
    "Cell",
    "FontSize",
    "FontStyle",
    "FontWeight",
    "Style",
    "ThousandSeparator",
    "VerticalAlign",
    "DecorationLine",
    "GFMarkdownStyler",
    "HtmlStyler",
    "LatexStyler",
    "MarkdownStyler",
    "NullStyler",
    "ReStructuredTextStyler",
    "StylerInterface",
    "TextStyler",
    "ColSeparatorStyleFilterFunc",
    "StyleFilterFunc",
    "Theme",
    "get_align_char",
    "fetch_theme",
    "list_themes",
)
|
[
"[email protected]"
] | |
5cc40f6f01d9530255a06e81239788b2eae2fb46
|
981ecc9cf59dd6f839c3e40d26601efb1d073558
|
/src/face_recognition/youtube_dl/aes.py
|
c5bb3c4ef1561847a1025a0b35095a2224582efe
|
[
"MIT"
] |
permissive
|
lodemo/CATANA
|
469e0684b816f09ac74f186552b463cc77db369e
|
a349f460772511ccbb16429b40bfb50f774d45d4
|
refs/heads/master
| 2023-03-30T04:07:12.070332 | 2021-02-03T21:47:32 | 2021-02-03T21:47:32 | 102,767,095 | 12 | 6 |
MIT
| 2023-03-24T21:55:24 | 2017-09-07T17:36:45 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 16,123 |
py
|
from __future__ import unicode_literals
import base64
from math import ceil
from .utils import bytes_to_intlist, intlist_to_bytes
BLOCK_SIZE_BYTES = 16
def aes_ctr_decrypt(data, key, counter):
    """
    Decrypt with aes in counter mode
    @param {int[]} data cipher
    @param {int[]} key 16/24/32-Byte cipher key
    @param {instance} counter Instance whose next_value function (@returns {int[]} 16-Byte block)
                      returns the next counter block
    @returns {int[]} decrypted data
    """
    expanded_key = key_expansion(key)
    block_count = int(ceil(float(len(data)) / BLOCK_SIZE_BYTES))
    decrypted_data = []
    for i in range(block_count):
        counter_block = counter.next_value()
        block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES]
        # Zero-pad a final partial block so the xor operates on 16 bytes.
        block += [0] * (BLOCK_SIZE_BYTES - len(block))
        # CTR mode: plaintext = ciphertext xor E(counter block).
        cipher_counter_block = aes_encrypt(counter_block, expanded_key)
        decrypted_data += xor(block, cipher_counter_block)
    # Trim the padding added for the final partial block.
    decrypted_data = decrypted_data[:len(data)]
    return decrypted_data
def aes_cbc_decrypt(data, key, iv):
    """
    Decrypt with aes in CBC mode
    @param {int[]} data cipher
    @param {int[]} key 16/24/32-Byte cipher key
    @param {int[]} iv 16-Byte IV
    @returns {int[]} decrypted data
    """
    expanded_key = key_expansion(key)
    block_count = int(ceil(float(len(data)) / BLOCK_SIZE_BYTES))
    decrypted_data = []
    previous_cipher_block = iv
    for i in range(block_count):
        block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES]
        # Zero-pad a final partial block so aes_decrypt always sees 16 bytes.
        block += [0] * (BLOCK_SIZE_BYTES - len(block))
        decrypted_block = aes_decrypt(block, expanded_key)
        # CBC: plaintext = D(ciphertext) xor previous ciphertext block.
        decrypted_data += xor(decrypted_block, previous_cipher_block)
        previous_cipher_block = block
    # Trim the padding added for the final partial block.
    decrypted_data = decrypted_data[:len(data)]
    return decrypted_data
def aes_cbc_encrypt(data, key, iv):
    """
    Encrypt with aes in CBC mode. Using PKCS#7 padding
    @param {int[]} data cleartext
    @param {int[]} key 16/24/32-Byte cipher key
    @param {int[]} iv 16-Byte IV
    @returns {int[]} encrypted data
    """
    expanded_key = key_expansion(key)
    block_count = int(ceil(float(len(data)) / BLOCK_SIZE_BYTES))
    encrypted_data = []
    previous_cipher_block = iv
    for i in range(block_count):
        block = data[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES]
        remaining_length = BLOCK_SIZE_BYTES - len(block)
        # NOTE(review): padding bytes are only appended to a final partial
        # block; a length that is an exact multiple of 16 gets no extra
        # padding block, which deviates from strict PKCS#7 -- confirm the
        # decrypting side expects this.
        block += [remaining_length] * remaining_length
        # CBC: ciphertext = E(plaintext xor previous ciphertext block).
        mixed_block = xor(block, previous_cipher_block)
        encrypted_block = aes_encrypt(mixed_block, expanded_key)
        encrypted_data += encrypted_block
        previous_cipher_block = encrypted_block
    return encrypted_data
def key_expansion(data):
    """
    Generate key schedule
    @param {int[]} data 16/24/32-Byte cipher key
    @returns {int[]} 176/208/240-Byte expanded key
    """
    data = data[:]  # copy
    rcon_iteration = 1
    key_size_bytes = len(data)
    expanded_key_size_bytes = (key_size_bytes // 4 + 7) * BLOCK_SIZE_BYTES
    # Grow the schedule one 4-byte word at a time until it is full.
    while len(data) < expanded_key_size_bytes:
        temp = data[-4:]
        # Each key-length boundary applies the core transform
        # (rotate, S-box, round-constant xor).
        temp = key_schedule_core(temp, rcon_iteration)
        rcon_iteration += 1
        data += xor(temp, data[-key_size_bytes: 4 - key_size_bytes])
        for _ in range(3):
            temp = data[-4:]
            data += xor(temp, data[-key_size_bytes: 4 - key_size_bytes])
        # 256-bit keys insert an extra S-box pass in the middle of the cycle.
        if key_size_bytes == 32:
            temp = data[-4:]
            temp = sub_bytes(temp)
            data += xor(temp, data[-key_size_bytes: 4 - key_size_bytes])
        # Remaining words per cycle: 3 for 256-bit, 2 for 192-bit, 0 for 128-bit.
        for _ in range(3 if key_size_bytes == 32 else 2 if key_size_bytes == 24 else 0):
            temp = data[-4:]
            data += xor(temp, data[-key_size_bytes: 4 - key_size_bytes])
    # The loop may overshoot; keep exactly the schedule length.
    data = data[:expanded_key_size_bytes]
    return data
def aes_encrypt(data, expanded_key):
    """
    Encrypt one block with aes
    @param {int[]} data 16-Byte state
    @param {int[]} expanded_key 176/208/240-Byte expanded key
    @returns {int[]} 16-Byte cipher
    """
    rounds = len(expanded_key) // BLOCK_SIZE_BYTES - 1
    # Initial whitening with the first round key.
    data = xor(data, expanded_key[:BLOCK_SIZE_BYTES])
    for i in range(1, rounds + 1):
        data = sub_bytes(data)
        data = shift_rows(data)
        # The last round skips MixColumns.
        if i != rounds:
            data = mix_columns(data)
        data = xor(data, expanded_key[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES])
    return data
def aes_decrypt(data, expanded_key):
    """
    Decrypt one block with aes
    @param {int[]} data 16-Byte cipher
    @param {int[]} expanded_key 176/208/240-Byte expanded key
    @returns {int[]} 16-Byte state
    """
    rounds = len(expanded_key) // BLOCK_SIZE_BYTES - 1
    # Apply the encryption rounds in reverse with the inverse operations.
    for i in range(rounds, 0, -1):
        data = xor(data, expanded_key[i * BLOCK_SIZE_BYTES: (i + 1) * BLOCK_SIZE_BYTES])
        # The last encryption round had no MixColumns, so skip its inverse.
        if i != rounds:
            data = mix_columns_inv(data)
        data = shift_rows_inv(data)
        data = sub_bytes_inv(data)
    # Undo the initial whitening.
    data = xor(data, expanded_key[:BLOCK_SIZE_BYTES])
    return data
def aes_decrypt_text(data, password, key_size_bytes):
    """
    Decrypt text
    - The first 8 Bytes of decoded 'data' are the 8 high Bytes of the counter
    - The cipher key is retrieved by encrypting the first 16 Byte of 'password'
      with the first 'key_size_bytes' Bytes from 'password' (if necessary filled with 0's)
    - Mode of operation is 'counter'
    @param {str} data Base64 encoded string
    @param {str,unicode} password Password (will be encoded with utf-8)
    @param {int} key_size_bytes Possible values: 16 for 128-Bit, 24 for 192-Bit or 32 for 256-Bit
    @returns {str} Decrypted data
    """
    NONCE_LENGTH_BYTES = 8
    data = bytes_to_intlist(base64.b64decode(data.encode('utf-8')))
    password = bytes_to_intlist(password.encode('utf-8'))
    # Derive the key: zero-pad the password to the key size, encrypt its
    # first block with itself, and tile the result to the key length.
    key = password[:key_size_bytes] + [0] * (key_size_bytes - len(password))
    key = aes_encrypt(key[:BLOCK_SIZE_BYTES], key_expansion(key)) * (key_size_bytes // BLOCK_SIZE_BYTES)
    nonce = data[:NONCE_LENGTH_BYTES]
    cipher = data[NONCE_LENGTH_BYTES:]
    class Counter(object):
        # 16-byte counter block: 8-byte nonce followed by 8 zero bytes
        # that inc() advances big-endian on every call.
        __value = nonce + [0] * (BLOCK_SIZE_BYTES - NONCE_LENGTH_BYTES)
        def next_value(self):
            temp = self.__value
            self.__value = inc(self.__value)
            return temp
    decrypted_data = aes_ctr_decrypt(cipher, key, Counter())
    plaintext = intlist_to_bytes(decrypted_data)
    return plaintext
# AES round constants for the key schedule (index 0 is unused by the code,
# which starts rcon_iteration at 1).
RCON = (0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36)
# Forward substitution box (SubBytes).
SBOX = (0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5, 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76,
        0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0, 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0,
        0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC, 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15,
        0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A, 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75,
        0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0, 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84,
        0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B, 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF,
        0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85, 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8,
        0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5, 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,
        0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17, 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,
        0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88, 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,
        0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C, 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,
        0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9, 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,
        0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6, 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,
        0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E, 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E,
        0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94, 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,
        0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68, 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16)
# Inverse substitution box (InvSubBytes).
SBOX_INV = (0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb,
            0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb,
            0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e,
            0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25,
            0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92,
            0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84,
            0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06,
            0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b,
            0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73,
            0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e,
            0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b,
            0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4,
            0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f,
            0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef,
            0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61,
            0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d)
# MixColumns coefficient matrix over GF(2^8).
MIX_COLUMN_MATRIX = ((0x2, 0x3, 0x1, 0x1),
                     (0x1, 0x2, 0x3, 0x1),
                     (0x1, 0x1, 0x2, 0x3),
                     (0x3, 0x1, 0x1, 0x2))
# Inverse MixColumns coefficient matrix.
MIX_COLUMN_MATRIX_INV = ((0xE, 0xB, 0xD, 0x9),
                         (0x9, 0xE, 0xB, 0xD),
                         (0xD, 0x9, 0xE, 0xB),
                         (0xB, 0xD, 0x9, 0xE))
# Exponentiation table (generator 0x03) for GF(2^8) multiplication.
RIJNDAEL_EXP_TABLE = (0x01, 0x03, 0x05, 0x0F, 0x11, 0x33, 0x55, 0xFF, 0x1A, 0x2E, 0x72, 0x96, 0xA1, 0xF8, 0x13, 0x35,
                      0x5F, 0xE1, 0x38, 0x48, 0xD8, 0x73, 0x95, 0xA4, 0xF7, 0x02, 0x06, 0x0A, 0x1E, 0x22, 0x66, 0xAA,
                      0xE5, 0x34, 0x5C, 0xE4, 0x37, 0x59, 0xEB, 0x26, 0x6A, 0xBE, 0xD9, 0x70, 0x90, 0xAB, 0xE6, 0x31,
                      0x53, 0xF5, 0x04, 0x0C, 0x14, 0x3C, 0x44, 0xCC, 0x4F, 0xD1, 0x68, 0xB8, 0xD3, 0x6E, 0xB2, 0xCD,
                      0x4C, 0xD4, 0x67, 0xA9, 0xE0, 0x3B, 0x4D, 0xD7, 0x62, 0xA6, 0xF1, 0x08, 0x18, 0x28, 0x78, 0x88,
                      0x83, 0x9E, 0xB9, 0xD0, 0x6B, 0xBD, 0xDC, 0x7F, 0x81, 0x98, 0xB3, 0xCE, 0x49, 0xDB, 0x76, 0x9A,
                      0xB5, 0xC4, 0x57, 0xF9, 0x10, 0x30, 0x50, 0xF0, 0x0B, 0x1D, 0x27, 0x69, 0xBB, 0xD6, 0x61, 0xA3,
                      0xFE, 0x19, 0x2B, 0x7D, 0x87, 0x92, 0xAD, 0xEC, 0x2F, 0x71, 0x93, 0xAE, 0xE9, 0x20, 0x60, 0xA0,
                      0xFB, 0x16, 0x3A, 0x4E, 0xD2, 0x6D, 0xB7, 0xC2, 0x5D, 0xE7, 0x32, 0x56, 0xFA, 0x15, 0x3F, 0x41,
                      0xC3, 0x5E, 0xE2, 0x3D, 0x47, 0xC9, 0x40, 0xC0, 0x5B, 0xED, 0x2C, 0x74, 0x9C, 0xBF, 0xDA, 0x75,
                      0x9F, 0xBA, 0xD5, 0x64, 0xAC, 0xEF, 0x2A, 0x7E, 0x82, 0x9D, 0xBC, 0xDF, 0x7A, 0x8E, 0x89, 0x80,
                      0x9B, 0xB6, 0xC1, 0x58, 0xE8, 0x23, 0x65, 0xAF, 0xEA, 0x25, 0x6F, 0xB1, 0xC8, 0x43, 0xC5, 0x54,
                      0xFC, 0x1F, 0x21, 0x63, 0xA5, 0xF4, 0x07, 0x09, 0x1B, 0x2D, 0x77, 0x99, 0xB0, 0xCB, 0x46, 0xCA,
                      0x45, 0xCF, 0x4A, 0xDE, 0x79, 0x8B, 0x86, 0x91, 0xA8, 0xE3, 0x3E, 0x42, 0xC6, 0x51, 0xF3, 0x0E,
                      0x12, 0x36, 0x5A, 0xEE, 0x29, 0x7B, 0x8D, 0x8C, 0x8F, 0x8A, 0x85, 0x94, 0xA7, 0xF2, 0x0D, 0x17,
                      0x39, 0x4B, 0xDD, 0x7C, 0x84, 0x97, 0xA2, 0xFD, 0x1C, 0x24, 0x6C, 0xB4, 0xC7, 0x52, 0xF6, 0x01)
# Discrete-logarithm table, the inverse of RIJNDAEL_EXP_TABLE.
RIJNDAEL_LOG_TABLE = (0x00, 0x00, 0x19, 0x01, 0x32, 0x02, 0x1a, 0xc6, 0x4b, 0xc7, 0x1b, 0x68, 0x33, 0xee, 0xdf, 0x03,
                      0x64, 0x04, 0xe0, 0x0e, 0x34, 0x8d, 0x81, 0xef, 0x4c, 0x71, 0x08, 0xc8, 0xf8, 0x69, 0x1c, 0xc1,
                      0x7d, 0xc2, 0x1d, 0xb5, 0xf9, 0xb9, 0x27, 0x6a, 0x4d, 0xe4, 0xa6, 0x72, 0x9a, 0xc9, 0x09, 0x78,
                      0x65, 0x2f, 0x8a, 0x05, 0x21, 0x0f, 0xe1, 0x24, 0x12, 0xf0, 0x82, 0x45, 0x35, 0x93, 0xda, 0x8e,
                      0x96, 0x8f, 0xdb, 0xbd, 0x36, 0xd0, 0xce, 0x94, 0x13, 0x5c, 0xd2, 0xf1, 0x40, 0x46, 0x83, 0x38,
                      0x66, 0xdd, 0xfd, 0x30, 0xbf, 0x06, 0x8b, 0x62, 0xb3, 0x25, 0xe2, 0x98, 0x22, 0x88, 0x91, 0x10,
                      0x7e, 0x6e, 0x48, 0xc3, 0xa3, 0xb6, 0x1e, 0x42, 0x3a, 0x6b, 0x28, 0x54, 0xfa, 0x85, 0x3d, 0xba,
                      0x2b, 0x79, 0x0a, 0x15, 0x9b, 0x9f, 0x5e, 0xca, 0x4e, 0xd4, 0xac, 0xe5, 0xf3, 0x73, 0xa7, 0x57,
                      0xaf, 0x58, 0xa8, 0x50, 0xf4, 0xea, 0xd6, 0x74, 0x4f, 0xae, 0xe9, 0xd5, 0xe7, 0xe6, 0xad, 0xe8,
                      0x2c, 0xd7, 0x75, 0x7a, 0xeb, 0x16, 0x0b, 0xf5, 0x59, 0xcb, 0x5f, 0xb0, 0x9c, 0xa9, 0x51, 0xa0,
                      0x7f, 0x0c, 0xf6, 0x6f, 0x17, 0xc4, 0x49, 0xec, 0xd8, 0x43, 0x1f, 0x2d, 0xa4, 0x76, 0x7b, 0xb7,
                      0xcc, 0xbb, 0x3e, 0x5a, 0xfb, 0x60, 0xb1, 0x86, 0x3b, 0x52, 0xa1, 0x6c, 0xaa, 0x55, 0x29, 0x9d,
                      0x97, 0xb2, 0x87, 0x90, 0x61, 0xbe, 0xdc, 0xfc, 0xbc, 0x95, 0xcf, 0xcd, 0x37, 0x3f, 0x5b, 0xd1,
                      0x53, 0x39, 0x84, 0x3c, 0x41, 0xa2, 0x6d, 0x47, 0x14, 0x2a, 0x9e, 0x5d, 0x56, 0xf2, 0xd3, 0xab,
                      0x44, 0x11, 0x92, 0xd9, 0x23, 0x20, 0x2e, 0x89, 0xb4, 0x7c, 0xb8, 0x26, 0x77, 0x99, 0xe3, 0xa5,
                      0x67, 0x4a, 0xed, 0xde, 0xc5, 0x31, 0xfe, 0x18, 0x0d, 0x63, 0x8c, 0x80, 0xc0, 0xf7, 0x70, 0x07)
def sub_bytes(data):
    # Apply the forward S-box to every byte of the state.
    return list(map(SBOX.__getitem__, data))
def sub_bytes_inv(data):
    # Apply the inverse S-box to every byte of the state.
    return list(map(SBOX_INV.__getitem__, data))
def rotate(data):
    # Rotate one position to the left: the first byte moves to the end.
    head, tail = data[0], data[1:]
    return tail + [head]
def key_schedule_core(data, rcon_iteration):
    # Rotate, substitute, then xor the round constant into the first byte.
    word = sub_bytes(rotate(data))
    word[0] ^= RCON[rcon_iteration]
    return word
def xor(data1, data2):
    # Element-wise xor; the result is as long as the shorter input.
    result = []
    for left, right in zip(data1, data2):
        result.append(left ^ right)
    return result
def rijndael_mul(a, b):
    # GF(2^8) multiplication via the log/exp tables; zero short-circuits
    # because log(0) is undefined.
    if a == 0 or b == 0:
        return 0
    log_sum = RIJNDAEL_LOG_TABLE[a] + RIJNDAEL_LOG_TABLE[b]
    return RIJNDAEL_EXP_TABLE[log_sum % 0xFF]
def mix_column(data, matrix):
    # Multiply a 4-byte column by the coefficient matrix over GF(2^8);
    # field addition is xor.
    mixed = []
    for coefficients in matrix:
        acc = 0
        for value, coefficient in zip(data, coefficients):
            acc ^= rijndael_mul(value, coefficient)
        mixed.append(acc)
    return mixed
def mix_columns(data, matrix=MIX_COLUMN_MATRIX):
    # Transform each of the four state columns independently.
    mixed = []
    for start in range(0, 16, 4):
        mixed += mix_column(data[start:start + 4], matrix)
    return mixed
def mix_columns_inv(data):
    # Inverse MixColumns: same routine with the inverse coefficient matrix.
    return mix_columns(data, MIX_COLUMN_MATRIX_INV)
def shift_rows(data):
    # The state is column-major; row r is rotated r positions to the left.
    return [
        data[((column + row) & 0b11) * 4 + row]
        for column in range(4)
        for row in range(4)
    ]
def shift_rows_inv(data):
    # Inverse of shift_rows: row r is rotated r positions to the right.
    return [
        data[((column - row) & 0b11) * 4 + row]
        for column in range(4)
        for row in range(4)
    ]
def inc(data):
    # Big-endian increment of a byte list, wrapping to all zeros on a
    # full carry; the input list is not modified.
    result = list(data)
    for position in reversed(range(len(result))):
        if result[position] != 255:
            result[position] += 1
            break
        result[position] = 0
    return result
__all__ = ['aes_encrypt', 'key_expansion', 'aes_ctr_decrypt', 'aes_cbc_decrypt', 'aes_decrypt_text']
|
[
"[email protected]"
] | |
77d950e90b279c3c0f8a7d1aa395e6b81e1a73ab
|
06c1e0add693ba6d9fa3421f6e3713ea1df47036
|
/snakeskin/path_finders/__init__.py
|
8a012c900caefe2b281e754cabfa768043c569a9
|
[
"Apache-2.0"
] |
permissive
|
ewanbarr/snakeskin
|
95dc9069c61de60097aa78b0d68218e3ed240445
|
b41a5393e9b4ab42fd6245e022dd4923be01815b
|
refs/heads/master
| 2020-04-15T07:20:24.400522 | 2015-12-07T04:55:34 | 2015-12-07T04:55:34 | 33,218,383 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 44 |
py
|
from base_path_finder import BasePathFinder
|
[
"[email protected]"
] | |
a29e5f81d40ccd06b6053ab1c38c7a185c9ec5fc
|
7851871aa904c8e02b88690ef4423f8d988f8a90
|
/square_no_list.py
|
a8a23de9e4c0dfc5702f6d0c31443e03f6b7e24a
|
[] |
no_license
|
sharda2001/list
|
98809d9e0913adf9691523eb380fef4aa13fb703
|
40c2328e7da6dd410945e9febf767ba78f66cea1
|
refs/heads/main
| 2023-06-17T09:54:10.122583 | 2021-07-13T03:52:00 | 2021-07-13T03:52:00 | 377,053,205 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 102 |
py
|
# Square every value in the list and display the result.
numbers = [1, 2, 3, 4, 5]
squared_numbers = list(map(lambda value: value ** 2, numbers))
print(squared_numbers)
|
[
"[email protected]"
] | |
c4ab791f131770d16025600c9969fa275bcb485e
|
6527b66fd08d9e7f833973adf421faccd8b765f5
|
/yuancloud/recicler/localizaciones/l10n_be_invoice_bba/__init__.py
|
8c3517b22a87f1e464f6866fc7d7621f263d5a7d
|
[] |
no_license
|
cash2one/yuancloud
|
9a41933514e57167afb70cb5daba7f352673fb4d
|
5a4fd72991c846d5cb7c5082f6bdfef5b2bca572
|
refs/heads/master
| 2021-06-19T22:11:08.260079 | 2017-06-29T06:26:15 | 2017-06-29T06:26:15 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 210 |
py
|
# -*- encoding: utf-8 -*-
# Part of YuanCloud. See LICENSE file for full copyright and licensing details.
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
import partner
import invoice
|
[
"[email protected]"
] | |
ae825fe3516b3c4458a8137c101f289786af735c
|
3ced55b04ec82df5257f0e3b500fba89ddf73a8a
|
/src/stk/molecular/topology_graphs/cage/two_plus_four/two_plus_four.py
|
80aa18537329f8d918ee7fea003280f088245115
|
[
"MIT"
] |
permissive
|
rdguerrerom/stk
|
317282d22f5c4c99a1a8452023c490fd2f711357
|
1ac2ecbb5c9940fe49ce04cbf5603fd7538c475a
|
refs/heads/master
| 2023-08-23T21:04:46.854062 | 2021-10-16T14:01:38 | 2021-10-16T14:01:38 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,112 |
py
|
"""
Two Plus Four
=============
"""
from ..cage import Cage
from ..vertices import LinearVertex, NonLinearVertex
from ...topology_graph import Edge
class TwoPlusFour(Cage):
    """
    Represents a capsule cage topology graph.

    Two tetratopic building blocks sit at the poles of the capsule and
    are linked through four ditopic building blocks arranged around the
    equator.

    Nonlinear building blocks with four functional groups are
    required for this topology.

    Linear building blocks with two functional groups are required for
    this topology.

    When using a :class:`dict` for the `building_blocks` parameter,
    as in :ref:`cage-topology-graph-examples`:
    *Multi-Building Block Cage Construction*, a
    :class:`.BuildingBlock`, with the following number of functional
    groups, needs to be assigned to each of the following vertex ids:

        | 4-functional groups: (0, 1)
        | 2-functional groups: (2, 3, 4, 5)

    See :class:`.Cage` for more details and examples, including
    unoptimized and :class:`.Collapser`-optimized construction.
    """

    # Two tetratopic vertices on the z axis; four ditopic vertices in
    # the z = 0 plane.
    _vertex_prototypes = (
        NonLinearVertex(0, [0, 0, -1]),
        NonLinearVertex(1, [0, 0, 1]),
        LinearVertex(2, [2, 0, 0], False),
        LinearVertex(3, [-2, 0, 0], False),
        LinearVertex(4, [0, 2, 0], False),
        LinearVertex(5, [0, -2, 0], False),
    )

    # Every equatorial (ditopic) vertex connects to both poles.
    _edge_prototypes = (
        Edge(0, _vertex_prototypes[2], _vertex_prototypes[0]),
        Edge(1, _vertex_prototypes[2], _vertex_prototypes[1]),
        Edge(2, _vertex_prototypes[3], _vertex_prototypes[0]),
        Edge(3, _vertex_prototypes[3], _vertex_prototypes[1]),
        Edge(4, _vertex_prototypes[4], _vertex_prototypes[0]),
        Edge(5, _vertex_prototypes[4], _vertex_prototypes[1]),
        Edge(6, _vertex_prototypes[5], _vertex_prototypes[0]),
        Edge(7, _vertex_prototypes[5], _vertex_prototypes[1]),
    )

    _num_windows = 4
    _num_window_types = 1
|
[
"[email protected]"
] | |
0799da7f0ed0a7e68edd997eeaa9deedf6405066
|
8f70b40ef1c657ee14accfe6e2f8b1ebb1bebb7e
|
/schoolinfo/urls.py
|
e842061d58d48d531db89aafe7420297d52ef38e
|
[] |
no_license
|
TejashviVerma/School_ERP
|
e3d6f1aabe92167c2b55c0b1682dde505bb04edd
|
11406da8b1d8701b7ea55f75c76f1cbf44a72c53
|
refs/heads/master
| 2023-08-03T15:10:11.481306 | 2020-09-13T18:02:40 | 2020-09-13T18:02:40 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 117 |
py
|
from django.urls import path
from . import views
# URL routes for the schoolinfo app: the app root renders views.home.
urlpatterns = [
    path('', views.home, name="addSchoolInfo"),
]
|
[
"[email protected]"
] | |
6305425047bc6275d2a171616fbdffe8a360ec2c
|
674f5dde693f1a60e4480e5b66fba8f24a9cb95d
|
/armulator/armv6/opcodes/concrete/rsb_register_shifted_register_a1.py
|
584074016b2edaaf59d9ac2ff84cb51509bec935
|
[
"MIT"
] |
permissive
|
matan1008/armulator
|
75211c18ebc9cd9d33a02890e76fc649483c3aad
|
44f4275ab1cafff3cf7a1b760bff7f139dfffb07
|
refs/heads/master
| 2023-08-17T14:40:52.793120 | 2023-08-08T04:57:02 | 2023-08-08T04:57:02 | 91,716,042 | 29 | 7 |
MIT
| 2023-08-08T04:55:59 | 2017-05-18T16:37:55 |
Python
|
UTF-8
|
Python
| false | false | 837 |
py
|
from armulator.armv6.bits_ops import substring, bit_at
from armulator.armv6.opcodes.abstract_opcodes.rsb_register_shifted_register import RsbRegisterShiftedRegister
from armulator.armv6.shift import decode_reg_shift
class RsbRegisterShiftedRegisterA1(RsbRegisterShiftedRegister):
    @staticmethod
    def from_bitarray(instr, processor):
        """Decode an A1-encoded RSB (register-shifted register) instruction.

        Extracts the register fields, shift type and S flag from the
        instruction word; an all-ones register field makes the encoding
        unpredictable and no opcode object is returned.
        """
        s = bit_at(instr, 20)
        rn = substring(instr, 19, 16)
        rd = substring(instr, 15, 12)
        rs = substring(instr, 11, 8)
        type_o = substring(instr, 6, 5)
        rm = substring(instr, 3, 0)
        if 0b1111 in (rd, rn, rm, rs):
            print('unpredictable')
        else:
            shift_t = decode_reg_shift(type_o)
            return RsbRegisterShiftedRegisterA1(instr, setflags=s, m=rm, s=rs, d=rd, n=rn, shift_t=shift_t)
|
[
"[email protected]"
] | |
8d26a6f969809cb725345cdc97e909cdc61f535b
|
97a39cfdbd0ae4310eef729785630438278d3279
|
/manage.py
|
4dfa3b998a58a9b60a40062cf56854fe68d23419
|
[
"Apache-2.0"
] |
permissive
|
cvlucian/confidant
|
e9ddf15885ec6a4442422a00d7c9d2a84f8dfa20
|
8e273fb813d57ae831343f7d047b32a8f62458cb
|
refs/heads/master
| 2021-01-13T09:37:39.757319 | 2020-09-23T14:35:53 | 2020-09-23T14:35:53 | 72,053,900 | 1 | 0 |
NOASSERTION
| 2020-09-23T14:36:19 | 2016-10-26T23:44:55 |
Python
|
UTF-8
|
Python
| false | false | 809 |
py
|
from flask.ext.script import Manager
import confidant.workarounds # noqa
from confidant import app
from scripts.utils import ManageGrants
from scripts.utils import RevokeGrants
from scripts.bootstrap import GenerateSecretsBootstrap
from scripts.bootstrap import DecryptSecretsBootstrap
# Flask-Script manager wrapping the confidant Flask app.
manager = Manager(app.app)
# Ensure KMS grants are set up for services.
manager.add_command("manage_kms_auth_grants", ManageGrants)
# Revoke all KMS grants.
manager.add_command("revoke_all_kms_auth_grants", RevokeGrants)
# Generate an encrypted blob from a file.
manager.add_command("generate_secrets_bootstrap", GenerateSecretsBootstrap)
# Show the YAML formatted secrets_bootstrap in decrypted form.
manager.add_command("decrypt_secrets_bootstrap", DecryptSecretsBootstrap)
if __name__ == "__main__":
    # Dispatch the subcommand named on the command line.
    manager.run()
|
[
"[email protected]"
] | |
78df3320c27ab2b3e2c072df6c4e2ef16a3b7759
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_116/1469.py
|
8fbe59658076b2a46a7c77ed1bf039f34b16f0ae
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,234 |
py
|
import numpy as np
def checkWin(p):
    """Classify a line product: cells encode X as 2, O as 3, T as 1, empty as 0.

    A product of 0 means the line has an empty cell ('no' winner yet);
    divisibility by exactly one of 2 or 3 means that player owns the
    whole line (T divides everything); otherwise the line is mixed.
    """
    if p == 0:
        return "no"
    has_x_factor = p % 2 == 0
    has_o_factor = p % 3 == 0
    if has_x_factor and not has_o_factor:
        return 'X'
    if has_o_factor and not has_x_factor:
        return 'O'
    return 'draw'
def solve(filename):
    """Solve the Tic-Tac-Toe-Tomek cases in <filename>.in, writing <filename>.out.

    Each 4x4 cell is encoded as a factor (X=2, O=3, T=1, empty=0) so a
    row/column/diagonal product reveals its owner via checkWin().
    NOTE(review): this file is Python 2 (xrange, print statement), and
    np.int is removed in NumPy >= 1.24 -- update both when porting.
    """
    fin = open(filename + '.in', 'r')
    fout = open(filename + '.out', 'w')
    T = int(fin.readline())
    for case in xrange(T):
        answer = ""
        board = np.zeros((4, 4), np.int)
        # Parse the 4x4 board into the prime-factor encoding.
        for i in xrange(4):
            line = fin.readline().strip()
            for j in xrange(4):
                if line[j] == 'X':
                    board[i, j] = 2
                elif line[j] == 'O':
                    board[i, j] = 3
                elif line[j] == 'T':
                    board[i, j] = 1
        # Check rows and columns.
        prods = []
        for i in xrange(4):
            row_prod = np.prod(board[i, :])
            col_prod = np.prod(board[:, i])
            prods.append(checkWin(row_prod))
            prods.append(checkWin(col_prod))
        # Check both diagonals.
        prod_diag1 = 1
        prod_diag2 = 1
        for i in xrange(4):
            prod_diag1 *= board[i, i]
            prod_diag2 *= board[i, 3 - i]
        prods.append(checkWin(prod_diag1))
        prods.append(checkWin(prod_diag2))
        # Decide the verdict: any 'no' line means the board has an empty
        # cell, so a mixed-only result means the game is still running.
        if 'no' in prods:
            if 'X' not in prods and 'O' not in prods:
                answer = 'Game has not completed'
            elif 'X' in prods and 'O' not in prods:
                answer = 'X won'
            elif 'X' not in prods and 'O' in prods:
                answer = 'O won'
        else:
            if 'X' not in prods and 'O' not in prods:
                answer = 'Draw'
            elif 'X' in prods and 'O' not in prods:
                answer = 'X won'
            elif 'X' not in prods and 'O' in prods:
                answer = 'O won'
        print answer
        fout.write(('Case #%d: ' % (case + 1)) + str(answer) + '\n')
        # Skip the blank separator line between cases.
        fin.readline()
    fin.close()
    fout.close()
if __name__ == "__main__":
# solve("A-tiny")
# solve("A-small-attempt0")
solve("A-large")
#solve("input")
|
[
"[email protected]"
] | |
0dc35183393d83eb31bf25b1f1f39d1850886c4d
|
17ef1c7483843540ce4d063708afa65430b9301f
|
/tests/test_allocate.py
|
b4281fbb11694e1dbc38fd7af714e2195439f9b5
|
[
"MIT"
] |
permissive
|
CivicKnowledge/synpums
|
e01f8815c5fe118ec748c248b84c862a1db15a3f
|
dd3793388862aa7b43eee2fc2aa96fcf21014267
|
refs/heads/main
| 2023-01-03T09:04:37.021235 | 2020-10-31T00:17:15 | 2020-10-31T00:17:15 | 304,128,332 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 835 |
py
|
import unittest
import warnings
import pandas as pd
import rowgenerators as rg
from synpums import *
from synpums.util import *
warnings.filterwarnings("ignore")
# Test fixture parameters: Rhode Island, 2018 ACS, 5-year release.
# NOTE(review): `state`, `year` and `release` are not referenced inside the
# test below ('RI' is re-hardcoded in get_tasks) — presumably kept for
# interactive use; verify before removing.
state = 'RI'
year = 2018
release = 5
cache_dir = '/tmp/synpums'


class TestAllocate(unittest.TestCase):
    """Smoke test for the synpums allocation vector walk on a cached RI task.

    Requires a pre-populated task cache under ``cache_dir``; prints error
    metrics rather than asserting on them.
    """

    def test_basic(self):
        # ignore_completed=False so previously finished tasks are still listed.
        tasks = AllocationTask.get_tasks(cache_dir, 'RI', ignore_completed=False)
        task = tasks[24]  # arbitrary fixed index — assumes the cache yields >= 25 tasks
        task.init()
        print(task.m90_rms_error)
        task.initialize_weights_sample()
        print(f"te={task.total_error}, rms={task.m90_rms_error}")
        # Hyperparameters for the stochastic walk — semantics defined by
        # synpums.AllocationTask.vector_walk; TODO confirm meanings there.
        args = dict(N=2000, min_iter=1000, step_size_max=15, step_size_min=1, reversal_rate=.4, max_ssm=150)
        rows = task.vector_walk(**args)
        print(f"te={task.total_error}, rms={task.m90_rms_error}")


if __name__ == '__main__':
    unittest.main()
|
[
"[email protected]"
] | |
5a5e0ce76558c3b94ad2149478844745d1f5087a
|
67f19ebb1fb3189e4c2f99484c1dc13af5099edb
|
/wii_packages/enso/gage_don_h/gage_don_h.py
|
08da11557b1626666c779f60cf484d446bd3aa80
|
[] |
no_license
|
delguoqing/PyLMPlayer
|
609c4fe35e56e4ce3ce30eeb2e9244aad5ea1609
|
db8a1edf70ac1c11deffddc458788b3a2c2078df
|
refs/heads/master
| 2021-01-22T05:06:00.491732 | 2013-09-13T04:54:23 | 2013-09-13T04:54:23 | 8,878,510 | 5 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 343 |
py
|
def func0(this, _global):
    """Frame action: halt playback of the current clip."""
    this.stop()


def func1(this, _global):
    """Frame action: jump to the "fever" label and continue playing."""
    this.gotoAndPlay("fever")


def func2(this, _global):
    """Frame action: if the fever gauge is inside the fever animation
    range (play head 2..23), wind it back to normal; then stop."""
    head = this.fever_gage._play_head
    if 2 <= head and head <= 23:
        this.fever_gage.gotoAndPlay("toNormal")
    this.stop()


def func3(this, _global):
    """Frame action: start the gauge's fever animation, then stop."""
    this.fever_gage.gotoAndPlay("toFever")
    this.stop()


# Frame-action dispatch table consumed by the player runtime.
DATA = (
    func0,
    func1,
    func2,
    func3,
)
|
[
"[email protected]"
] | |
8d13198a10bafeba6b94dad3cf02953c983de332
|
67325192c1e528a39d457f11e61b480d68826708
|
/mods/mcpython/Item/gold_block.py
|
248d5e0998a17f7d438e81b093ded15dc48a62bd
|
[
"MIT"
] |
permissive
|
vashistaarav1611/mcpython-a-minecraft-clone-in-python
|
5851b377b54fd2b28c106112c7b18f397b71ab50
|
c16cd66f319efdeec4130e1a43f5a857caf1ea13
|
refs/heads/master
| 2023-02-01T22:48:51.787106 | 2020-12-21T15:02:25 | 2020-12-21T15:02:25 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 222 |
py
|
from .Item import *
class GoldBlock(Item):
    """Item definition for the gold block, registered with the item handler
    below (``Item`` and ``handler`` come from the ``.Item`` star-import)."""

    def getName(self):
        # Namespaced registry identifier for this item.
        return "minecraft:gold_block"

    def getTexturFile(self):
        # NOTE(review): name looks like a typo of "getTextureFile", but it must
        # match the method name the Item interface/renderer calls — do not
        # rename here without changing every sibling item class.
        return "./assets/textures/items/gold_block.png"


handler.register(GoldBlock)
|
[
"[email protected]"
] | |
087bc7514170d26a886ceb157ad850b49b661a4b
|
adea9fc9697f5201f4cb215571025b0493e96b25
|
/napalm_yang/models/openconfig/network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/__init__.py
|
50aadd5a1163361e23e5c78af06fbd8a3a0a4ce6
|
[
"Apache-2.0"
] |
permissive
|
andyjsharp/napalm-yang
|
d8a8b51896ef7c6490f011fe265db46f63f54248
|
ef80ebbfb50e188f09486380c88b058db673c896
|
refs/heads/develop
| 2021-09-09T02:09:36.151629 | 2018-03-08T22:44:04 | 2018-03-08T22:44:04 | 114,273,455 | 0 | 0 | null | 2018-03-08T22:44:05 | 2017-12-14T16:33:35 |
Python
|
UTF-8
|
Python
| false | false | 30,938 |
py
|
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
unicode = str
elif six.PY2:
import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/connection-points/connection-point/endpoints/endpoint/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Operational state parameters relating to the
endpoint
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__endpoint_id','__precedence','__type','__active',)
_yang_name = 'state'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
self.__type = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)
self.__precedence = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="precedence", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint16', is_config=False)
self.__endpoint_id = YANGDynClass(base=unicode, is_leaf=True, yang_name="endpoint-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'connection-points', u'connection-point', u'endpoints', u'endpoint', u'state']
def _get_endpoint_id(self):
"""
Getter method for endpoint_id, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/endpoint_id (string)
YANG Description: An identifier for the endpoint
"""
return self.__endpoint_id
def _set_endpoint_id(self, v, load=False):
"""
Setter method for endpoint_id, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/endpoint_id (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_endpoint_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_endpoint_id() directly.
YANG Description: An identifier for the endpoint
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="endpoint-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """endpoint_id must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="endpoint-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=False)""",
})
self.__endpoint_id = t
if hasattr(self, '_set'):
self._set()
def _unset_endpoint_id(self):
self.__endpoint_id = YANGDynClass(base=unicode, is_leaf=True, yang_name="endpoint-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=False)
def _get_precedence(self):
"""
Getter method for precedence, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/precedence (uint16)
YANG Description: The precedence of the endpoint - the lowest precendence
viable endpoint will be utilised as the active endpoint
within a connection
"""
return self.__precedence
def _set_precedence(self, v, load=False):
"""
Setter method for precedence, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/precedence (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_precedence is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_precedence() directly.
YANG Description: The precedence of the endpoint - the lowest precendence
viable endpoint will be utilised as the active endpoint
within a connection
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="precedence", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint16', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """precedence must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="precedence", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint16', is_config=False)""",
})
self.__precedence = t
if hasattr(self, '_set'):
self._set()
def _unset_precedence(self):
self.__precedence = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="precedence", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint16', is_config=False)
def _get_type(self):
"""
Getter method for type, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/type (identityref)
YANG Description: The type of endpoint that is referred to by the current
endpoint
"""
return self.__type
def _set_type(self, v, load=False):
"""
Setter method for type, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/type (identityref)
If this variable is read-only (config: false) in the
source YANG file, then _set_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_type() directly.
YANG Description: The type of endpoint that is referred to by the current
endpoint
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """type must be of a type compatible with identityref""",
'defined-type': "openconfig-network-instance:identityref",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)""",
})
self.__type = t
if hasattr(self, '_set'):
self._set()
def _unset_type(self):
self.__type = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)
def _get_active(self):
"""
Getter method for active, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/active (boolean)
YANG Description: When the backup endpoint is active, the value of this
parameter is set to true
"""
return self.__active
def _set_active(self, v, load=False):
"""
Setter method for active, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/active (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_active is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_active() directly.
YANG Description: When the backup endpoint is active, the value of this
parameter is set to true
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """active must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
})
self.__active = t
if hasattr(self, '_set'):
self._set()
def _unset_active(self):
self.__active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
endpoint_id = __builtin__.property(_get_endpoint_id)
precedence = __builtin__.property(_get_precedence)
type = __builtin__.property(_get_type)
active = __builtin__.property(_get_active)
_pyangbind_elements = {'endpoint_id': endpoint_id, 'precedence': precedence, 'type': type, 'active': active, }
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/connection-points/connection-point/endpoints/endpoint/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Operational state parameters relating to the
endpoint
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__endpoint_id','__precedence','__type','__active',)
_yang_name = 'state'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
self.__type = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)
self.__precedence = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="precedence", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint16', is_config=False)
self.__endpoint_id = YANGDynClass(base=unicode, is_leaf=True, yang_name="endpoint-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'connection-points', u'connection-point', u'endpoints', u'endpoint', u'state']
def _get_endpoint_id(self):
"""
Getter method for endpoint_id, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/endpoint_id (string)
YANG Description: An identifier for the endpoint
"""
return self.__endpoint_id
def _set_endpoint_id(self, v, load=False):
"""
Setter method for endpoint_id, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/endpoint_id (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_endpoint_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_endpoint_id() directly.
YANG Description: An identifier for the endpoint
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="endpoint-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """endpoint_id must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="endpoint-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=False)""",
})
self.__endpoint_id = t
if hasattr(self, '_set'):
self._set()
def _unset_endpoint_id(self):
self.__endpoint_id = YANGDynClass(base=unicode, is_leaf=True, yang_name="endpoint-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=False)
def _get_precedence(self):
"""
Getter method for precedence, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/precedence (uint16)
YANG Description: The precedence of the endpoint - the lowest precendence
viable endpoint will be utilised as the active endpoint
within a connection
"""
return self.__precedence
def _set_precedence(self, v, load=False):
"""
Setter method for precedence, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/precedence (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_precedence is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_precedence() directly.
YANG Description: The precedence of the endpoint - the lowest precendence
viable endpoint will be utilised as the active endpoint
within a connection
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="precedence", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint16', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """precedence must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="precedence", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint16', is_config=False)""",
})
self.__precedence = t
if hasattr(self, '_set'):
self._set()
def _unset_precedence(self):
self.__precedence = YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), is_leaf=True, yang_name="precedence", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint16', is_config=False)
def _get_type(self):
"""
Getter method for type, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/type (identityref)
YANG Description: The type of endpoint that is referred to by the current
endpoint
"""
return self.__type
def _set_type(self, v, load=False):
"""
Setter method for type, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/type (identityref)
If this variable is read-only (config: false) in the
source YANG file, then _set_type is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_type() directly.
YANG Description: The type of endpoint that is referred to by the current
endpoint
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """type must be of a type compatible with identityref""",
'defined-type': "openconfig-network-instance:identityref",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)""",
})
self.__type = t
if hasattr(self, '_set'):
self._set()
def _unset_type(self):
self.__type = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'LOCAL': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}, u'oc-ni-types:REMOTE': {'@namespace': u'http://openconfig.net/yang/network-instance-types', '@module': u'openconfig-network-instance-types'}},), is_leaf=True, yang_name="type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)
def _get_active(self):
"""
Getter method for active, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/active (boolean)
YANG Description: When the backup endpoint is active, the value of this
parameter is set to true
"""
return self.__active
def _set_active(self, v, load=False):
"""
Setter method for active, mapped from YANG variable /network_instances/network_instance/connection_points/connection_point/endpoints/endpoint/state/active (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_active is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_active() directly.
YANG Description: When the backup endpoint is active, the value of this
parameter is set to true
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """active must be of a type compatible with boolean""",
'defined-type': "boolean",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
})
self.__active = t
if hasattr(self, '_set'):
self._set()
def _unset_active(self):
    # Reset 'active' to a freshly default-constructed YANG boolean wrapper.
    self.__active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)

# These leaves are read-only (config: false): only getters are exposed as
# properties; the setters above are private backend entry points.
endpoint_id = __builtin__.property(_get_endpoint_id)
precedence = __builtin__.property(_get_precedence)
type = __builtin__.property(_get_type)
active = __builtin__.property(_get_active)

# Registry of YANG child elements used by pyangbind serialisation helpers.
_pyangbind_elements = {'endpoint_id': endpoint_id, 'precedence': precedence, 'type': type, 'active': active, }
|
[
"[email protected]"
] | |
c2a50a2894a8886745a3b0cf6176b87cdd9ff324
|
bd14c979335112b7718b0feda18ebf0e3b40fe5c
|
/contest_093/b_small_and_large_integers_2nd.py
|
5090fc480a7ed5adb7ee90d373f591aadebb6a25
|
[] |
no_license
|
ababa831/atcoder_beginners
|
22c57b15333d110126d1b1afadc0ff5e8784fc4f
|
1a30882ce7f20f312045d5dc7bfaa5688cc8a88e
|
refs/heads/master
| 2023-03-07T15:47:19.750682 | 2020-03-04T19:53:45 | 2020-03-04T19:53:45 | 143,360,607 | 1 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 245 |
py
|
# Accepted
# Print, in ascending order, every integer in [a, b] that is among the
# k smallest or k largest values of that interval.
a, b, k = map(int, input().split())
# Candidate values: k counting up from a, and k counting down from b.
candidates = set(range(a, a + k)) | set(range(b, b - k, -1))
for value in sorted(candidates):
    if a <= value <= b:
        print(value)
|
[
"[email protected]"
] | |
dbc0f0130cf61ccefa2cb7304519c144f1dc48bf
|
a3c34ad9425cf9c16a09423278b81c20edd8d77a
|
/sms_frame/models/sms_compose.py
|
3cbd3cbd6f0bcc7f98da112e58038923a071a292
|
[] |
no_license
|
meswapnilwagh/Odoo9
|
d1dca7de18ac555abe2da96fb78f0d3bd3835650
|
91f1e545ab597ca89283b8dc5dbf3d7f5bd5df5b
|
refs/heads/9.0
| 2020-04-08T00:24:25.179940 | 2016-02-22T08:49:56 | 2016-02-22T08:49:56 | 52,294,854 | 0 | 1 | null | 2016-02-22T18:20:41 | 2016-02-22T18:20:40 | null |
UTF-8
|
Python
| false | false | 3,144 |
py
|
# -*- coding: utf-8 -*
from datetime import datetime
from openerp import api, fields, models
class SmsCompose(models.Model):
    """Compose-and-send screen for a single outbound SMS to one number."""
    _name = "sms.compose"

    # Last gateway error, shown back to the user when a send fails.
    error_message = fields.Char(readonly=True)
    # Generic reference to the related record: model name + database id.
    record_id = fields.Integer()
    model = fields.Char()
    # Optional template used to pre-fill the message body and sender.
    sms_template_id = fields.Many2one('sms.template', string="Template")
    from_mobile_id = fields.Many2one('sms.number', required=True, string="From Mobile")
    to_number = fields.Char(required=True, string='To Mobile Number', readonly=True)
    sms_content = fields.Text(string='SMS Content')

    @api.onchange('sms_template_id')
    def _onchange_sms_template_id(self):
        """Prefills from mobile, sms_account and sms_content but allow them to manually change the content after"""
        if self.sms_template_id.id != False:
            # Render the template body against the related record before copying
            # it into the editable sms_content field.
            sms_rendered_content = self.env['sms.template'].render_template(self.sms_template_id.template_body, self.sms_template_id.model_id.model, self.record_id)
            self.from_mobile_id = self.sms_template_id.from_mobile_verified_id.id
            self.sms_content = sms_rendered_content

    @api.multi
    def send_entity(self):
        """Attempt to send the sms, if any error comes back show it to the user and only log the smses that successfully sent"""
        self.ensure_one()
        # NOTE(review): gateway_model is assigned but never used below --
        # presumably a leftover; confirm before removing.
        gateway_model = self.from_mobile_id.account_id.account_gateway_id.gateway_model_name
        my_sms = self.from_mobile_id.account_id.send_message(self.from_mobile_id.mobile_number, self.to_number, self.sms_content.encode('utf-8'), self.model, self.record_id)
        # use the human readable error message if present
        error_message = ""
        if my_sms.human_read_error != "":
            error_message = my_sms.human_read_error
        else:
            error_message = my_sms.response_string
        # display the screen with an error code if the sms/mms was not successfully sent
        # (note: 'delivary_state' spelling matches the gateway result object)
        if my_sms.delivary_state == "failed":
            # Re-open the compose wizard pre-filled with the same data plus the error.
            return {
                'type':'ir.actions.act_window',
                'res_model':'sms.compose',
                'view_type':'form',
                'view_mode':'form',
                'target':'new',
                'context':{'default_to_number':self.to_number,'default_record_id':self.record_id,'default_model':self.model, 'default_error_message':error_message}
            }
        else:
            my_model = self.env['ir.model'].search([('model','=',self.model)])
            # for single smses we only record succesful sms, failed ones reopen the form with the error message
            sms_message = self.env['sms.message'].create({'record_id': self.record_id,'model_id':my_model[0].id,'account_id':self.from_mobile_id.account_id.id,'from_mobile':self.from_mobile_id.mobile_number,'to_mobile':self.to_number,'sms_content':self.sms_content,'status_string':my_sms.response_string, 'direction':'O','message_date':datetime.utcnow(), 'status_code':my_sms.delivary_state, 'sms_gateway_message_id':my_sms.message_id})
            try:
                self.env[self.model].search([('id','=', self.record_id)]).message_post(body=self.sms_content, subject="SMS Sent")
            except:
                #Message post only works if CRM module is installed
                pass
|
[
"[email protected]"
] | |
10c70540a9623f4e0994a218263f3b689583ef58
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_049/ch25_2019_03_11_12_40_04_650432.py
|
1ccce705dfcb58b27c4c448e847adbc6418c6bc3
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 219 |
py
|
def preco(distancia):
    """Return the fare for a trip of *distancia* km.

    Pricing: 0.50 per km up to 200 km; beyond that, the first 200 km cost a
    flat 100 and each extra km costs 0.45.
    """
    if distancia > 200:
        return 100 + (distancia - 200) * 0.45
    return distancia * 0.5


if __name__ == "__main__":
    # Read the distance only when executed as a script, so preco() stays importable.
    distancia = int(input('Qual a distância do trajeto? '))
    # BUG FIX: the original print call was missing its closing parenthesis,
    # which made the whole file a SyntaxError.
    print("{:.2f}".format(preco(distancia)))
|
[
"[email protected]"
] | |
737ec07de6c5ea89bf1610e81acecb3e9200babb
|
6b2a8dd202fdce77c971c412717e305e1caaac51
|
/solutions_5708284669460480_0/Python/zdan/B.py
|
e89eff79728bb389faaa4be1f8d9b26f813576ea
|
[] |
no_license
|
alexandraback/datacollection
|
0bc67a9ace00abbc843f4912562f3a064992e0e9
|
076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
|
refs/heads/master
| 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,651 |
py
|
import sys
import itertools
import numpy as np
def occurrences(string, target):
    """Count occurrences of *target* in *string*.

    When target's first and last characters coincide (and len > 1),
    overlapping matches are counted (e.g. 'aa' occurs twice in 'aaa');
    otherwise the non-overlapping str.count suffices.
    """
    if len(target) > 1 and target[0] == target[-1]:
        hits = 0
        pos = string.find(target)
        while pos != -1:
            hits += 1
            # Restart the scan one character later to allow overlaps.
            pos = string.find(target, pos + 1)
        return hits
    return string.count(target)
def solve(K, L, S, keyboard, target):
    """Expected number of extra bananas: the best-case occurrence count of
    *target* minus the average payout over every possible S-keystroke typing
    on *keyboard*. Returns 0.0 when the target can never appear."""
    # Impossible cases: word longer than the typing, a needed key missing
    # from the keyboard, or a keyboard with only one distinct key.
    if S < L:
        return 0.
    if not set(target).issubset(set(keyboard)):
        return 0.
    if len(set(keyboard)) == 1:
        return 0.
    total = payout = best = 0
    # Brute-force every possible sequence of S keystrokes (keys are weighted
    # by their multiplicity on the keyboard, so duplicates count separately).
    for keys in itertools.product(keyboard, repeat=S):
        total += 1
        hits = occurrences(''.join(keys), target)
        payout += hits
        best = max(best, hits)
    return best - float(payout) / total
if __name__ == '__main__':
    # Python 2 driver: reads the input file named on argv[1] and writes
    # results to "<basename>.out" alongside it.
    filename_in = sys.argv[1]
    filename_out = filename_in.partition('.')[0] + '.out'
    with open(filename_out, "w") as fout:
        with open(filename_in, "r") as fin:
            T = int(fin.readline())  # number of test cases
            for case in range(1, T+1):
                # K = key count, L = target length, S = keystrokes per test
                K, L, S = [int(x) for x in fin.readline().split()]
                keyboard = fin.readline().strip()
                target = fin.readline().strip()
                # NOTE: Python 2 print-to-file syntax; this script is Python 2 only.
                print >> fout, "Case #%i:" % case, solve(K, L, S, keyboard, target)
|
[
"[email protected]"
] | |
098f68ce0de1a4e85ab1ea096ed45ccf2fff3eeb
|
4bed9030031fc99f6ea3d5267bd9e773f54320f8
|
/sparse/repos/Calysto/matlab_kernel/setup.py
|
313419fcbb79751dd03972ceb291c85638644417
|
[
"BSD-3-Clause"
] |
permissive
|
yuvipanda/mybinder.org-analytics
|
c5f4b939541d29727bc8d3c023b4d140de756f69
|
7b654e3e21dea790505c626d688aa15640ea5808
|
refs/heads/master
| 2021-06-13T05:49:12.447172 | 2018-12-22T21:48:12 | 2018-12-22T21:48:12 | 162,839,358 | 1 | 1 |
BSD-3-Clause
| 2021-06-10T21:05:50 | 2018-12-22T20:01:52 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 1,680 |
py
|
import glob
from setuptools import setup, find_packages
# Extract __version__ from the package source without importing it
# (importing could trigger side effects or missing-dependency errors).
with open('matlab_kernel/__init__.py', 'rb') as fid:
    for line in fid:
        line = line.decode('utf-8')
        if line.startswith('__version__'):
            # Strip the surrounding quotes from e.g. __version__ = "1.2.3"
            version = line.strip().split()[-1][1:-1]
            break

DISTNAME = 'matlab_kernel'

# Ship the MATLAB helper scripts plus every nested resource file in the package.
PACKAGE_DATA = {
    DISTNAME: ['*.m'] + glob.glob('%s/**/*.*' % DISTNAME)
}

# Install the Jupyter kernel spec (kernel.json and logo images) system-wide.
DATA_FILES = [
    ('share/jupyter/kernels/matlab', [
        '%s/kernel.json' % DISTNAME
    ] + glob.glob('%s/images/*.png' % DISTNAME)
    )
]

if __name__ == "__main__":
    setup(name="matlab_kernel",
          author="Steven Silvester, Antony Lee",
          version=version,
          url="https://github.com/Calysto/matlab_kernel",
          license="BSD",
          long_description=open("README.rst").read(),
          classifiers=["Framework :: IPython",
                       "License :: OSI Approved :: BSD License",
                       "Programming Language :: Python :: 3.4",
                       "Programming Language :: Python :: 3.5",
                       "Topic :: System :: Shells"],
          packages=find_packages(include=["matlab_kernel", "matlab_kernel.*"]),
          package_data=PACKAGE_DATA,
          include_package_data=True,
          data_files=DATA_FILES,
          # 'requires' is legacy distutils metadata; 'install_requires' below
          # is what actually drives pip dependency resolution.
          requires=["metakernel (>0.20.8)", "jupyter_client (>=4.4.0)",
                    "ipython (>=4.0.0)"],
          install_requires=["metakernel>=0.20.8", "jupyter_client >=4.4.0",
                            "ipython>=4.0.0",
                            "backports.tempfile;python_version<'3.0'",
                            'wurlitzer>=1.0.2;platform_system!="Windows"']
          )
|
[
"[email protected]"
] | |
7c7b6d5899ee3e4f388506f32f261fbed6508bac
|
3649308c5d709100c4dc90e661fc9f564f184877
|
/ocs/login/models.py
|
bc379435ce64eb699e183aa176c7f68a662e65a4
|
[] |
no_license
|
anirudhasj441/django
|
54171f6141d6938201146a6d3e9475477a3f0078
|
5bb202d13d4b17daca9aedf3b213908c3245757b
|
refs/heads/master
| 2021-07-09T06:18:11.597848 | 2021-03-07T17:58:32 | 2021-03-07T17:58:32 | 230,616,005 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,042 |
py
|
from django.db import models
from datetime import date
# Create your models here.
class Student(models.Model):
    """Student account record, keyed by the student's PNR number."""
    # s_id = models.AutoField(primary_key=True,default="1")
    s_pnr = models.IntegerField(primary_key=True)  # PNR doubles as the primary key
    s_name = models.CharField(max_length=50)
    s_dob = models.DateField(null=True,blank=True)  # optional date of birth
    s_gender = models.CharField(max_length=50,default="")
    # NOTE(review): 300 chars suggests a hashed password is stored here -- confirm.
    s_passwd = models.CharField(max_length=300)
    s_roll = models.IntegerField()
    s_class = models.CharField(max_length=50)
    # NOTE(review): IntegerField drops leading zeros and caps length for phone
    # numbers; a CharField is the usual choice -- confirm before changing.
    s_contact = models.IntegerField()
    s_email = models.EmailField()

    def __str__(self):
        # Show students by name in the admin and shell.
        return self.s_name
class Teacher(models.Model):
    """Teacher account record with an auto-generated primary key."""
    t_id = models.AutoField(primary_key=True)
    tnr = models.IntegerField()  # teacher number (TNR)
    t_name = models.CharField(max_length=50)
    t_dob = models.DateField(null=True,blank=True)  # optional date of birth
    t_email = models.EmailField(default="")
    t_cont = models.IntegerField(null=True)  # optional contact number
    # NOTE(review): 300 chars suggests a hashed password is stored here -- confirm.
    t_passwd = models.CharField(max_length=300)

    def __str__(self):
        # Show teachers by name in the admin and shell.
        return self.t_name
|
[
"[email protected]"
] | |
210bc7bd0293918d3ca37014a57b68ebe2823f96
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03408/s214379251.py
|
fba6c07029d057c1512feb87f8d481f483ef4cb4
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 283 |
py
|
from collections import Counter


def best_score(liked, disliked):
    """Return the maximum over all names in *liked* of
    (occurrences in liked) - (occurrences in disliked), floored at 0.

    Equivalent to the original per-element .count() scan but runs in O(N+M)
    instead of O(N*(N+M)) by tallying each list once with Counter.
    """
    neg = Counter(disliked)
    # The trailing [0] reproduces the original result of 0 for empty input
    # (and when every name's balance is negative).
    return max([cnt - neg[name] for name, cnt in Counter(liked).items()] + [0])


if __name__ == "__main__":
    # Input format: N, then N liked names, then M, then M disliked names.
    n = int(input())
    liked = [input() for _ in range(n)]
    m = int(input())
    disliked = [input() for _ in range(m)]
    print(best_score(liked, disliked))
|
[
"[email protected]"
] | |
07f87234adb59300c6bb17578632811553a04257
|
8cf633e92a0671c8201268620a0372f250c8aeb2
|
/205.同构字符串.py
|
f76217c78e58ac420845c37b25d7da82a86ce71d
|
[
"Unlicense"
] |
permissive
|
SprintGhost/LeetCode
|
76da5c785009d474542e5f2cdac275675b8e60b8
|
cdf1a86c83f2daedf674a871c4161da7e8fad17c
|
refs/heads/develop
| 2021-06-06T04:04:28.883692 | 2021-01-01T14:09:26 | 2021-01-01T14:09:26 | 230,635,046 | 0 | 0 |
Unlicense
| 2020-12-11T14:55:36 | 2019-12-28T16:34:39 |
Python
|
UTF-8
|
Python
| false | false | 1,636 |
py
|
#
# @lc app=leetcode.cn id=205 lang=python3
#
# [205] 同构字符串
#
# Accepted
# 30/30 cases passed (48 ms)
# Your runtime beats 55.2 % of python3 submissions
# Your memory usage beats 16.3 % of python3 submissions (14.1 MB)
# @lc code=start
class Solution:
    def isIsomorphic(self, s: str, t: str) -> bool:
        """Return True iff *s* and *t* are isomorphic, i.e. a consistent
        one-to-one character mapping transforms s into t."""
        if (not s and t) or (not t and s):
            return False
        # Assign every distinct character a code in order of first appearance;
        # the strings are isomorphic iff the two code sequences agree.
        codes_s = {}
        codes_t = {}
        for i in range(len(s)):
            a = codes_s.setdefault(s[i], len(codes_s))
            b = codes_t.setdefault(t[i], len(codes_t))
            if a != b:
                return False
        return True
# Accepted
# 30/30 cases passed (36 ms)
# Your runtime beats 93.12 % of python3 submissions
# Your memory usage beats 40.24 % of python3 submissions (13.7 MB)
class Solution:
    def eigenValues(self, x):
        """Canonical fingerprint of *x*: each character is replaced by the
        (string form of the) 1-based order in which it first appeared, and
        the pieces are concatenated (e.g. 'aba' -> '121')."""
        labels = {}
        parts = []
        for ch in x:
            if ch not in labels:
                labels[ch] = str(len(labels) + 1)
            parts.append(labels[ch])
        return ''.join(parts)

    def isIsomorphic(self, s: str, t: str) -> bool:
        """Two strings are isomorphic iff their fingerprints are equal."""
        return self.eigenValues(s) == self.eigenValues(t)
# A = Solution()
# print (A.isIsomorphic("aba", "baa"))
# @lc code=end
|
[
"[email protected]"
] | |
4f95ffbb37ddcbd7d1965a4ed8a986c5e52274fa
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_193/ch11_2019_08_22_19_37_28_357381.py
|
1ee4b70ca39ef725a52d1d1a1e107d4d7747a66f
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 115 |
py
|
def celsius_para_fahrenheit(x):
y = (x * 1.8) + 32
return y
a = 0
b = celsius_para_fahrenheit(a)
print(b)
|
[
"[email protected]"
] | |
554b02c0fd1b8bac352fe742a597f5be3d13b43d
|
8222dcbb226682a9112720927361877a92185407
|
/fluent_contents/plugins/sharedcontent/managers.py
|
7bd0a8f6915af76928eb41ced0dc3898c6d93cf6
|
[
"Apache-2.0"
] |
permissive
|
acolorbright/django-fluent-contents
|
ada4a5fedb590e5f679463221fce2f965730bac1
|
4e5c6e99134ceee804bb42391ec37e5e17ff5a7e
|
refs/heads/master
| 2023-04-12T05:31:19.179528 | 2018-05-14T11:10:16 | 2018-05-14T11:10:16 | 108,149,326 | 0 | 0 |
Apache-2.0
| 2023-04-04T00:22:27 | 2017-10-24T15:48:46 |
Python
|
UTF-8
|
Python
| false | false | 1,888 |
py
|
from django.conf import settings
from django.db.models import Q, Manager
from parler.managers import TranslatableQuerySet
from fluent_contents import appsettings
from fluent_contents.plugins.sharedcontent import appsettings as sharedcontent_appsettings
class SharedContentQuerySet(TranslatableQuerySet):
    """
    The QuerySet for SharedContent models.
    """

    def __init__(self, *args, **kwargs):
        super(SharedContentQuerySet, self).__init__(*args, **kwargs)
        # Which site this queryset has been filtered on; None = not filtered yet.
        self._parent_site = None

    def _clone(self, klass=None, setup=False, **kw):
        # Propagate the parent-site marker to cloned querysets so chained
        # filters don't re-apply the site filter.
        c = super(SharedContentQuerySet, self)._clone(klass, setup, **kw)
        c._parent_site = self._parent_site
        return c

    def parent_site(self, site):
        """
        Filter to the given site, only give content relevant for that site.
        """
        # Avoid auto filter if site is already set.
        self._parent_site = site
        if sharedcontent_appsettings.FLUENT_SHARED_CONTENT_ENABLE_CROSS_SITE:
            # Allow content to be shared between all sites:
            return self.filter(Q(parent_site=site) | Q(is_cross_site=True))
        else:
            return self.filter(parent_site=site)

    def _single_site(self):
        """
        Make sure the queryset is filtered on a parent site, if that didn't happen already.
        """
        # Falls back to the current settings.SITE_ID when site filtering is
        # enabled and no explicit parent_site() call has been made yet.
        if appsettings.FLUENT_CONTENTS_FILTER_SITE_ID and self._parent_site is None:
            return self.parent_site(settings.SITE_ID)
        else:
            return self

    def get_for_slug(self, slug):
        """
        .. versionadded:: 1.0 Return the content for the given slug.
        """
        return self._single_site().get(slug=slug)
class SharedContentManager(Manager.from_queryset(SharedContentQuerySet)):
    """
    Extra methods attached to ``SharedContent.objects``, see :class:`SharedContentQuerySet`.
    """
    # All behaviour is inherited from the queryset via Manager.from_queryset().
    pass
|
[
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.