Dataset schema (one row per file):

| Column | Type / range |
|---|---|
| blob_id | string, length 40 |
| directory_id | string, length 40 |
| path | string, 3–616 chars |
| content_id | string, length 40 |
| detected_licenses | sequence, 0–112 items |
| license_type | string, 2 classes |
| repo_name | string, 5–115 chars |
| snapshot_id | string, length 40 |
| revision_id | string, length 40 |
| branch_name | string, 777 classes |
| visit_date | timestamp[us], 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us], 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us], 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64, 4.92k–681M, nullable (⌀) |
| star_events_count | int64, 0–209k |
| fork_events_count | int64, 0–110k |
| gha_license_id | string, 22 classes |
| gha_event_created_at | timestamp[us], 2012-06-04 01:52:49 – 2023-09-14 21:59:50, nullable (⌀) |
| gha_created_at | timestamp[us], 2008-05-22 07:58:19 – 2023-08-21 12:35:19, nullable (⌀) |
| gha_language | string, 149 classes |
| src_encoding | string, 26 classes |
| language | string, 1 class |
| is_vendor | bool, 2 classes |
| is_generated | bool, 2 classes |
| length_bytes | int64, 3–10.2M |
| extension | string, 188 classes |
| content | string, 3–10.2M chars |
| authors | sequence, 1 item |
| author_id | string, 1–132 chars |

Rows follow as pipe-separated values in that column order.
8196a6d153f61f9ad7d3d169b3850fb382e2b167 | 6963f191a3574edcfaecc265a363bc10d4cdfc19 | /osf/management/commands/osf_shell.py | 11ed88684d009e3e84f839751c5ea9a4012a6410 | [
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-warranty-disclaimer",
"AGPL-3.0-only",
"LGPL-2.0-or-later",
"LicenseRef-scancode-proprietary-license",
"MPL-1.1",
"CPAL-1.0",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause",
"Apache-2.0"
] | permissive | rdm-dev12/RDM-osf.io | 8f3c2f7057b17512921292e84578d24ad4ca2bb5 | 14d9a924b8c6bc7d79fd34b87830ffa29acafed1 | refs/heads/timestamp-v18.2.7.file_upload_x_of_y | 2022-12-09T06:23:43.320341 | 2019-02-27T07:39:12 | 2019-02-27T07:39:12 | 172,862,723 | 0 | 0 | Apache-2.0 | 2022-09-16T17:58:51 | 2019-02-27T07:07:48 | Python | UTF-8 | Python | false | false | 7,816 | py | """Enhanced python shell.
Includes all features from django-extensions' shell_plus command plus OSF-specific
niceties.
By default, sessions run in a transaction, so changes won't be committed until
you execute `commit()`.
All models are imported by default, as well as common OSF and Django objects.
To add more objects, set the `OSF_SHELL_USER_IMPORTS` Django setting
to a dictionary or a callable that returns a dictionary.
Example: ::
from django.apps import apps
def get_user_imports():
User = apps.get_model('osf.OSFUser')
Node = apps.get_model('osf.AbstractNode')
me = User.objects.get(username='[email protected]')
node = Node.objects.first()
return {
'me': me,
'node': node,
}
OSF_SHELL_USER_IMPORTS = get_user_imports
"""
from django.conf import settings
from django.db import transaction
from django.utils.termcolors import colorize
from django.db.models import Model
from django_extensions.management.commands import shell_plus
from django_extensions.management.utils import signalcommand
def header(text):
return colorize(text, fg='green', opts=('bold', ))
def format_imported_objects(models, osf, transaction, other, user):
def format_dict(d):
return ', '.join(sorted(d.keys()))
ret = """
{models_header}
{models}
{osf_header}
{osf}
{transaction_header}
{transaction}
{other_header}
{other}""".format(
models_header=header('Models:'),
models=format_dict(models),
osf_header=header('OSF:'),
osf=format_dict(osf),
transaction_header=header('Transaction:'),
transaction=format_dict(transaction),
other_header=header('Django:'),
other=format_dict(other),
)
if user:
ret += '\n\n{user_header}\n{user}'.format(
user_header=header('User Imports:'),
user=format_dict(user)
)
return ret
# kwargs will be the grouped imports, e.g. {'models': {...}, 'osf': {...}}
def make_banner(auto_transact=True, **kwargs):
logo = """
.+yhhys/`
`smmmmmmmmd:
`--.` ommmmmmmmmmm. `.--.
`odmmmmmh/ smmmhhyhdmmm- :ymmmmmdo.
-dmmmmmmmmmy .hho+++++sdo smmmmmmmmmm:
smmmmmmmmmmm: `++++++++: -mmmmmmmmmmmy
+mmmmmmmmmmmo: :+++++++.:+mmmmmmmmmmmo
+dmmmmmmmds++. .://:-``++odmmmmmmmmo
`:osyhys+++/ :+++oyhyso/`
`/shddds/``.-::-. `-::-.``/shdddy/`
-dmmmmmds++++/. ./++++sdmmmmmd:
hmmmmmmo+++++++. .++++++++dmmmmmd`
hmmmmmmo+++++++. .++++++++dmmmmmd`
-dmmmmmds++++/. ./++++sdmmmmmd:
`/shddhs/``.-::-. `-::-.``/shdddy/`
`:osyhys+++/ :+++oyhyso/`
+dmmmmmmmds++. .://:- `++odmmmmmmmmo
+mmmmmmmmmmmo: /++++++/`:+mmmmmmmmmmmo
smmmmmmmmmmm: `++++++++. -mmmmmmmmmmmy
-dmmmmmmmmmy `s++++++y/ smmmmmmmmmm:
`odmmmmmh/ hmmhyyhdmm/ :ymmmmmds.
`--.` `mmmmmmmmmmo `.--.
/mmmmmmmmh`
`+shhyo:
"""
greeting = 'Welcome to the OSF Shell. Happy hacking!'
imported_objects = format_imported_objects(**kwargs)
transaction_warning = """
*** TRANSACTION AUTOMATICALLY STARTED ***
To persist changes, run 'commit()'.
Keep in mind that changing documents will lock them.
This feature can be disabled with the '--no-transaction' flag."""
no_transaction_warning = """
*** AUTO-TRANSACTION DISABLED ***
All changes will persist. Transactions must be handled manually."""
template = """{logo}
{greeting}
{imported_objects}
{warning}
"""
if auto_transact:
warning = colorize(transaction_warning, fg='yellow')
else:
warning = colorize(no_transaction_warning, fg='red')
return template.format(
logo=colorize(logo, fg='cyan'),
greeting=colorize(greeting, opts=('bold', )),
imported_objects=imported_objects,
warning=warning,
)
class Command(shell_plus.Command):
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--no-transaction', action='store_false', dest='transaction',
help="Don't run session in transaction. Transactions must be "
'started manually with start_transaction()'
)
def get_osf_imports(self):
"""Return a dictionary of common OSF objects and utilities."""
from osf.management.utils import print_sql
from website import settings as website_settings
from framework.auth import Auth, get_user
ret = {
'print_sql': print_sql,
'Auth': Auth,
'get_user': get_user,
'website_settings': website_settings,
}
try: # faker isn't a prod requirement
from faker import Factory
except ImportError:
pass
else:
fake = Factory.create()
ret['fake'] = fake
return ret
def get_grouped_imports(self, options):
"""Return a dictionary of grouped import of the form:
{
'osf': {
'Auth': <framework.auth.Auth>,
....
}
'models': {...}
'transaction': {...}
'other': {...}
}
"""
auto_transact = options.get('transaction', True)
def start_transaction():
self.atomic.__enter__()
print('New transaction opened.')
def commit():
self.atomic.__exit__(None, None, None)
print('Transaction committed.')
if auto_transact:
start_transaction()
def rollback():
exc_type = RuntimeError
exc_value = exc_type('Transaction rollback')
self.atomic.__exit__(exc_type, exc_value, None)
print('Transaction rolled back.')
if auto_transact:
start_transaction()
groups = {
'models': {},
'other': {},
'osf': self.get_osf_imports(),
'transaction': {
'start_transaction': start_transaction,
'commit': commit,
'rollback': rollback,
},
'user': self.get_user_imports(),
}
# Import models and common django imports
shell_plus_imports = shell_plus.Command.get_imported_objects(self, options)
for name, object in shell_plus_imports.items():
if isinstance(object, type) and issubclass(object, Model):
groups['models'][name] = object
else:
groups['other'][name] = object
return groups
def get_user_imports(self):
imports = getattr(settings, 'OSF_SHELL_USER_IMPORTS', None)
if imports:
if callable(imports):
imports = imports()
return imports
else:
return {}
# Override shell_plus.Command
def get_imported_objects(self, options):
# Merge all the values of grouped_imports
imported_objects = {}
for imports in self.grouped_imports.values():
imported_objects.update(imports)
return imported_objects
# Override shell_plus.Command
@signalcommand
def handle(self, *args, **options):
self.atomic = transaction.atomic()
auto_transact = options.get('transaction', True)
options['quiet_load'] = True # Don't show default shell_plus banner
self.grouped_imports = self.get_grouped_imports(options)
banner = make_banner(auto_transact=auto_transact, **self.grouped_imports)
print(banner)
if auto_transact:
self.atomic.__enter__()
super(Command, self).handle(*args, **options)
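# Illustrative invocation (assumes the usual Django management-command layout this
# module sits in; the flag name comes from add_arguments above):
#   python manage.py osf_shell                    # auto-transaction enabled
#   python manage.py osf_shell --no-transaction   # changes persist immediately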
| [
"[email protected]"
] | |
4b07d1427059017a5efe9aaa2f4d709d14931aa8 | d4ea1f9747799bf503523b86b8b5ee29bab65eff | /gyun/cli/iaas_client/actions/s2/modify_s2_shared_target_attributes.py | a1ea167b28dd9c16607678be1997a591d7b7c26d | [
"Apache-2.0"
] | permissive | gyun-gome/gyun-cli | 88b5493d90a19c5bf56a1bba4bf301d1b4a3156d | 275b6664335e2ef21a01a48f8c06d6a89dd63467 | refs/heads/master | 2021-06-28T13:53:01.300135 | 2017-09-13T04:44:01 | 2017-09-13T04:44:01 | 103,353,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,206 | py | # encoding: utf-8
# =========================================================================
# ©2017-2018 Beijing GOME Cloud Service Technology Co., Ltd. (北京国美云服科技有限公司)
# -------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from gyun.cli.misc.utils import explode_array
from gyun.cli.iaas_client.actions.base import BaseAction
class ModifyS2SharedTargetAttributesAction(BaseAction):
action = 'ModifyS2SharedTargetAttributes'
command = 'modify-s2-shared-target-attributes'
usage = '%(prog)s -s <shared_target> -o <operation> [-p <parameters> ...] [-f <conf_file>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument("-s", "--shared-target", dest="shared_target",
action="store", type=str, default=None,
help="the ID of shared target.")
parser.add_argument("-o", "--operation", dest="operation",
action="store", type=str, default=None,
help="valid values includes add, modify, delete, set.")
parser.add_argument("-p", "--parameters", dest="parameters",
action="store", type=str, default=None,
help="please refer http://docs.qc.gyun.com/api/s2/describle_s2_default_parameters.html")
parser.add_argument("-i", "--initiator-names", dest="initiator_names",
action="store", type=str, default=None,
help="client IQN.")
parser.add_argument("-S", "--s2-group", dest="s2_group",
action="store", type=str, default=None,
help="the ID of permission group.")
parser.add_argument("-n", "--export-name", dest="export_name",
action="store", type=str, default=None,
help="the name of shared target, available in vnas.")
@classmethod
def build_directive(cls, options):
for key in ['shared_target', 'operation']:
            if getattr(options, key, None) is None:  # hasattr alone never fails: argparse sets the attribute even when it defaults to None
print("error: [%s] should be specified." % key)
return None
directive = {
"shared_target": options.shared_target,
"operation": options.operation,
"parameters": explode_array(options.parameters),
"initiator_names": explode_array(options.initiator_names),
"s2_group": options.s2_group,
"export_name": options.export_name,
}
return directive
| [
"[email protected]"
] | |
f686c14d3f3ccf88ac38fcd8a34d6d9f001befd4 | 3c0f50b6563e2c9c6306f7ca2216ff46c8250b96 | /address/migrations/0003_usuario.py | b33eb28800063dbfeb0a7fb4e8513ef46fb1f55c | [] | no_license | JoamirS/project-curriculo | 895e72b34a8a51478c3fe5958d509bfa89be761e | 490ed533dae740a7d2e1b652ce36fdb2af294eb3 | refs/heads/master | 2020-06-01T19:50:42.019259 | 2019-06-22T22:16:18 | 2019-06-22T22:16:18 | 190,904,296 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 576 | py | # Generated by Django 2.2.2 on 2019-06-12 17:20
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('address', '0002_auto_20190612_1127'),
]
operations = [
migrations.CreateModel(
name='Usuario',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.CharField(max_length=100)),
('senha', models.CharField(max_length=30)),
],
),
]
| [
"[email protected]"
] | |
bf604c5c88de4b1652ed6c32594f61c0e84a082f | b6a59c78b4143441077f9ce81c9a6951687f9103 | /quiz/common/templatetags/common_tags.py | c22c495f3760396d2cbf01c3943b9cb2026abee6 | [] | no_license | EkaterinaEIvanova/quiz | 7389bd26eb891ba5a7033b91698321cbba7d2d7d | 6f93a5d6e604f127be0d29e8eebbb07c10eb9d47 | refs/heads/master | 2023-03-22T00:54:27.100204 | 2021-03-10T07:35:08 | 2021-03-10T07:35:08 | 346,270,553 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | from django import template
register = template.Library()
@register.simple_tag()
def get_name_or_email(user):
name = user.name if user.name else user.email
return name
| [
"[email protected]"
] | |
cff93f064b230f06153b1a99ce69e4f99f7623ed | 82f5a3e139034da846db0c3516848e3a797a52f0 | /sixteen.py | 16eeac1b1be9cf18646774eb3f75c61a77c5b307 | [] | no_license | Yanl05/LeetCode-git | d98f807d05d80b7df6c0a4f69cf233e25b0695b5 | ce617247645517f15d513c29e12c7fff33e1cccf | refs/heads/master | 2020-04-12T08:33:30.416069 | 2018-12-19T06:07:36 | 2018-12-19T06:07:36 | 162,388,594 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,272 | py | '''
Given an array nums of n integers and a target value target,
find three integers in nums such that their sum is closest to target. Return the sum of the three integers. You may assume that each input has exactly one solution.
For example, given the array nums = [-1, 2, 1, -4] and target = 1,
the sum closest to target is 2 (-1 + 2 + 1 = 2).
'''
class Solution:
def threeSumClosest(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
nums.sort()
print(nums)
difmin = 9999999
ans = 0
lenn = len(nums)
for i in range(lenn - 2):
left = i + 1
right = lenn - 1
while left < right:
count = nums[i] + nums[left] + nums[right] -target
if count == 0:
return target
else:
dif = abs(count)
if dif <= difmin:
ans = count + target
difmin = dif
if count + target < target:
left += 1
else:
right -= 1
return ans
print(Solution().threeSumClosest([-1,2,1,-4], 1)) | [
"[email protected]"
] | |
8c4bc2f8647439b9567f65c61e77308b8808c395 | 63b0fed007d152fe5e96640b844081c07ca20a11 | /ABC/ABC200~ABC299/ABC275/a.py | 8e0374be934c1e0a79209fb5d3a1594e525966c5 | [] | no_license | Nikkuniku/AtcoderProgramming | 8ff54541c8e65d0c93ce42f3a98aec061adf2f05 | fbaf7b40084c52e35c803b6b03346f2a06fb5367 | refs/heads/master | 2023-08-21T10:20:43.520468 | 2023-08-12T09:53:07 | 2023-08-12T09:53:07 | 254,373,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 100 | py | n = int(input())
h = list(map(int, input().split()))
maxh = max(h)
ans = h.index(maxh)+1
print(ans)
| [
"[email protected]"
] | |
d4877edd8d5a2d480e47bd50800d5ab3ebf850c1 | 411e5de8629d6449ff9aad2eeb8bb1dbd5977768 | /AlgoExpert/array/largestRange/1.py | 821c57749e45fe5a0fdc1c4e4747e82157bdece3 | [
"MIT"
] | permissive | Muzque/Leetcode | cd22a8f5a17d9bdad48f8e2e4dba84051e2fb92b | 2c37b4426b7e8bfc1cd2a807240b0afab2051d03 | refs/heads/master | 2022-06-01T20:40:28.019107 | 2022-04-01T15:38:16 | 2022-04-01T15:39:24 | 129,880,002 | 1 | 1 | MIT | 2022-04-01T15:39:25 | 2018-04-17T09:28:02 | Python | UTF-8 | Python | false | false | 395 | py | def largestRange(array):
    # Use a set for O(1) membership tests and expand each run of consecutive
    # numbers once; scanning only from the two sorted ends misses runs in the
    # middle of the array.
    nums = set(array)
    best = [array[0], array[0]]
    for num in array:
        if num - 1 in nums:
            continue  # num is not the left end of a run
        right = num
        while right + 1 in nums:
            right += 1
        if right - num > best[1] - best[0]:
            best = [num, right]
    return best
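# Illustrative check (hypothetical input, not part of the original file):
print(largestRange([1, 11, 3, 0, 15, 5, 2, 4, 10, 7, 12, 6]))  # expected: [0, 7]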
| [
"[email protected]"
] | |
df35d4e2bc4e83da4aa1b6939db8d9e229e0bd70 | 9edaf93c833ba90ae9a903aa3c44c407a7e55198 | /travelport/models/type_agency_profile_level_4.py | 5ee1ce876f63b649381647bc034c48d77dea4ecb | [] | no_license | tefra/xsdata-samples | c50aab4828b8c7c4448dbdab9c67d1ebc519e292 | ef027fe02e6a075d8ed676c86a80e9647d944571 | refs/heads/main | 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 | Python | UTF-8 | Python | false | false | 318 | py | from __future__ import annotations
from enum import Enum
__NAMESPACE__ = "http://www.travelport.com/schema/common_v37_0"
class TypeAgencyProfileLevel4(Enum):
"""
Profile levels in the Agency Hierarchy.
"""
AGENCY = "Agency"
BRANCH = "Branch"
BRANCH_GROUP = "BranchGroup"
AGENT = "Agent"
| [
"[email protected]"
] | |
6ffac5ea208ba2d6e273b1fdd1775d31f9762364 | 9eab77cb998e94ceb2b2d08738b05a98982505f1 | /sentiment-analysis/pythoncodes/01-text-to-id.py | 16b8e56535efcf07addf12250c40f7bd8382a0a7 | [] | no_license | behrouzmadahian/python | 1584dd13cde8531e69bb6fab76f148dc3fc0da57 | 5d4dbde8d570623fe785e78a3e45cd05ea80aa08 | refs/heads/master | 2021-06-28T16:53:09.927450 | 2020-09-21T14:02:55 | 2020-09-21T14:02:55 | 156,713,696 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 827 | py | import re
from nltk.corpus import stopwords
from nltk import word_tokenize
stop_words = stopwords.words('english')
def remove_stop_words(word_list, stopwords):
filtered_list = [w for w in word_list if not w in stopwords]
return filtered_list
# Removes punctuation, parentheses, question marks, etc., and leaves only alphanumeric characters
def clean_sentences(string):
    strip_special_chars = re.compile("[^A-Za-z0-9 ]+")  # keep spaces so the cleaned text can still be tokenized
string = string.lower().replace("<br />", " ")
return re.sub(strip_special_chars, "", string.lower())
def text_to_ids(text, vocab_list):
text_cleaned = clean_sentences(text)
word_list = word_tokenize(text_cleaned)
word_list = remove_stop_words(word_list, stop_words)
word_inds = [vocab_list.index(w) for w in word_list]
return word_inds
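# Illustrative run (hypothetical vocabulary; assumes the NLTK 'punkt' and
# 'stopwords' data are downloaded):
if __name__ == '__main__':
    vocab = ['movie', 'great', 'plot', 'acting']
    print(text_to_ids('The movie was great!', vocab))  # -> [0, 1]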
| [
"[email protected]"
] | |
225ceeb7e8183ff4fe55fd640c53ec2f3624a6c8 | 2bcc421ee345b00cf805c543b37d18b5d019dc04 | /adafruit-circuitpython-bundle-6.x-mpy-20201126/examples/matrixportal_simpletest.py | 6acecd14cfce626edae83fa7e034c7cadbe1bf85 | [] | no_license | saewoonam/sc-current-source-titano | 5a1ad46889c1b09c168424901fd71cb4eab5c61b | 1c136aa8b61268d9ac0b5a682b30ece70ab87663 | refs/heads/main | 2023-03-02T22:12:26.685537 | 2021-02-09T03:28:01 | 2021-02-09T03:28:01 | 317,299,900 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,528 | py | # SPDX-FileCopyrightText: 2020 Melissa LeBlanc-Williams, written for Adafruit Industries
#
# SPDX-License-Identifier: Unlicense
"""
This example checks the current Bitcoin price and displays it in the middle of the screen
"""
import time
import board
import terminalio
from adafruit_matrixportal.matrixportal import MatrixPortal
# You can display in 'GBP', 'EUR' or 'USD'
CURRENCY = "USD"
# Set up where we'll be fetching data from
DATA_SOURCE = "https://api.coindesk.com/v1/bpi/currentprice.json"
DATA_LOCATION = ["bpi", CURRENCY, "rate_float"]
def text_transform(val):
if CURRENCY == "USD":
return "$%d" % val
if CURRENCY == "EUR":
return "€%d" % val
if CURRENCY == "GBP":
return "£%d" % val
return "%d" % val
# the current working directory (where this file is)
cwd = ("/" + __file__).rsplit("/", 1)[0]
matrixportal = MatrixPortal(
url=DATA_SOURCE, json_path=DATA_LOCATION, status_neopixel=board.NEOPIXEL,
)
matrixportal.add_text(
text_font=terminalio.FONT,
text_position=(16, 16),
text_color=0xFFFFFF,
text_transform=text_transform,
)
matrixportal.preload_font(b"$012345789") # preload numbers
matrixportal.preload_font((0x00A3, 0x20AC)) # preload gbp/euro symbol
while True:
try:
value = matrixportal.fetch()
print("Response is", value)
except (ValueError, RuntimeError) as e:
print("Some error occured, retrying! -", e)
time.sleep(3 * 60) # wait 3 minutes
| [
"[email protected]"
] | |
62ba4516cecfc475a1122352092cb23d07a9500a | f1e19808b558ec4ff7c296b40ba08f1f7300e048 | /spotty/project_resources/stack.py | e91948662c95c9325d0eac30f2d3fa052f21fa61 | [
"MIT"
] | permissive | giserh/spotty | 300a6ec470f98319819cabfda0982ef3e46618ca | e12547ae32e39cdffa9c0187787ea7ca404090dd | refs/heads/master | 2020-04-11T18:57:54.218929 | 2018-12-14T00:18:06 | 2018-12-14T00:18:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,543 | py | import os
import yaml
from botocore.exceptions import EndpointConnectionError
from cfn_tools import CfnYamlLoader, CfnYamlDumper
from spotty.helpers.resources import get_snapshot, is_gpu_instance, stack_exists, get_volume, get_ami
from spotty.helpers.spot_prices import get_current_spot_price
from spotty.project_resources.key_pair import KeyPairResource
from spotty.utils import data_dir
class StackResource(object):
def __init__(self, cf, project_name: str, region: str):
self._cf = cf
self._project_name = project_name
self._region = region
self._stack_name = 'spotty-instance-%s' % project_name
@property
def name(self):
return self._stack_name
def stack_exists(self):
return stack_exists(self._cf, self._stack_name)
def get_stack_info(self):
try:
res = self._cf.describe_stacks(StackName=self._stack_name)
except EndpointConnectionError:
res = {}
return res['Stacks'][0]
def prepare_template(self, ec2, availability_zone: str, subnet_id: str, instance_type: str, volumes: list,
ports: list, max_price, on_demand, docker_commands):
"""Prepares CloudFormation template to run a Spot Instance."""
# read and update CF template
with open(data_dir('run_container.yaml')) as f:
template = yaml.load(f, Loader=CfnYamlLoader)
# ending letters for the devices (https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/device_naming.html)
device_letters = 'fghijklmnop'
# create and attach volumes
for i, volume in enumerate(volumes):
device_letter = device_letters[i]
volume_resources, volume_availability_zone = self._get_volume_resources(ec2, volume, device_letter)
# existing volume will be attached to the instance
if availability_zone and volume_availability_zone and (availability_zone != volume_availability_zone):
raise ValueError('The availability zone in the configuration file doesn\'t match the availability zone '
'of the existing volume or you have two existing volumes in different availability '
'zones.')
# update availability zone
if volume_availability_zone:
availability_zone = volume_availability_zone
# update template resources
template['Resources'].update(volume_resources)
# set availability zone
if availability_zone:
template['Resources']['SpotInstanceLaunchTemplate']['Properties']['LaunchTemplateData']['Placement'] = {
'AvailabilityZone': availability_zone,
}
# set subnet
if subnet_id:
template['Resources']['SpotInstanceLaunchTemplate']['Properties']['LaunchTemplateData']['NetworkInterfaces'] = [{
'SubnetId': subnet_id,
'DeviceIndex': 0,
'Groups': template['Resources']['SpotInstanceLaunchTemplate']['Properties']['LaunchTemplateData']['SecurityGroupIds'],
}]
del template['Resources']['SpotInstanceLaunchTemplate']['Properties']['LaunchTemplateData']['SecurityGroupIds']
# make sure that the lambda to update log group retention was called after
# the log group was created
template['Resources']['RenameSnapshotFunctionRetention']['DependsOn'] = [
resource_name for resource_name, resource in template['Resources'].items()
if resource['Type'] == 'Custom::SnapshotRenaming'
]
# delete calls of the SetLogsRetentionFunction lambda
if not template['Resources']['RenameSnapshotFunctionRetention']['DependsOn']:
del template['Resources']['RenameSnapshotFunctionRetention']
# make sure that the lambda to update log group retention was called after
# the log group was created
template['Resources']['DeleteSnapshotFunctionRetention']['DependsOn'] = [
resource_name for resource_name, resource in template['Resources'].items()
if resource['Type'] == 'Custom::SnapshotDeletion'
]
# delete calls of the SetLogsRetentionFunction lambda
if not template['Resources']['DeleteSnapshotFunctionRetention']['DependsOn']:
del template['Resources']['DeleteSnapshotFunctionRetention']
# TerminateInstanceFunction lambda should depend on all volume attachments
template['Resources']['TerminateInstance']['DependsOn'] = [
resource_name for resource_name, resource in template['Resources'].items()
if resource['Type'] == 'AWS::EC2::VolumeAttachment'
]
# add ports to the security group
for port in set(ports):
if port != 22:
template['Resources']['InstanceSecurityGroup']['Properties']['SecurityGroupIngress'] += [{
'CidrIp': '0.0.0.0/0',
'IpProtocol': 'tcp',
'FromPort': port,
'ToPort': port,
}, {
'CidrIpv6': '::/0',
'IpProtocol': 'tcp',
'FromPort': port,
'ToPort': port,
}]
# run on-demand instance
if on_demand:
del template['Resources']['SpotInstanceLaunchTemplate']['Properties']['LaunchTemplateData']['InstanceMarketOptions']
if max_price and not on_demand:
# check the maximum price
current_price = get_current_spot_price(ec2, instance_type, availability_zone)
if current_price > max_price:
raise ValueError('Current price for the instance (%.04f) is higher than the maximum price in the '
'configuration file (%.04f).' % (current_price, max_price))
# set maximum price
template['Resources']['SpotInstanceLaunchTemplate']['Properties']['LaunchTemplateData'] \
['InstanceMarketOptions']['SpotOptions']['MaxPrice'] = max_price
# set initial docker commands
if docker_commands:
template['Resources']['SpotInstanceLaunchTemplate']['Metadata']['AWS::CloudFormation::Init'] \
['docker_container_config']['files']['/tmp/docker/docker_commands.sh']['content'] = docker_commands
return yaml.dump(template, Dumper=CfnYamlDumper)
def create_stack(self, ec2, template: str, instance_profile_arn: str, instance_type: str, ami_name: str,
root_volume_size: int, mount_dirs: list, bucket_name: str, remote_project_dir: str,
project_name: str, project_dir: str, docker_config: dict):
"""Runs CloudFormation template."""
# get default VPC ID
res = ec2.describe_vpcs(Filters=[{'Name': 'isDefault', 'Values': ['true']}])
if not len(res['Vpcs']):
raise ValueError('Default VPC not found')
vpc_id = res['Vpcs'][0]['VpcId']
# get image info
ami_info = get_ami(ec2, ami_name)
if not ami_info:
raise ValueError('AMI with name "%s" not found.\n'
'Use "spotty create-ami" command to create an AMI with NVIDIA Docker.' % ami_name)
ami_id = ami_info['ImageId']
# check root volume size
image_volume_size = ami_info['BlockDeviceMappings'][0]['Ebs']['VolumeSize']
if root_volume_size and root_volume_size < image_volume_size:
raise ValueError('Root volume size cannot be less than the size of AMI (%dGB).' % image_volume_size)
elif not root_volume_size:
root_volume_size = image_volume_size + 5
# create key pair
project_key = KeyPairResource(ec2, self._project_name, self._region)
key_name = project_key.create_key()
# working directory for the Docker container
working_dir = docker_config['workingDir']
if not working_dir:
working_dir = remote_project_dir
# get the Dockerfile path and the build's context path
dockerfile_path = docker_config.get('file', '')
docker_context_path = ''
if dockerfile_path:
if not os.path.isfile(os.path.join(project_dir, dockerfile_path)):
raise ValueError('File "%s" doesn\'t exist.' % dockerfile_path)
dockerfile_path = remote_project_dir + '/' + dockerfile_path
docker_context_path = os.path.dirname(dockerfile_path)
# create stack
params = {
'VpcId': vpc_id,
'InstanceProfileArn': instance_profile_arn,
'InstanceType': instance_type,
'KeyName': key_name,
'ImageId': ami_id,
'RootVolumeSize': str(root_volume_size),
'VolumeMountDirectories': ('"%s"' % '" "'.join(mount_dirs)) if mount_dirs else '',
'DockerDataRootDirectory': docker_config['dataRoot'],
'DockerImage': docker_config.get('image', ''),
'DockerfilePath': dockerfile_path,
'DockerBuildContextPath': docker_context_path,
'DockerNvidiaRuntime': 'true' if is_gpu_instance(instance_type) else 'false',
'DockerWorkingDirectory': working_dir,
'InstanceNameTag': project_name,
'ProjectS3Bucket': bucket_name,
'ProjectDirectory': remote_project_dir,
}
res = self._cf.create_stack(
StackName=self._stack_name,
TemplateBody=template,
Parameters=[{'ParameterKey': key, 'ParameterValue': value} for key, value in params.items()],
Capabilities=['CAPABILITY_IAM'],
OnFailure='DO_NOTHING',
)
return res
def delete_stack(self):
self._cf.delete_stack(StackName=self._stack_name)
@staticmethod
def _get_volume_resources(ec2, volume: dict, device_letter: str):
resources = {}
availability_zone = ''
# VolumeAttachment resource
attachment_resource_name = 'VolumeAttachment' + device_letter.upper()
attachment_resource = {
'Type': 'AWS::EC2::VolumeAttachment',
'Properties': {
'Device': '/dev/sd' + device_letter,
'InstanceId': {'Ref': 'SpotInstance'},
},
}
volume_name = volume['name']
volume_size = volume['size']
deletion_policy = volume['deletionPolicy']
# check that the volume name is specified
if not volume_name and deletion_policy != 'delete':
raise ValueError('Volume name is required if the deletion policy isn\'t set to "delete".')
volume_info = get_volume(ec2, volume_name) if volume_name else {}
if volume_info:
# set availability zone
availability_zone = volume_info['AvailabilityZone']
# set volume ID for the VolumeAttachment resource
attachment_resource['Properties']['VolumeId'] = volume_info['VolumeId']
# check size of the volume
if volume_size and (volume_size != volume_info['Size']):
raise ValueError('Specified size for the "%s" volume (%dGB) doesn\'t match the size of the '
'existing volume (%dGB).' % (volume_name, volume_size, volume_info['Size']))
else:
# new volume will be created
volume_resource_name = 'Volume' + device_letter.upper()
volume_resource = {
'Type': 'AWS::EC2::Volume',
'Properties': {
'AvailabilityZone': {'Fn::GetAtt': ['SpotInstance', 'AvailabilityZone']},
},
}
# update VolumeAttachment resource with the reference to new volume
attachment_resource['Properties']['VolumeId'] = {'Ref': volume_resource_name}
# check if a snapshot with the specified name exists
snapshot_info = get_snapshot(ec2, volume_name) if volume_name else {}
if snapshot_info:
# volume will be restored from the snapshot
# check size of the volume
if volume_size and (volume_size < snapshot_info['VolumeSize']):
raise ValueError('Specified size for the "%s" volume (%dGB) is less than size of the '
'snapshot (%dGB).'
% (volume_name, volume_size, snapshot_info['VolumeSize']))
# set snapshot ID
orig_snapshot_id = snapshot_info['SnapshotId']
volume_resource['Properties']['SnapshotId'] = orig_snapshot_id
# rename or delete the original snapshot on stack deletion
if deletion_policy == 'create_snapshot':
# rename the original snapshot once new snapshot is created
s_renaming_resource_name = 'RenameSnapshot' + device_letter.upper()
resources[s_renaming_resource_name] = {
'Type': 'Custom::SnapshotRenaming',
'Properties': {
'ServiceToken': {'Fn::GetAtt': ['RenameSnapshotFunction', 'Arn']},
'SnapshotId': orig_snapshot_id,
},
}
volume_resource['DependsOn'] = s_renaming_resource_name
elif deletion_policy == 'update_snapshot':
# delete the original snapshot once new snapshot is created
s_deletion_resource_name = 'DeleteSnapshot' + device_letter.upper()
resources[s_deletion_resource_name] = {
'Type': 'Custom::SnapshotDeletion',
'Properties': {
'ServiceToken': {'Fn::GetAtt': ['DeleteSnapshotFunction', 'Arn']},
'SnapshotId': orig_snapshot_id,
},
}
volume_resource['DependsOn'] = s_deletion_resource_name
else:
# empty volume will be created, check that the size is specified
if not volume_size:
raise ValueError('Size for the new volume is required.')
# set size of the volume
if volume_size:
volume_resource['Properties']['Size'] = volume_size
# set the Name tag for new volume (it's the future snapshot name as well)
if volume_name:
volume_resource['Properties']['Tags'] = [{'Key': 'Name', 'Value': volume_name}]
if deletion_policy in ['create_snapshot', 'update_snapshot']:
# create snapshots on termination
volume_resource['DeletionPolicy'] = 'Snapshot'
elif deletion_policy == 'retain':
# retain the volume on termination
volume_resource['DeletionPolicy'] = 'Retain'
elif deletion_policy == 'delete':
# delete the volume on termination
volume_resource['DeletionPolicy'] = 'Delete'
else:
raise ValueError('Unsupported deletion policy: "%s".' % deletion_policy)
# update resources
resources[volume_resource_name] = volume_resource
# update resources
resources[attachment_resource_name] = attachment_resource
return resources, availability_zone
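        # Illustrative shape of the return value for device letter "f" with an
        # existing volume (placeholder IDs):
        #   ({'VolumeAttachmentF': {'Type': 'AWS::EC2::VolumeAttachment',
        #                           'Properties': {'Device': '/dev/sdf',
        #                                          'InstanceId': {'Ref': 'SpotInstance'},
        #                                          'VolumeId': 'vol-0123'}}},
        #    'us-east-1a')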
| [
"[email protected]"
] | |
6ba923e2897f1f34a8d9fefb279295ca6d447ad8 | 3c88b31090d6568435e811a455ce934604fa5c9f | /category/migrations/0004_auto_20210604_1515.py | 1598b851990f23fb79ef630c2f5f22ca87780b4e | [] | no_license | kamran1231/Great-Cart-Django-Website | 09e0e7b5085737cf54614b45b5424ac5c273bb5b | a674593d5c8cb15be7b24dca397f9027659033e2 | refs/heads/main | 2023-05-17T08:52:31.092404 | 2021-06-09T20:21:08 | 2021-06-09T20:21:08 | 373,645,947 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | # Generated by Django 3.1.7 on 2021-06-04 09:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('category', '0003_alter_category_category_name'),
]
operations = [
migrations.AlterField(
model_name='category',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
]
| [
"[email protected]"
] | |
d8985677a59029b17e03e42069b38812e14ecf8d | 743d58c35caf21568feddc86946bbee340174721 | /leet_code/labuladong/stack/p0739_Daily_Temperatures.py | 0d1b05a124037d6c13f214f35b0a1ee5f41b145f | [] | no_license | klgentle/lc_python | 38009ed82614c8f21ca9af6e3779a2e0898af09f | aabe56e690b40e4b93afef99bfe46d9a06e20cea | refs/heads/master | 2022-12-02T05:23:55.585659 | 2022-08-07T12:11:38 | 2022-08-07T12:11:38 | 176,750,473 | 2 | 0 | null | 2022-11-15T23:42:06 | 2019-03-20T14:21:51 | Python | UTF-8 | Python | false | false | 926 | py | """
739. Daily Temperatures
Medium
Given a list of daily temperatures T, return a list such that, for each day in the input, tells you how many days you would have to wait until a warmer temperature. If there is no future day for which this is possible, put 0 instead.
For example, given the list of temperatures T = [73, 74, 75, 71, 69, 72, 76, 73], your output should be [1, 1, 4, 2, 1, 1, 0, 0].
Note: The length of temperatures will be in the range [1, 30000]. Each temperature will be an integer in the range [30, 100].
"""
from typing import List  # needed for the List[int] annotations below
class Solution:
def dailyTemperatures(self, T: List[int]) -> List[int]:
res = [0] * len(T)
stack = []
for i in range(len(T)-1, -1, -1):
while stack and T[stack[-1]] <= T[i]:
stack.pop()
if stack:
res[i] = stack[-1] - i
stack.append(i)
#print(f"stack:{stack}")
return res
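# Illustrative check against the docstring example (not part of the original file):
# Solution().dailyTemperatures([73, 74, 75, 71, 69, 72, 76, 73]) -> [1, 1, 4, 2, 1, 1, 0, 0]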
| [
"[email protected]"
] | |
ffd932dbd780505eb4bef606f414e3d7a4c848cc | fa93e53a9eee6cb476b8998d62067fce2fbcea13 | /build/position_controllers/catkin_generated/pkg.installspace.context.pc.py | 23b00e2290c58c2e5784fc5a4572705354fb4fd1 | [] | no_license | oyetripathi/ROS_conclusion_project | 2947ee2f575ddf05480dabc69cf8af3c2df53f73 | 01e71350437d57d8112b6cec298f89fc8291fb5f | refs/heads/master | 2023-06-30T00:38:29.711137 | 2021-08-05T09:17:54 | 2021-08-05T09:17:54 | 392,716,311 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 518 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "${prefix}/include".split(';') if "${prefix}/include" != "" else []
PROJECT_CATKIN_DEPENDS = "controller_interface;forward_command_controller".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lposition_controllers".split(';') if "-lposition_controllers" != "" else []
PROJECT_NAME = "position_controllers"
PROJECT_SPACE_DIR = "/home/sandeepan/tiago_public_ws/install"
PROJECT_VERSION = "0.4.2"
| [
"[email protected]"
] | |
aa6977b0b274bab8863a388a9723f9b4e5b84d81 | c74c907a32da37d333096e08d2beebea7bea65e7 | /kaikeba/image caption/image_captioning/image_captioning/models.py | 0ee1e2560a93401662355584208c4c607a7e887f | [] | no_license | wangqiang79/learn | 6b37cc41140cc2200d928f3717cfc72357d10d54 | e4b949a236fa52de0e199c69941bcbedd2c26897 | refs/heads/master | 2022-12-25T06:24:39.163061 | 2020-07-13T15:43:13 | 2020-07-13T15:43:13 | 231,796,188 | 2 | 2 | null | 2022-12-08T07:03:05 | 2020-01-04T16:45:33 | Jupyter Notebook | UTF-8 | Python | false | false | 9,252 | py | import torch
from torch import nn
import torchvision
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class Encoder(nn.Module):
"""
Encoder.
"""
def __init__(self, encoded_image_size=14):
super(Encoder, self).__init__()
self.enc_image_size = encoded_image_size
vgg = torchvision.models.vgg16(pretrained=True) # pretrained VGG16
# Remove linear and pool layers (since we're not doing classification)
modules = list(vgg.children())[:-2]
self.vgg = nn.Sequential(*modules)
# Resize image to fixed size to allow input images of variable size
self.adaptive_pool = nn.AdaptiveAvgPool2d((encoded_image_size, encoded_image_size))
self.fine_tune()
def forward(self, images):
"""
Forward propagation.
:param images: images, a tensor of dimensions (batch_size, 3, image_size, image_size)
:return: encoded images
"""
out = self.vgg(images) # (batch_size, 512, image_size/32, image_size/32)
out = self.adaptive_pool(out) # (batch_size, 512, encoded_image_size, encoded_image_size)
out = out.permute(0, 2, 3, 1) # (batch_size, encoded_image_size, encoded_image_size, 512)
return out
def fine_tune(self, fine_tune=True):
"""
:param fine_tune: Allow?
"""
for p in self.vgg.parameters():
p.requires_grad = False
for c in list(self.vgg.children())[0:-2]:
for p in c.parameters():
p.requires_grad = fine_tune
class Attention(nn.Module):
"""
Attention Network.
"""
def __init__(self, encoder_dim, decoder_dim, attention_dim):
"""
:param encoder_dim: feature size of encoded images
:param decoder_dim: size of decoder's RNN
:param attention_dim: size of the attention network
"""
super(Attention, self).__init__()
self.encoder_att = nn.Linear(encoder_dim, attention_dim) # linear layer to transform encoded image
self.decoder_att = nn.Linear(decoder_dim, attention_dim) # linear layer to transform decoder's output
self.full_att = nn.Linear(attention_dim, 1) # linear layer to calculate values to be softmax-ed
self.relu = nn.ReLU()
self.softmax = nn.Softmax(dim=1) # softmax layer to calculate weights
def forward(self, encoder_out, decoder_hidden):
"""
Forward propagation.
:param encoder_out: encoded images, a tensor of dimension (batch_size, num_pixels, encoder_dim)
:param decoder_hidden: previous decoder output, a tensor of dimension (batch_size, decoder_dim)
:return: attention weighted encoding, weights
"""
att1 = self.encoder_att(encoder_out) # (batch_size, num_pixels, attention_dim)
att2 = self.decoder_att(decoder_hidden) # (batch_size, attention_dim)
att = self.full_att(self.relu(att1 + att2.unsqueeze(1))).squeeze(2) # (batch_size, num_pixels)
alpha = self.softmax(att) # (batch_size, num_pixels)
attention_weighted_encoding = (encoder_out * alpha.unsqueeze(2)).sum(dim=1) # (batch_size, encoder_dim)
return attention_weighted_encoding, alpha
class DecoderWithAttention(nn.Module):
"""
Decoder.
"""
def __init__(self, attention_dim, embed_dim, decoder_dim, vocab_size, encoder_dim=512, dropout=0.5):
"""
:param attention_dim: size of attention network
:param embed_dim: embedding size
:param decoder_dim: size of decoder's RNN
:param vocab_size: size of vocabulary
:param encoder_dim: feature size of encoded images
:param dropout: dropout
"""
super(DecoderWithAttention, self).__init__()
self.encoder_dim = encoder_dim
self.attention_dim = attention_dim
self.embed_dim = embed_dim
self.decoder_dim = decoder_dim
self.vocab_size = vocab_size
self.dropout = dropout
self.attention = Attention(encoder_dim, decoder_dim, attention_dim) # attention network
self.embedding = nn.Embedding(vocab_size, embed_dim) # embedding layer
self.dropout = nn.Dropout(p=self.dropout)
self.decode_step = nn.LSTMCell(embed_dim + encoder_dim, decoder_dim, bias=True) # decoding LSTMCell
self.init_h = nn.Linear(encoder_dim, decoder_dim) # linear layer to find initial hidden state of LSTMCell
self.init_c = nn.Linear(encoder_dim, decoder_dim) # linear layer to find initial cell state of LSTMCell
self.f_beta = nn.Linear(decoder_dim, encoder_dim) # linear layer to create a sigmoid-activated gate
self.sigmoid = nn.Sigmoid()
self.fc = nn.Linear(decoder_dim, vocab_size) # linear layer to find scores over vocabulary
self.init_weights() # initialize some layers with the uniform distribution
def init_weights(self):
"""
Initializes some parameters with values from the uniform distribution, for easier convergence.
"""
self.embedding.weight.data.uniform_(-0.1, 0.1)
self.fc.bias.data.fill_(0)
self.fc.weight.data.uniform_(-0.1, 0.1)
def load_pretrained_embeddings(self, embeddings):
"""
Loads embedding layer with pre-trained embeddings.
:param embeddings: pre-trained embeddings
"""
self.embedding.weight = nn.Parameter(embeddings)
def fine_tune_embeddings(self, fine_tune=True):
"""
Allow fine-tuning of embedding layer? (Only makes sense to not-allow if using pre-trained embeddings).
:param fine_tune: Allow?
"""
for p in self.embedding.parameters():
p.requires_grad = fine_tune
def init_hidden_state(self, encoder_out):
"""
Creates the initial hidden and cell states for the decoder's LSTM based on the encoded images.
:param encoder_out: encoded images, a tensor of dimension (batch_size, num_pixels, encoder_dim)
:return: hidden state, cell state
"""
mean_encoder_out = encoder_out.mean(dim=1)
h = self.init_h(mean_encoder_out) # (batch_size, decoder_dim)
c = self.init_c(mean_encoder_out)
return h, c
def forward(self, encoder_out, encoded_captions, caption_lengths):
"""
Forward propagation.
:param encoder_out: encoded images, a tensor of dimension (batch_size, enc_image_size, enc_image_size, encoder_dim)
:param encoded_captions: encoded captions, a tensor of dimension (batch_size, max_caption_length)
:param caption_lengths: caption lengths, a tensor of dimension (batch_size, 1)
:return: scores for vocabulary, sorted encoded captions, decode lengths, weights, sort indices
"""
batch_size = encoder_out.size(0)
encoder_dim = encoder_out.size(-1)
vocab_size = self.vocab_size
# Flatten image
encoder_out = encoder_out.view(batch_size, -1, encoder_dim) # (batch_size, num_pixels, encoder_dim)
num_pixels = encoder_out.size(1)
# Sort input data by decreasing lengths; why? apparent below
caption_lengths, sort_ind = caption_lengths.squeeze(1).sort(dim=0, descending=True)
encoder_out = encoder_out[sort_ind]
encoded_captions = encoded_captions[sort_ind]
# Embedding
embeddings = self.embedding(encoded_captions) # (batch_size, max_caption_length, embed_dim)
# Initialize LSTM state
h, c = self.init_hidden_state(encoder_out) # (batch_size, decoder_dim)
# We won't decode at the <end> position, since we've finished generating as soon as we generate <end>
# So, decoding lengths are actual lengths - 1
decode_lengths = (caption_lengths - 1).tolist()
# Create tensors to hold word predicion scores and alphas
predictions = torch.zeros(batch_size, max(decode_lengths), vocab_size).to(device)
alphas = torch.zeros(batch_size, max(decode_lengths), num_pixels).to(device)
# At each time-step, decode by
# attention-weighing the encoder's output based on the decoder's previous hidden state output
# then generate a new word in the decoder with the previous word and the attention weighted encoding
for t in range(max(decode_lengths)):
batch_size_t = sum([l > t for l in decode_lengths])
attention_weighted_encoding, alpha = self.attention(encoder_out[:batch_size_t],
h[:batch_size_t])
gate = self.sigmoid(self.f_beta(h[:batch_size_t])) # gating scalar, (batch_size_t, encoder_dim)
attention_weighted_encoding = gate * attention_weighted_encoding
h, c = self.decode_step(
torch.cat([embeddings[:batch_size_t, t, :], attention_weighted_encoding], dim=1),
(h[:batch_size_t], c[:batch_size_t])) # (batch_size_t, decoder_dim)
preds = self.fc(self.dropout(h)) # (batch_size_t, vocab_size)
predictions[:batch_size_t, t, :] = preds
alphas[:batch_size_t, t, :] = alpha
return predictions, encoded_captions, decode_lengths, alphas, sort_ind
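# Minimal shape check (illustrative; all dimensions are arbitrary assumptions):
if __name__ == '__main__':
    decoder = DecoderWithAttention(attention_dim=64, embed_dim=32, decoder_dim=64, vocab_size=100)
    enc_out = torch.randn(2, 14 * 14, 512)   # fake encoder output: (batch, num_pixels, encoder_dim)
    caps = torch.randint(0, 100, (2, 10))    # fake encoded captions
    lens = torch.tensor([[10], [8]])         # caption lengths, shape (batch, 1)
    preds, _, dec_lens, alphas, _ = decoder(enc_out, caps, lens)
    print(preds.shape, alphas.shape)         # torch.Size([2, 9, 100]) torch.Size([2, 9, 196])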
| [
"[email protected]"
] | |
430b886607c68f95ee1443b58e22c10b06ca0c36 | b2135e3fc77666f043f0fbafd0d88ed9865d5b4f | /7183 Python Basics/32 Chapter 6 - About Properties/07 test_validation3/78794_01_code.py | 2f8efd74a7afa4db194872d5c8b652ef492fbd27 | [] | no_license | Felienne/spea | 164d05e9fbba82c7b7df8d00295f7157054f9248 | ecb06c66aaf6a2dced3f141ca415be9efb7dbff5 | refs/heads/master | 2020-03-17T17:35:27.302219 | 2018-05-17T10:14:49 | 2018-05-17T10:14:49 | 133,794,299 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 568 | py | #
class AboutAssignments(unittest.TestCase):
class Doctor:
def __init__(self):
self._age = 903
@property
def age(self):
return self._age
@age.setter
def age(self, value):
if value < self.age:
pass
# nice try! you can't get any younger
else:
self._age = value
def test_validation3(self):
jodie = self.Doctor()
self.assertEqual(903, jodie.age)
jodie.age += 9
self.assertEqual(__, jodie.age) | [
"[email protected]"
] | |
7b14e461e9ba7105b24ef8d77b490e8ec0419f57 | c0239d75a8199ec84ad683f945c21785c1b59386 | /dingtalk/api/rest/OapiChatTagDeleteRequest.py | 2292ed627d4873421afe37fd82864be50c362d9b | [] | no_license | luss613/oauth_dingtalk | 9f253a75ce914c577dbabfb84e97fd883e80e04b | 1e2554642d2b16c642a031670d08efa4a74e8252 | refs/heads/master | 2023-04-23T01:16:33.450821 | 2020-06-18T08:22:57 | 2020-06-18T08:22:57 | 264,966,287 | 1 | 1 | null | 2020-06-18T08:31:24 | 2020-05-18T14:33:25 | Python | UTF-8 | Python | false | false | 348 | py | '''
Created by auto_sdk on 2019.10.31
'''
from dingtalk.api.base import RestApi
class OapiChatTagDeleteRequest(RestApi):
def __init__(self,url=None):
RestApi.__init__(self,url)
self.chatid = None
self.group_tag = None
def getHttpMethod(self):
return 'POST'
def getapiname(self):
return 'dingtalk.oapi.chat.tag.delete'
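# Illustrative use (client wiring elided; the attribute names come from __init__ above):
#   req = OapiChatTagDeleteRequest()
#   req.chatid, req.group_tag = "chatXXXX", 1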
| [
"[email protected]"
] | |
187ba8799480652d89c93f0faa7a2c97b7f99b6a | d61f7eda203a336868c010abb8f9a6f45dd51adb | /497. Random Point in Non-overlapping Rectangles.py | 01542c98bf043ff665c52427319b5c46b11bdf49 | [] | no_license | Mschikay/leetcode | b91df914afc728c2ae1a13d3994568bb6c1dcffb | 7c5e5fe76cee542f67cd7dd3a389470b02597548 | refs/heads/master | 2020-04-17T12:11:38.810325 | 2019-10-06T02:37:32 | 2019-10-06T02:37:32 | 166,570,922 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 761 | py | class Solution:
    # NOTE: run standalone, this file also needs `import random` and `from typing import List`
    def __init__(self, rects: List[List[int]]):
self.rects = rects
self.prefix = [0]
for x1, y1, x2, y2 in rects:
self.prefix.append((x2 - x1 + 1) * (y2 - y1 + 1) + self.prefix[-1])
def pick(self) -> List[int]:
        num = random.randint(1, self.prefix[-1])  # sample 1..total area; 0 would land before the first rectangle
l, h = 0, len(self.prefix) - 1
while l <= h:
m = (l + h) // 2
if self.prefix[m] < num:
l = m + 1
else:
h = m - 1
x1, y1, x2, y2 = self.rects[l - 1]
x = random.randint(x1, x2)
y = random.randint(y1, y2)
return [x, y]
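        # Weighting example (illustrative): rects [[0, 0, 1, 1], [2, 2, 3, 3]] each cover
        # 4 integer points, so prefix = [0, 4, 8] and each rectangle is picked with
        # probability 4/8; points inside a rectangle are then uniform.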
# Your Solution object will be instantiated and called as such:
# obj = Solution(rects)
# param_1 = obj.pick() | [
"[email protected]"
] | |
ca6d981f70f9f5e2d0d59bf1cec839c78115a1ef | a1bc22600af8889ea1b96b102a021a4a360654d5 | /restapi/libs/ConnectionManager.py | d0eddd35935d2ac3a2b6e9da225c5336c50530e1 | [] | no_license | IndominusByte/hydro-tech-backend | 940e32f3d4981ec92f78c7efb2f11add0fa17bf5 | ac1ae3f05eb0b5a2c9da80560e7ee5e66e52848f | refs/heads/main | 2023-05-04T14:11:12.267438 | 2021-05-30T01:08:30 | 2021-05-30T01:08:30 | 371,831,757 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,048 | py | import logging, json
from fastapi import WebSocket
from fastapi_jwt_auth import AuthJWT
from fastapi_jwt_auth.exceptions import MissingTokenError
from controllers.ChatController import ChatCrud, ChatFetch
from controllers.UserController import UserFetch
from controllers.ReportController import ReportCrud
from controllers.SettingUserController import SettingUserFetch
from controllers.AlertController import AlertCrud, AlertLogic
from schemas.chats.ChatSchema import ChatData
from schemas.reports.ReportSchema import ReportCreate
from schemas.dashboard.DashboardSchema import (
DashboardSetValueServo,
DashboardSetValueHydro,
DashboardHydroData,
)
from pydantic import ValidationError
from user_agents import parse
from typing import List, Union
from config import settings
from redis import Redis
logger = logging.getLogger("uvicorn.info")
class ConnectionManager:
async def connect(
self,
websocket: WebSocket,
authorize: AuthJWT,
csrf_token: str = None,
token: str = None
):
await websocket.accept()
# user authentication
if csrf_token:
authorize.jwt_required("websocket",websocket=websocket,csrf_token=csrf_token) # check user login
decode_token = authorize.get_raw_jwt()
elif token:
authorize.jwt_required("websocket",token=token) # check IoT device login
decode_token = authorize.get_raw_jwt(token)
else:
raise MissingTokenError(status_code=1008,message="Missing access token from Query or Path")
# set state to websocket
user_agent = websocket.headers.get('user-agent')
if user_agent != 'arduino-WebSocket-Client':
websocket.state.type = "user"
websocket.state.device = str(parse(user_agent))
else:
websocket.state.type = "IoT"
websocket.state.device = user_agent
websocket.state.ip = websocket.client.host
websocket.state.user_id = decode_token['sub']
# remove all duplicate connection
for connection in self.active_connections:
if self.check_duplicate_connection(connection,websocket) is True:
await self.disconnect(connection,'duplicate')
self.active_connections.append(websocket)
def check_duplicate_connection(self, connection: WebSocket, websocket: WebSocket) -> bool:
return connection.state.type == websocket.state.type and \
connection.state.device == websocket.state.device and \
connection.state.ip == websocket.state.ip and \
connection.state.user_id == websocket.state.user_id
async def send_data(self, kind: str, connection: WebSocket, data: Union[str, bytes]) -> None:
try:
if kind == 'text': await connection.send_text(data)
if kind == 'bytes': await connection.send_bytes(data)
except RuntimeError:
await self.disconnect(connection,'invalid_data')
async def disconnect(self, websocket: WebSocket, msg: str):
try:
logger.info(f'{tuple(websocket.client)} - "WebSocket {websocket.url.path}" [disconnect-{msg}]')
self.active_connections.remove(websocket)
await websocket.close()
except ValueError:
pass
class ConnectionDashboard(ConnectionManager):
def __init__(self):
self.active_connections: List[WebSocket] = []
def set_type_device(self, kind: str, websocket: WebSocket) -> None:
if kind == "Hydro": websocket.state.type = "IoT:Hydro"
if kind == "Servo": websocket.state.type = "IoT:Servo"
if kind == "Camera": websocket.state.type = "IoT:Camera"
async def broadcast(self, msg_data: str, websocket: WebSocket, redis_conn: Redis) -> None:
try:
msg_data = dict(i.split(':') for i in msg_data.rstrip().split(','))
web_data = ",".join([":".join([key, str(val)]) for key, val in msg_data.items()])
# set type device IoT when make connection
self.set_type_device(msg_data['kind'], websocket)
# save data from hydro to db
if msg_data['kind'] == 'Hydro':
try:
m = ReportCreate(**msg_data)
user_id = int(websocket.state.user_id)
setting_user = await SettingUserFetch.filter_by_user_id(user_id) # get setting user
if setting_user is not None and setting_user['planted'] is True:
# create alert
if m.tank <= 50:
msg = f"Water remaining {m.tank}%"
if await AlertLogic.check_alert(user_id,'water_tank',msg) is False:
await AlertCrud.create_alert(**{
'type': 'water_tank',
'message': msg,
'user_id': user_id
})
if m.temp <= 15 or m.temp >= 30:
msg = "Oh no, your water temperature is abnormal," + \
f" water temperature right now {m.temp}°C"
if await AlertLogic.check_alert(user_id,'water_temp',msg) is False:
await AlertCrud.create_alert(**{
'type': 'water_temp',
'message': msg,
'user_id': user_id
})
# create report
await ReportCrud.create_report(**m.dict(),user_id=user_id)
except ValidationError:
await self.disconnect(websocket,'validation')
for connection in self.active_connections:
# send data web to camera for capture image & streaming
if (
msg_data['kind'] in ['live_cam_true', 'live_cam_false'] and
connection.state.type == "IoT:Camera" and
connection.state.user_id == websocket.state.user_id
):
await self.send_data('text', connection, web_data)
# send data web to camera for image calibration
if (
msg_data['kind'] in ['image_cal_true','image_cal_false'] and
connection.state.type == "IoT:Camera" and
connection.state.user_id == websocket.state.user_id
):
if msg_data['kind'] == 'image_cal_true':
redis_conn.set(f"camera_cal:{connection.state.user_id}","true",settings.image_cal_expires)
await self.send_data('text', connection, 'kind:capture_image')
if msg_data['kind'] == 'image_cal_false':
redis_conn.set(f"camera_cal:{connection.state.user_id}","false",settings.image_cal_expires)
# send data setting servo to Servo IoT
if (
msg_data['kind'] == 'set_value_servo' and
connection.state.type == "IoT:Servo" and
connection.state.user_id == websocket.state.user_id
):
# validation incoming data from user
try:
DashboardSetValueServo(**msg_data)
await self.send_data('text', connection, web_data)
except ValidationError:
await self.disconnect(websocket,'validation')
# send data hydro to user
if (
msg_data['kind'] == 'Hydro' and
connection.state.type == "user" and
connection.state.user_id == websocket.state.user_id
):
# validation incoming data from user
try:
DashboardHydroData(**msg_data)
await self.send_data('text', connection, web_data)
except ValidationError:
await self.disconnect(websocket,'validation')
# send data setting user to Hydro IoT
if (
msg_data['kind'] == 'set_hydro' and
connection.state.type == "IoT:Hydro" and
connection.state.user_id == websocket.state.user_id
):
# validation incoming data from user
try:
DashboardSetValueHydro(**msg_data)
await self.send_data('text', connection, web_data)
except ValidationError:
await self.disconnect(websocket,'validation')
except ValueError:
pass
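    # Wire-format note (illustrative): device messages are flat "key:value" pairs,
    # e.g. "kind:Hydro,temp:25,tank:80" parses above into
    # {"kind": "Hydro", "temp": "25", "tank": "80"}.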
async def streaming(self, stream: bytes, websocket: WebSocket) -> None:
# send data streaming to user and not device IoT
for connection in self.active_connections:
if (
connection.state.type == "user" and
connection.state.user_id == websocket.state.user_id
):
await self.send_data('bytes', connection, stream)
async def reset_servo(self) -> None:
for connection in self.active_connections:
if connection.state.type == "IoT:Servo":
user_id = int(connection.state.user_id)
setting_user = await SettingUserFetch.filter_by_user_id(user_id)
await self.send_data(
'text',
connection,
f"kind:set_value_servo,sh:{setting_user['servo_horizontal']},sv:{setting_user['servo_vertical']}"
)
async def capture_image(self) -> None:
for connection in self.active_connections:
if connection.state.type == "IoT:Camera":
await self.send_data('text', connection, 'kind:capture_image')
class ConnectionChat(ConnectionManager):
def __init__(self):
self.active_connections: List[WebSocket] = []
async def connect(
self,
websocket: WebSocket,
authorize: AuthJWT,
csrf_token: str = None,
):
await ConnectionManager.connect(self,websocket,authorize,csrf_token=csrf_token)
await self.list_user_status()
async def broadcast(self, msg_data: str, websocket: WebSocket) -> None:
if msg_data != "kind:get_list_user_status":
# save message to database
chat_id = await ChatCrud.create_chat(message=msg_data,user_id=int(websocket.state.user_id))
chat_db = await ChatFetch.filter_by_id(chat_id)
msg = ChatData(**{index:value for index,value in chat_db.items()}).dict()
# send message to all user
[
await self.send_data('text', connection, json.dumps(msg,default=str))
for connection in self.active_connections
]
else: await self.list_user_status()
async def list_user_status(self):
user_all = await UserFetch.get_user_id()
user_connection = [int(connection.state.user_id) for connection in self.active_connections]
online_user = [str(x) for x in user_all if x in user_connection]
offline_user = [str(x) for x in user_all if x not in user_connection]
total_online = len(online_user)
total_offline = len(offline_user)
msg = {
'total_online': total_online,
'total_offline': total_offline,
'online_user': online_user,
'offline_user': offline_user
}
[
await self.send_data('text', connection, json.dumps(msg,default=str))
for connection in self.active_connections
]
async def disconnect(self, websocket: WebSocket, msg: str):
await ConnectionManager.disconnect(self,websocket,msg)
await self.list_user_status()
| [
"[email protected]"
] | |
3d6198b0abdc87164e432fd09c0390ecba72de19 | de1abd0ebbb817aa5f23d369e7dda360fd6f1c32 | /chapter8/7-NltkAnalysis.py | 486c02f2c7559694ee722504c06720e50861ed6a | [] | no_license | CodedQuen/Web-Scraping-with-Python- | 33aaa2e3733aa1f2b8c7a533d74f5d08ac868197 | 67f2d5f57726d5a943f5f044480e68c36076965b | refs/heads/master | 2022-06-13T01:34:39.764531 | 2020-05-05T11:07:01 | 2020-05-05T11:07:01 | 261,435,932 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | from nltk import word_tokenize, sent_tokenize, pos_tag
sentences = sent_tokenize("Google is one of the best companies in the world. I constantly google myself to see what I'm up to.")
nouns = ['NN', 'NNS', 'NNP', 'NNPS']
for sentence in sentences:
if "google" in sentence.lower():
taggedWords = pos_tag(word_tokenize(sentence))
for word in taggedWords:
if word[0].lower() == "google" and word[1] in nouns:
print(sentence)
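# Note (added; assumes the standard NLTK data packages 'punkt' and
# 'averaged_perceptron_tagger' are installed): only the first sentence is
# likely to be printed, since "Google" there is tagged as a proper noun (NNP),
# while "google" in the second sentence is used as a verb.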
| [
"[email protected]"
] | |
48351d6d1b511a8717bd34a114b6e54683357290 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/acllog/flowcounteraghist1d.py | 2e2a886e4137ca0fffa75a3d90db0646a85fbed6 | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,371 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class FlowCounterAgHist1d(Mo):
"""
A class that represents historical aggregated statistics for Flow Record Counter in a 1 day sampling interval. This class updates every hour.
"""
meta = StatsClassMeta("cobra.model.acllog.FlowCounterAgHist1d", "Flow Record Counter")
counter = CounterMeta("hitscount", CounterCategory.COUNTER, "hits", "Hits Counter")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "hitscountCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "hitscountPer"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "hitscountSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "hitscountThr"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "hitscountTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "hitscountRate"
meta._counters.append(counter)
meta.moClassName = "acllogFlowCounterAgHist1d"
meta.rnFormat = "HDacllogFlowCounterAg1d-%(index)s"
meta.category = MoCategory.STATS_HISTORY
meta.label = "historical aggregated Flow Record Counter stats in 1 day"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.acllog.PermitL3Pkt")
meta.parentClasses.add("cobra.model.acllog.DropL2Pkt")
meta.parentClasses.add("cobra.model.acllog.DropL2Flow")
meta.parentClasses.add("cobra.model.acllog.DropL3Pkt")
meta.parentClasses.add("cobra.model.acllog.DropL3Flow")
meta.parentClasses.add("cobra.model.acllog.PermitL2Flow")
meta.parentClasses.add("cobra.model.acllog.PermitL3Flow")
meta.parentClasses.add("cobra.model.acllog.PermitL2Pkt")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.stats.Hist")
meta.superClasses.add("cobra.model.acllog.FlowCounterAgHist")
meta.rnPrefixes = [
('HDacllogFlowCounterAg1d-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "hitscountCum", "hitscountCum", 25142, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Hits Counter cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("hitscountCum", prop)
prop = PropMeta("str", "hitscountPer", "hitscountPer", 25143, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Hits Counter periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("hitscountPer", prop)
prop = PropMeta("str", "hitscountRate", "hitscountRate", 25147, PropCategory.IMPLICIT_RATE)
prop.label = "Hits Counter rate"
prop.isOper = True
prop.isStats = True
meta.props.add("hitscountRate", prop)
prop = PropMeta("str", "hitscountSpct", "hitscountSpct", 25144, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Hits Counter suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("hitscountSpct", prop)
prop = PropMeta("str", "hitscountThr", "hitscountThr", 25145, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Hits Counter thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("hitscountThr", prop)
prop = PropMeta("str", "hitscountTr", "hitscountTr", 25146, PropCategory.IMPLICIT_TREND)
prop.label = "Hits Counter trend"
prop.isOper = True
prop.isStats = True
meta.props.add("hitscountTr", prop)
prop = PropMeta("str", "index", "index", 25066, PropCategory.REGULAR)
prop.label = "History Index"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("index", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
meta.namingProps.append(getattr(meta.props, "index"))
def __init__(self, parentMoOrDn, index, markDirty=True, **creationProps):
namingVals = [index]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
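# --- Usage sketch (added for illustration; not part of the generated file) ---
# Stats-history MOs such as this one are normally read back from the APIC via
# a query rather than constructed by hand; the call below only demonstrates
# the constructor signature. The parent DN and index are made-up placeholders.
#
# hist = FlowCounterAgHist1d('topology/pod-1/node-101/sys', index='0')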
| [
"[email protected]"
] | |
2e5b24c6d45a87fa2c12e35d5b432da39edabc4a | 82ebcc53d1c3e80b8960520f934004b945c84f70 | /gumi/models/__init__.py | 66dce650fdf02fea6bbc93b79f43d1dcb08f0817 | [
"MIT"
] | permissive | kumasento/gconv-prune | 404fa15c508be017faf56a0e27edc5410fa459d9 | f81c417d3754102c902bd153809130e12607bd7d | refs/heads/main | 2021-10-06T12:54:49.626589 | 2021-10-04T16:13:59 | 2021-10-04T16:13:59 | 204,706,641 | 10 | 2 | null | null | null | null | UTF-8 | Python | false | false | 117 | py | from .resnet import *
from .preresnet import *
from .densenet import *
from .vgg import *
from .condensenet import *
| [
"[email protected]"
] | |
b3c22a904dac91d8b29c6d27d6ce97e5e99f49d8 | a034d4ba39789e4a351112c46dd04a38180cd06c | /appengine/monorail/framework/sql.py | 41fb66b26a0bd748c5788f67fa37cb9b6da157a5 | [
"BSD-3-Clause"
] | permissive | asdfghjjklllllaaa/infra | 050ad249ab44f264b4e2080aa9537ce74aafb022 | 8f63af54e46194cd29291813f2790ff6e986804d | refs/heads/master | 2023-01-10T21:55:44.811835 | 2019-07-01T14:03:32 | 2019-07-01T14:03:32 | 194,691,941 | 1 | 0 | BSD-3-Clause | 2023-01-07T07:12:37 | 2019-07-01T14:45:29 | Python | UTF-8 | Python | false | false | 37,456 | py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""A set of classes for interacting with tables in SQL."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import logging
import random
import re
import sys
import time
import settings
if not settings.unit_test_mode:
import MySQLdb
from framework import exceptions
from framework import framework_helpers
from infra_libs import ts_mon
from Queue import Queue
class ConnectionPool(object):
"""Manage a set of database connections such that they may be re-used.
"""
def __init__(self, poolsize=1):
self.poolsize = poolsize
self.queues = {}
@framework_helpers.retry(3, delay=0.1, backoff=2)
def get(self, instance, database):
"""Retun a database connection, or throw an exception if none can
be made.
"""
key = instance + '/' + database
    if key not in self.queues:
queue = Queue(self.poolsize)
self.queues[key] = queue
queue = self.queues[key]
if queue.empty():
cnxn = cnxn_ctor(instance, database)
else:
cnxn = queue.get()
# Make sure the connection is still good.
cnxn.ping()
cnxn.commit()
return cnxn
def release(self, cnxn):
    if cnxn.pool_key not in self.queues:
      raise ValueError('unknown pool key: %s' % cnxn.pool_key)
q = self.queues[cnxn.pool_key]
if q.full():
cnxn.close()
else:
q.put(cnxn)
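# Illustrative usage sketch (added commentary; the instance and database names
# are placeholders, not values from this codebase):
#   pool = ConnectionPool(poolsize=2)
#   cnxn = pool.get('project:region:instance', 'monorail')
#   try:
#     pass  # ... run queries on cnxn ...
#   finally:
#     pool.release(cnxn)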
@framework_helpers.retry(2, delay=1, backoff=2)
def cnxn_ctor(instance, database):
logging.info('About to connect to SQL instance %r db %r', instance, database)
if settings.unit_test_mode:
raise ValueError('unit tests should not need real database connections')
try:
if settings.local_mode:
start_time = time.time()
cnxn = MySQLdb.connect(
host='127.0.0.1', port=3306, db=database, user='root', charset='utf8')
else:
start_time = time.time()
cnxn = MySQLdb.connect(
unix_socket='/cloudsql/' + instance, db=database, user='root',
charset='utf8')
duration = int((time.time() - start_time) * 1000)
DB_CNXN_LATENCY.add(duration)
CONNECTION_COUNT.increment({'success': True})
except MySQLdb.OperationalError:
CONNECTION_COUNT.increment({'success': False})
raise
cnxn.pool_key = instance + '/' + database
cnxn.is_bad = False
return cnxn
# One connection pool per database instance (master, replicas are each an
# instance). We'll have four connections per instance because we fetch
# issue comments, stars, spam verdicts and spam verdict history in parallel
# with promises.
cnxn_pool = ConnectionPool(settings.db_cnxn_pool_size)
# MonorailConnection maintains a dictionary of connections to SQL databases.
# Each is identified by an int shard ID.
# And there is one connection to the master DB identified by key MASTER_CNXN.
MASTER_CNXN = 'master_cnxn'
CONNECTION_COUNT = ts_mon.CounterMetric(
'monorail/sql/connection_count',
'Count of connections made to the SQL database.',
[ts_mon.BooleanField('success')])
DB_CNXN_LATENCY = ts_mon.CumulativeDistributionMetric(
'monorail/sql/db_cnxn_latency',
'Time needed to establish a DB connection.',
None)
DB_QUERY_LATENCY = ts_mon.CumulativeDistributionMetric(
'monorail/sql/db_query_latency',
'Time needed to make a DB query.',
[ts_mon.StringField('type')])
DB_COMMIT_LATENCY = ts_mon.CumulativeDistributionMetric(
'monorail/sql/db_commit_latency',
'Time needed to make a DB commit.',
None)
DB_ROLLBACK_LATENCY = ts_mon.CumulativeDistributionMetric(
'monorail/sql/db_rollback_latency',
'Time needed to make a DB rollback.',
None)
DB_RETRY_COUNT = ts_mon.CounterMetric(
'monorail/sql/db_retry_count',
'Count of queries retried.',
None)
DB_QUERY_COUNT = ts_mon.CounterMetric(
'monorail/sql/db_query_count',
'Count of queries sent to the DB.',
[ts_mon.StringField('type')])
DB_COMMIT_COUNT = ts_mon.CounterMetric(
'monorail/sql/db_commit_count',
'Count of commits sent to the DB.',
None)
DB_ROLLBACK_COUNT = ts_mon.CounterMetric(
'monorail/sql/db_rollback_count',
'Count of rollbacks sent to the DB.',
None)
DB_RESULT_ROWS = ts_mon.CumulativeDistributionMetric(
'monorail/sql/db_result_rows',
'Number of results returned by a DB query.',
None)
def RandomShardID():
"""Return a random shard ID to load balance across replicas."""
return random.randint(0, settings.num_logical_shards - 1)
class MonorailConnection(object):
"""Create and manage connections to the SQL servers.
We only store connections in the context of a single user request, not
across user requests. The main purpose of this class is to make using
sharded tables easier.
"""
def __init__(self):
self.sql_cnxns = {} # {MASTER_CNXN: cnxn, shard_id: cnxn, ...}
def GetMasterConnection(self):
"""Return a connection to the master SQL DB."""
if MASTER_CNXN not in self.sql_cnxns:
self.sql_cnxns[MASTER_CNXN] = cnxn_pool.get(
settings.db_instance, settings.db_database_name)
logging.info(
'created a master connection %r', self.sql_cnxns[MASTER_CNXN])
return self.sql_cnxns[MASTER_CNXN]
def GetConnectionForShard(self, shard_id):
"""Return a connection to the DB replica that will be used for shard_id."""
if shard_id not in self.sql_cnxns:
physical_shard_id = shard_id % settings.num_logical_shards
replica_name = settings.db_replica_names[
physical_shard_id % len(settings.db_replica_names)]
shard_instance_name = (
settings.physical_db_name_format % replica_name)
self.sql_cnxns[shard_id] = cnxn_pool.get(
shard_instance_name, settings.db_database_name)
logging.info('created a replica connection for shard %d', shard_id)
return self.sql_cnxns[shard_id]
def Execute(self, stmt_str, stmt_args, shard_id=None, commit=True, retries=1):
"""Execute the given SQL statement on one of the relevant databases."""
if shard_id is None:
# No shard was specified, so hit the master.
sql_cnxn = self.GetMasterConnection()
else:
sql_cnxn = self.GetConnectionForShard(shard_id)
try:
return self._ExecuteWithSQLConnection(
sql_cnxn, stmt_str, stmt_args, commit=commit)
except MySQLdb.OperationalError as e:
logging.exception(e)
logging.info('retries: %r', retries)
if retries > 0:
DB_RETRY_COUNT.increment()
self.sql_cnxns = {} # Drop all old mysql connections and make new.
return self.Execute(
stmt_str, stmt_args, shard_id=shard_id, commit=commit,
retries=retries - 1)
else:
raise e
def _ExecuteWithSQLConnection(
self, sql_cnxn, stmt_str, stmt_args, commit=True):
"""Execute a statement on the given database and return a cursor."""
if stmt_str.startswith('INSERT') or stmt_str.startswith('REPLACE'):
logging.info('SQL stmt_str: \n%s', stmt_str)
logging.info('SQL stmt_args: %r', stmt_args)
else:
logging.info('SQL stmt: \n%s', (stmt_str % tuple(stmt_args)))
start_time = time.time()
cursor = sql_cnxn.cursor()
cursor.execute('SET NAMES utf8mb4')
logging.info('made cursor on %r in %d ms',
sql_cnxn, int((time.time() - start_time) * 1000))
if stmt_str.startswith('INSERT') or stmt_str.startswith('REPLACE'):
cursor.executemany(stmt_str, stmt_args)
duration = (time.time() - start_time) * 1000
DB_QUERY_LATENCY.add(duration, {'type': 'write'})
DB_QUERY_COUNT.increment({'type': 'write'})
else:
cursor.execute(stmt_str, args=stmt_args)
duration = (time.time() - start_time) * 1000
DB_QUERY_LATENCY.add(duration, {'type': 'read'})
DB_QUERY_COUNT.increment({'type': 'read'})
DB_RESULT_ROWS.add(cursor.rowcount)
logging.info('%d rows in %d ms', cursor.rowcount,
int(duration))
if commit and not stmt_str.startswith('SELECT'):
try:
sql_cnxn.commit()
duration = (time.time() - start_time) * 1000
DB_COMMIT_LATENCY.add(duration)
DB_COMMIT_COUNT.increment()
except MySQLdb.DatabaseError:
sql_cnxn.rollback()
duration = (time.time() - start_time) * 1000
DB_ROLLBACK_LATENCY.add(duration)
DB_ROLLBACK_COUNT.increment()
return cursor
def Commit(self):
"""Explicitly commit any pending txns. Normally done automatically."""
sql_cnxn = self.GetMasterConnection()
try:
sql_cnxn.commit()
except MySQLdb.DatabaseError:
logging.exception('Commit failed for cnxn, rolling back')
sql_cnxn.rollback()
def Close(self):
"""Safely close any connections that are still open."""
for sql_cnxn in self.sql_cnxns.values():
try:
sql_cnxn.rollback() # Abandon any uncommitted changes.
cnxn_pool.release(sql_cnxn)
except MySQLdb.DatabaseError:
# This might happen if the cnxn is somehow already closed.
        logging.exception('DatabaseError when trying to close cnxn')
class SQLTableManager(object):
"""Helper class to make it easier to deal with an SQL table."""
def __init__(self, table_name):
self.table_name = table_name
def Select(
self, cnxn, distinct=False, cols=None, left_joins=None,
joins=None, where=None, or_where_conds=False, group_by=None,
order_by=None, limit=None, offset=None, shard_id=None, use_clause=None,
having=None, **kwargs):
"""Compose and execute an SQL SELECT statement on this table.
Args:
cnxn: MonorailConnection to the databases.
distinct: If True, add DISTINCT keyword.
cols: List of columns to retrieve, defaults to '*'.
left_joins: List of LEFT JOIN (str, args) pairs.
joins: List of regular JOIN (str, args) pairs.
where: List of (str, args) for WHERE clause.
or_where_conds: Set to True to use OR in the WHERE conds.
group_by: List of strings for GROUP BY clause.
order_by: List of (str, args) for ORDER BY clause.
limit: Optional LIMIT on the number of rows returned.
offset: Optional OFFSET when using LIMIT.
shard_id: Int ID of the shard to query.
use_clause: Optional string USE clause to tell the DB which index to use.
having: List of (str, args) for Optional HAVING clause
**kwargs: WHERE-clause equality and set-membership conditions.
Keyword args are used to build up more WHERE conditions that compare
    column values to constants. The keyword argument foo='bar' translates to
    'foo = "bar"', and foo=[3, 4, 5] translates to 'foo IN (3, 4, 5)'.
Returns:
A list of rows, each row is a tuple of values for the requested cols.
"""
cols = cols or ['*'] # If columns not specified, retrieve all columns.
stmt = Statement.MakeSelect(
self.table_name, cols, distinct=distinct,
or_where_conds=or_where_conds)
if use_clause:
stmt.AddUseClause(use_clause)
if having:
stmt.AddHavingTerms(having)
stmt.AddJoinClauses(left_joins or [], left=True)
stmt.AddJoinClauses(joins or [])
stmt.AddWhereTerms(where or [], **kwargs)
stmt.AddGroupByTerms(group_by or [])
stmt.AddOrderByTerms(order_by or [])
stmt.SetLimitAndOffset(limit, offset)
stmt_str, stmt_args = stmt.Generate()
cursor = cnxn.Execute(stmt_str, stmt_args, shard_id=shard_id)
rows = cursor.fetchall()
cursor.close()
return rows
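  # Illustrative example (added commentary, not original code): with an Issue
  # table manager,
  #   issue_tbl = SQLTableManager('Issue')
  #   rows = issue_tbl.Select(cnxn, cols=['id', 'summary'], status='New',
  #                           order_by=[('id', [])], limit=10)
  # generates roughly:
  #   SELECT id, summary FROM Issue WHERE status = %s ORDER BY id LIMIT 10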
def SelectRow(
self, cnxn, cols=None, default=None, where=None, **kwargs):
"""Run a query that is expected to return just one row."""
rows = self.Select(cnxn, distinct=True, cols=cols, where=where, **kwargs)
if len(rows) == 1:
return rows[0]
elif not rows:
logging.info('SelectRow got 0 results, so using default %r', default)
return default
else:
      raise ValueError('SelectRow got %d results, expected only 1' % len(rows))
def SelectValue(self, cnxn, col, default=None, where=None, **kwargs):
"""Run a query that is expected to return just one row w/ one value."""
row = self.SelectRow(
cnxn, cols=[col], default=[default], where=where, **kwargs)
return row[0]
def InsertRows(
self, cnxn, cols, row_values, replace=False, ignore=False,
commit=True, return_generated_ids=False):
"""Insert all the given rows.
Args:
cnxn: MonorailConnection object.
cols: List of column names to set.
row_values: List of lists with values to store. The length of each
nested list should be equal to len(cols).
replace: Set to True if inserted values should replace existing DB rows
that have the same DB keys.
ignore: Set to True to ignore rows that would duplicate existing DB keys.
commit: Set to False if this operation is part of a series of operations
that should not be committed until the final one is done.
return_generated_ids: Set to True to return a list of generated
autoincrement IDs for inserted rows. This requires us to insert rows
one at a time.
Returns:
If return_generated_ids is set to True, this method returns a list of the
auto-increment IDs generated by the DB. Otherwise, [] is returned.
"""
if not row_values:
return None # Nothing to insert
generated_ids = []
if return_generated_ids:
# We must insert the rows one-at-a-time to know the generated IDs.
for row_value in row_values:
stmt = Statement.MakeInsert(
self.table_name, cols, [row_value], replace=replace, ignore=ignore)
stmt_str, stmt_args = stmt.Generate()
cursor = cnxn.Execute(stmt_str, stmt_args, commit=commit)
if cursor.lastrowid:
generated_ids.append(cursor.lastrowid)
cursor.close()
return generated_ids
stmt = Statement.MakeInsert(
self.table_name, cols, row_values, replace=replace, ignore=ignore)
stmt_str, stmt_args = stmt.Generate()
cnxn.Execute(stmt_str, stmt_args, commit=commit)
return []
def InsertRow(
self, cnxn, replace=False, ignore=False, commit=True, **kwargs):
"""Insert a single row into the table.
Args:
cnxn: MonorailConnection object.
replace: Set to True if inserted values should replace existing DB rows
that have the same DB keys.
ignore: Set to True to ignore rows that would duplicate existing DB keys.
commit: Set to False if this operation is part of a series of operations
that should not be committed until the final one is done.
**kwargs: column=value assignments to specify what to store in the DB.
Returns:
The generated autoincrement ID of the key column if one was generated.
Otherwise, return None.
"""
cols = sorted(kwargs.keys())
row = tuple(kwargs[col] for col in cols)
generated_ids = self.InsertRows(
cnxn, cols, [row], replace=replace, ignore=ignore,
commit=commit, return_generated_ids=True)
if generated_ids:
return generated_ids[0]
else:
return None
def Update(self, cnxn, delta, where=None, commit=True, limit=None, **kwargs):
"""Update one or more rows.
Args:
cnxn: MonorailConnection object.
delta: Dictionary of {column: new_value} assignments.
where: Optional list of WHERE conditions saying which rows to update.
commit: Set to False if this operation is part of a series of operations
that should not be committed until the final one is done.
limit: Optional LIMIT on the number of rows updated.
**kwargs: WHERE-clause equality and set-membership conditions.
Returns:
Int number of rows updated.
"""
if not delta:
return 0 # Nothing is being changed
stmt = Statement.MakeUpdate(self.table_name, delta)
stmt.AddWhereTerms(where, **kwargs)
stmt.SetLimitAndOffset(limit, None)
stmt_str, stmt_args = stmt.Generate()
cursor = cnxn.Execute(stmt_str, stmt_args, commit=commit)
result = cursor.rowcount
cursor.close()
return result
def IncrementCounterValue(self, cnxn, col_name, where=None, **kwargs):
"""Atomically increment a counter stored in MySQL, return new value.
Args:
cnxn: MonorailConnection object.
col_name: int column to increment.
where: Optional list of WHERE conditions saying which rows to update.
**kwargs: WHERE-clause equality and set-membership conditions. The
where and kwargs together should narrow the update down to exactly
one row.
Returns:
The new, post-increment value of the counter.
"""
stmt = Statement.MakeIncrement(self.table_name, col_name)
stmt.AddWhereTerms(where, **kwargs)
stmt_str, stmt_args = stmt.Generate()
cursor = cnxn.Execute(stmt_str, stmt_args)
assert cursor.rowcount == 1, (
'missing or ambiguous counter: %r' % cursor.rowcount)
result = cursor.lastrowid
cursor.close()
return result
def Delete(self, cnxn, where=None, or_where_conds=False, commit=True,
limit=None, **kwargs):
"""Delete the specified table rows.
Args:
cnxn: MonorailConnection object.
where: Optional list of WHERE conditions saying which rows to update.
or_where_conds: Set to True to use OR in the WHERE conds.
commit: Set to False if this operation is part of a series of operations
that should not be committed until the final one is done.
limit: Optional LIMIT on the number of rows deleted.
**kwargs: WHERE-clause equality and set-membership conditions.
Returns:
      Int number of rows deleted.
"""
# Deleting the whole table is never intended in Monorail.
assert where or kwargs
stmt = Statement.MakeDelete(self.table_name, or_where_conds=or_where_conds)
stmt.AddWhereTerms(where, **kwargs)
stmt.SetLimitAndOffset(limit, None)
stmt_str, stmt_args = stmt.Generate()
cursor = cnxn.Execute(stmt_str, stmt_args, commit=commit)
result = cursor.rowcount
cursor.close()
return result
class Statement(object):
"""A class to help build complex SQL statements w/ full escaping.
Start with a Make*() method, then fill in additional clauses as needed,
then call Generate() to return the SQL string and argument list. We pass
the string and args to MySQLdb separately so that it can do escaping on
the arg values as appropriate to prevent SQL-injection attacks.
The only values that are not escaped by MySQLdb are the table names
and column names, and bits of SQL syntax, all of which is hard-coded
in our application.
"""
@classmethod
def MakeSelect(cls, table_name, cols, distinct=False, or_where_conds=False):
"""Constuct a SELECT statement."""
assert _IsValidTableName(table_name)
assert all(_IsValidColumnName(col) for col in cols)
main_clause = 'SELECT%s %s FROM %s' % (
(' DISTINCT' if distinct else ''), ', '.join(cols), table_name)
return cls(main_clause, or_where_conds=or_where_conds)
@classmethod
def MakeInsert(
cls, table_name, cols, new_values, replace=False, ignore=False):
"""Constuct an INSERT statement."""
if replace == True:
return cls.MakeReplace(table_name, cols, new_values, ignore)
assert _IsValidTableName(table_name)
assert all(_IsValidColumnName(col) for col in cols)
ignore_word = ' IGNORE' if ignore else ''
main_clause = 'INSERT%s INTO %s (%s)' % (
ignore_word, table_name, ', '.join(cols))
return cls(main_clause, insert_args=new_values)
@classmethod
def MakeReplace(
cls, table_name, cols, new_values, ignore=False):
"""Construct an INSERT...ON DUPLICATE KEY UPDATE... statement.
Uses the INSERT/UPDATE syntax because REPLACE is literally a DELETE
followed by an INSERT, which doesn't play well with foreign keys.
INSERT/UPDATE is an atomic check of whether the primary key exists,
followed by an INSERT if it doesn't or an UPDATE if it does.
"""
assert _IsValidTableName(table_name)
assert all(_IsValidColumnName(col) for col in cols)
ignore_word = ' IGNORE' if ignore else ''
main_clause = 'INSERT%s INTO %s (%s)' % (
ignore_word, table_name, ', '.join(cols))
return cls(main_clause, insert_args=new_values, duplicate_update_cols=cols)
@classmethod
def MakeUpdate(cls, table_name, delta):
"""Constuct an UPDATE statement."""
assert _IsValidTableName(table_name)
assert all(_IsValidColumnName(col) for col in delta.keys())
update_strs = []
update_args = []
for col, val in delta.items():
update_strs.append(col + '=%s')
update_args.append(val)
main_clause = 'UPDATE %s SET %s' % (
table_name, ', '.join(update_strs))
return cls(main_clause, update_args=update_args)
@classmethod
def MakeIncrement(cls, table_name, col_name, step=1):
"""Constuct an UPDATE statement that increments and returns a counter."""
assert _IsValidTableName(table_name)
assert _IsValidColumnName(col_name)
main_clause = (
'UPDATE %s SET %s = LAST_INSERT_ID(%s + %%s)' % (
table_name, col_name, col_name))
update_args = [step]
return cls(main_clause, update_args=update_args)
@classmethod
def MakeDelete(cls, table_name, or_where_conds=False):
"""Constuct a DELETE statement."""
assert _IsValidTableName(table_name)
main_clause = 'DELETE FROM %s' % table_name
return cls(main_clause, or_where_conds=or_where_conds)
def __init__(
self, main_clause, insert_args=None, update_args=None,
duplicate_update_cols=None, or_where_conds=False):
self.main_clause = main_clause # E.g., SELECT or DELETE
self.or_where_conds = or_where_conds
self.insert_args = insert_args or [] # For INSERT statements
for row_value in self.insert_args:
if not all(_IsValidDBValue(val) for val in row_value):
raise exceptions.InputException('Invalid DB value %r' % (row_value,))
self.update_args = update_args or [] # For UPDATEs
for val in self.update_args:
if not _IsValidDBValue(val):
raise exceptions.InputException('Invalid DB value %r' % val)
self.duplicate_update_cols = duplicate_update_cols or [] # For REPLACE-ish
self.use_clauses = []
self.join_clauses, self.join_args = [], []
self.where_conds, self.where_args = [], []
self.having_conds, self.having_args = [], []
self.group_by_terms, self.group_by_args = [], []
self.order_by_terms, self.order_by_args = [], []
self.limit, self.offset = None, None
def Generate(self):
"""Return an SQL string having %s placeholders and args to fill them in."""
clauses = [self.main_clause] + self.use_clauses + self.join_clauses
if self.where_conds:
if self.or_where_conds:
clauses.append('WHERE ' + '\n OR '.join(self.where_conds))
else:
clauses.append('WHERE ' + '\n AND '.join(self.where_conds))
if self.group_by_terms:
clauses.append('GROUP BY ' + ', '.join(self.group_by_terms))
if self.having_conds:
assert self.group_by_terms
clauses.append('HAVING %s' % ','.join(self.having_conds))
if self.order_by_terms:
clauses.append('ORDER BY ' + ', '.join(self.order_by_terms))
if self.limit and self.offset:
clauses.append('LIMIT %d OFFSET %d' % (self.limit, self.offset))
elif self.limit:
clauses.append('LIMIT %d' % self.limit)
elif self.offset:
clauses.append('LIMIT %d OFFSET %d' % (sys.maxint, self.offset))
if self.insert_args:
clauses.append('VALUES (' + PlaceHolders(self.insert_args[0]) + ')')
args = self.insert_args
if self.duplicate_update_cols:
clauses.append('ON DUPLICATE KEY UPDATE %s' % (
', '.join(['%s=VALUES(%s)' % (col, col)
for col in self.duplicate_update_cols])))
assert not (self.join_args + self.update_args + self.where_args +
self.group_by_args + self.order_by_args + self.having_args)
else:
args = (self.join_args + self.update_args + self.where_args +
self.group_by_args + self.having_args + self.order_by_args)
assert not (self.insert_args + self.duplicate_update_cols)
args = _BoolsToInts(args)
stmt_str = '\n'.join(clause for clause in clauses if clause)
assert _IsValidStatement(stmt_str), stmt_str
return stmt_str, args
def AddUseClause(self, use_clause):
"""Add a USE clause (giving the DB a hint about which indexes to use)."""
assert _IsValidUseClause(use_clause), use_clause
self.use_clauses.append(use_clause)
def AddJoinClauses(self, join_pairs, left=False):
"""Save JOIN clauses based on the given list of join conditions."""
for join, args in join_pairs:
assert _IsValidJoin(join), join
assert join.count('%s') == len(args), join
self.join_clauses.append(
' %sJOIN %s' % (('LEFT ' if left else ''), join))
self.join_args.extend(args)
def AddGroupByTerms(self, group_by_term_list):
"""Save info needed to generate the GROUP BY clause."""
assert all(_IsValidGroupByTerm(term) for term in group_by_term_list)
self.group_by_terms.extend(group_by_term_list)
def AddOrderByTerms(self, order_by_pairs):
"""Save info needed to generate the ORDER BY clause."""
for term, args in order_by_pairs:
assert _IsValidOrderByTerm(term), term
assert term.count('%s') == len(args), term
self.order_by_terms.append(term)
self.order_by_args.extend(args)
def SetLimitAndOffset(self, limit, offset):
"""Save info needed to generate the LIMIT OFFSET clause."""
self.limit = limit
self.offset = offset
def AddWhereTerms(self, where_cond_pairs, **kwargs):
"""Generate a WHERE clause."""
where_cond_pairs = where_cond_pairs or []
for cond, args in where_cond_pairs:
assert _IsValidWhereCond(cond), cond
assert cond.count('%s') == len(args), cond
self.where_conds.append(cond)
self.where_args.extend(args)
for col, val in sorted(kwargs.items()):
assert _IsValidColumnName(col), col
eq = True
if col.endswith('_not'):
col = col[:-4]
eq = False
if isinstance(val, set):
        val = list(val)  # The MySQL interface cannot handle sets.
if val is None or val == []:
op = 'IS' if eq else 'IS NOT'
self.where_conds.append(col + ' ' + op + ' NULL')
elif isinstance(val, list):
op = 'IN' if eq else 'NOT IN'
# Sadly, MySQLdb cannot escape lists, so we flatten to multiple "%s"s
self.where_conds.append(
col + ' ' + op + ' (' + PlaceHolders(val) + ')')
self.where_args.extend(val)
else:
op = '=' if eq else '!='
self.where_conds.append(col + ' ' + op + ' %s')
self.where_args.append(val)
def AddHavingTerms(self, having_cond_pairs):
"""Generate a HAVING clause."""
for cond, args in having_cond_pairs:
assert _IsValidHavingCond(cond), cond
assert cond.count('%s') == len(args), cond
self.having_conds.append(cond)
self.having_args.extend(args)
def PlaceHolders(sql_args):
"""Return a comma-separated list of %s placeholders for the given args."""
return ','.join('%s' for _ in sql_args)
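# Example (added commentary): PlaceHolders([1, 2, 3]) returns '%s,%s,%s'.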
TABLE_PAT = '[A-Z][_a-zA-Z0-9]+'
COLUMN_PAT = '[a-z][_a-z]+'
COMPARE_OP_PAT = '(<|>|=|!=|>=|<=|LIKE|NOT LIKE)'
SHORTHAND = {
'table': TABLE_PAT,
'column': COLUMN_PAT,
'tab_col': r'(%s\.)?%s' % (TABLE_PAT, COLUMN_PAT),
'placeholder': '%s', # That's a literal %s that gets passed to MySQLdb
'multi_placeholder': '%s(, ?%s)*',
'compare_op': COMPARE_OP_PAT,
'opt_asc_desc': '( ASC| DESC)?',
'opt_alias': '( AS %s)?' % TABLE_PAT,
'email_cond': (r'\(?'
r'('
r'(LOWER\(Spare\d+\.email\) IS NULL OR )?'
r'LOWER\(Spare\d+\.email\) '
r'(%s %%s|IN \(%%s(, ?%%s)*\))'
r'( (AND|OR) )?'
r')+'
r'\)?' % COMPARE_OP_PAT),
'hotlist_cond': (r'\(?'
r'('
r'(LOWER\(Cond\d+\.name\) IS NULL OR )?'
r'LOWER\(Cond\d+\.name\) '
r'(%s %%s|IN \(%%s(, ?%%s)*\))'
r'( (AND|OR) )?'
r')+'
r'\)?' % COMPARE_OP_PAT),
'phase_cond': (r'\(?'
r'('
r'(LOWER\(Phase\d+\.name\) IS NULL OR )?'
r'LOWER\(Phase\d+\.name\) '
r'(%s %%s|IN \(%%s(, ?%%s)*\))?'
r'( (AND|OR) )?'
r')+'
r'\)?' % COMPARE_OP_PAT),
'approval_cond': (r'\(?'
r'('
r'(LOWER\(Cond\d+\.status\) IS NULL OR )?'
r'LOWER\(Cond\d+\.status\) '
r'(%s %%s|IN \(%%s(, ?%%s)*\))'
r'( (AND|OR) )?'
r')+'
r'\)?' % COMPARE_OP_PAT),
}
def _MakeRE(regex_str):
"""Return a regular expression object, expanding our shorthand as needed."""
return re.compile(regex_str.format(**SHORTHAND))
TABLE_RE = _MakeRE('^{table}$')
TAB_COL_RE = _MakeRE('^{tab_col}$')
USE_CLAUSE_RE = _MakeRE(
r'^USE INDEX \({column}\) USE INDEX FOR ORDER BY \({column}\)$')
HAVING_RE_LIST = [
_MakeRE(r'^COUNT\(\*\) {compare_op} {placeholder}$')]
COLUMN_RE_LIST = [
TAB_COL_RE,
_MakeRE(r'\*'),
_MakeRE(r'COUNT\(\*\)'),
_MakeRE(r'COUNT\({tab_col}\)'),
_MakeRE(r'COUNT\(DISTINCT\({tab_col}\)\)'),
_MakeRE(r'MAX\({tab_col}\)'),
_MakeRE(r'MIN\({tab_col}\)'),
_MakeRE(r'GROUP_CONCAT\((DISTINCT )?{tab_col}( ORDER BY {tab_col})?' \
r'( SEPARATOR \'.*\')?\)'),
]
JOIN_RE_LIST = [
TABLE_RE,
_MakeRE(
r'^{table}{opt_alias} ON {tab_col} = {tab_col}'
r'( AND {tab_col} = {tab_col})?'
r'( AND {tab_col} IN \({multi_placeholder}\))?$'),
_MakeRE(
r'^{table}{opt_alias} ON {tab_col} = {tab_col}'
r'( AND {tab_col} = {tab_col})?'
r'( AND {tab_col} = {placeholder})?'
r'( AND {tab_col} IN \({multi_placeholder}\))?'
r'( AND {tab_col} = {tab_col})?$'),
_MakeRE(
r'^{table}{opt_alias} ON {tab_col} = {tab_col}'
r'( AND {tab_col} = {tab_col})?'
r'( AND {tab_col} = {placeholder})?'
r'( AND {tab_col} IN \({multi_placeholder}\))?'
r'( AND {tab_col} IS NULL)?'
r'( AND \({tab_col} IS NULL'
r' OR {tab_col} NOT IN \({multi_placeholder}\)\))?$'),
_MakeRE(
r'^{table}{opt_alias} ON {tab_col} = {tab_col}'
r'( AND {tab_col} = {tab_col})?'
r'( AND {tab_col} = {placeholder})?'
r' AND \(?{tab_col} {compare_op} {placeholder}\)?'
r'( AND {tab_col} = {tab_col})?$'),
_MakeRE(
r'^{table}{opt_alias} ON {tab_col} = {tab_col}'
r'( AND {tab_col} = {tab_col})?'
r'( AND {tab_col} = {placeholder})?'
r' AND {tab_col} = {tab_col}$'),
_MakeRE(
r'^{table}{opt_alias} ON {tab_col} = {tab_col}'
r'( AND {tab_col} = {tab_col})?'
r'( AND {tab_col} = {placeholder})?'
r' AND \({tab_col} IS NULL OR'
r' {tab_col} != {placeholder}\)$'),
_MakeRE(
r'^{table}{opt_alias} ON {tab_col} = {tab_col}'
r' AND LOWER\({tab_col}\) = LOWER\({placeholder}\)'),
_MakeRE(
r'^{table}{opt_alias} ON {tab_col} = {tab_col} AND {email_cond}$'),
_MakeRE(
r'^{table}{opt_alias} ON {email_cond}$'),
_MakeRE(
r'^{table}{opt_alias} ON '
r'\({tab_col} = {tab_col} OR {tab_col} = {tab_col}\)$'),
_MakeRE(
r'^\({table} AS {table} JOIN User AS {table} '
r'ON {tab_col} = {tab_col} AND {email_cond}\) '
r'ON Issue(Snapshot)?.id = {tab_col}'
r'( AND {tab_col} IS NULL)?'),
_MakeRE(
r'^\({table} JOIN Hotlist AS {table} '
r'ON {tab_col} = {tab_col} AND {hotlist_cond}\) '
r'ON Issue.id = {tab_col}?'),
_MakeRE(
r'^\({table} AS {table} JOIN IssuePhaseDef AS {table} '
r'ON {tab_col} = {tab_col} AND {phase_cond}\) '
r'ON Issue.id = {tab_col}?'),
_MakeRE(
r'^IssuePhaseDef AS {table} ON {phase_cond}'),
_MakeRE(
r'^Issue2ApprovalValue AS {table} ON {tab_col} = {tab_col} '
r'AND {tab_col} = {placeholder} AND {approval_cond}'),
_MakeRE(
r'^{table} AS {table} ON {tab_col} = {tab_col} '
r'LEFT JOIN {table} AS {table} ON {tab_col} = {tab_col}'),
]
ORDER_BY_RE_LIST = [
_MakeRE(r'^{tab_col}{opt_asc_desc}$'),
_MakeRE(r'^LOWER\({tab_col}\){opt_asc_desc}$'),
_MakeRE(r'^ISNULL\({tab_col}\){opt_asc_desc}$'),
_MakeRE(r'^\(ISNULL\({tab_col}\) AND ISNULL\({tab_col}\)\){opt_asc_desc}$'),
_MakeRE(r'^FIELD\({tab_col}, {multi_placeholder}\){opt_asc_desc}$'),
_MakeRE(r'^FIELD\(IF\(ISNULL\({tab_col}\), {tab_col}, {tab_col}\), '
r'{multi_placeholder}\){opt_asc_desc}$'),
_MakeRE(r'^CONCAT\({tab_col}, {tab_col}\){opt_asc_desc}$'),
]
GROUP_BY_RE_LIST = [
TAB_COL_RE,
]
WHERE_COND_RE_LIST = [
_MakeRE(r'^TRUE$'),
_MakeRE(r'^FALSE$'),
_MakeRE(r'^{tab_col} IS NULL$'),
_MakeRE(r'^{tab_col} IS NOT NULL$'),
_MakeRE(r'^{tab_col} {compare_op} {tab_col}$'),
_MakeRE(r'^{tab_col} {compare_op} {placeholder}$'),
_MakeRE(r'^{tab_col} %% {placeholder} = {placeholder}$'),
_MakeRE(r'^{tab_col} IN \({multi_placeholder}\)$'),
_MakeRE(r'^{tab_col} NOT IN \({multi_placeholder}\)$'),
_MakeRE(r'^LOWER\({tab_col}\) IS NULL$'),
_MakeRE(r'^LOWER\({tab_col}\) IS NOT NULL$'),
_MakeRE(r'^LOWER\({tab_col}\) {compare_op} {placeholder}$'),
_MakeRE(r'^LOWER\({tab_col}\) IN \({multi_placeholder}\)$'),
_MakeRE(r'^LOWER\({tab_col}\) NOT IN \({multi_placeholder}\)$'),
_MakeRE(r'^LOWER\({tab_col}\) LIKE {placeholder}$'),
_MakeRE(r'^LOWER\({tab_col}\) NOT LIKE {placeholder}$'),
_MakeRE(r'^timestep < \(SELECT MAX\(j.timestep\) FROM Invalidate AS j '
r'WHERE j.kind = %s '
r'AND j.cache_key = Invalidate.cache_key\)$'),
_MakeRE(r'^\({tab_col} IS NULL OR {tab_col} {compare_op} {placeholder}\) '
          r'AND \({tab_col} IS NULL OR {tab_col} {compare_op} {placeholder}'
          r'\)$'),
  _MakeRE(r'^\({tab_col} IS NOT NULL AND {tab_col} {compare_op} '
          r'{placeholder}\) OR \({tab_col} IS NOT NULL AND {tab_col} '
          r'{compare_op} {placeholder}\)$'),
]
# Note: We never use ';' for multiple statements, '@' for SQL variables, or
# any quoted strings in stmt_str (quotes are put in by MySQLdb for args).
STMT_STR_RE = re.compile(
r'\A(SELECT|UPDATE|DELETE|INSERT|REPLACE) [\'-+=!<>%*.,()\w\s]+\Z',
re.MULTILINE)
def _IsValidDBValue(val):
if isinstance(val, basestring):
return '\x00' not in val
return True
def _IsValidTableName(table_name):
return TABLE_RE.match(table_name)
def _IsValidColumnName(column_expr):
return any(regex.match(column_expr) for regex in COLUMN_RE_LIST)
def _IsValidUseClause(use_clause):
return USE_CLAUSE_RE.match(use_clause)
def _IsValidHavingCond(cond):
if cond.startswith('(') and cond.endswith(')'):
cond = cond[1:-1]
if ' OR ' in cond:
return all(_IsValidHavingCond(c) for c in cond.split(' OR '))
if ' AND ' in cond:
return all(_IsValidHavingCond(c) for c in cond.split(' AND '))
return any(regex.match(cond) for regex in HAVING_RE_LIST)
def _IsValidJoin(join):
return any(regex.match(join) for regex in JOIN_RE_LIST)
def _IsValidOrderByTerm(term):
return any(regex.match(term) for regex in ORDER_BY_RE_LIST)
def _IsValidGroupByTerm(term):
return any(regex.match(term) for regex in GROUP_BY_RE_LIST)
def _IsValidWhereCond(cond):
if cond.startswith('NOT '):
cond = cond[4:]
if cond.startswith('(') and cond.endswith(')'):
cond = cond[1:-1]
if any(regex.match(cond) for regex in WHERE_COND_RE_LIST):
return True
if ' OR ' in cond:
return all(_IsValidWhereCond(c) for c in cond.split(' OR '))
if ' AND ' in cond:
return all(_IsValidWhereCond(c) for c in cond.split(' AND '))
return False
def _IsValidStatement(stmt_str):
"""Final check to make sure there is no funny junk sneaking in somehow."""
return (STMT_STR_RE.match(stmt_str) and
'--' not in stmt_str)
def _BoolsToInts(arg_list):
"""Convert any True values to 1s and Falses to 0s.
Google's copy of MySQLdb has bool-to-int conversion disabled,
and yet it seems to be needed otherwise they are converted
to strings and always interpreted as 0 (which is FALSE).
Args:
arg_list: (nested) list of SQL statment argument values, which may
include some boolean values.
Returns:
The same list, but with True replaced by 1 and False replaced by 0.
"""
result = []
for arg in arg_list:
if isinstance(arg, (list, tuple)):
result.append(_BoolsToInts(arg))
elif arg is True:
result.append(1)
elif arg is False:
result.append(0)
else:
result.append(arg)
return result
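# Example (added commentary): _BoolsToInts([True, [3, False]]) returns
# [1, [3, 0]].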
| [
"[email protected]"
] | |
7b44412ce11d8c6c342152422abcba093327737b | 3a48cfb0b43fe61f52355a67b2b5700aa8c5ddf2 | /src/som/interpreter/ast/nodes/message/generic_node.py | 5cfc38a7257dfdd24617ab9116a1996177084454 | [
"MIT"
] | permissive | SOM-st/RTruffleSOM | ce380d02985b0ef1f41f400409f61377dc3a583e | 1efc698577830ff3fcd1607e7155d9c6423e8804 | refs/heads/master | 2021-01-17T07:25:19.895376 | 2020-12-08T18:56:50 | 2020-12-08T18:56:50 | 17,311,290 | 9 | 2 | MIT | 2020-09-02T16:08:31 | 2014-03-01T08:45:25 | Python | UTF-8 | Python | false | false | 2,256 | py | from rpython.rlib.debug import make_sure_not_resized
from rpython.rlib.jit import we_are_jitted
from ..dispatch import SuperDispatchNode, UninitializedDispatchNode, send_does_not_understand
from .abstract_node import AbstractMessageNode
class GenericMessageNode(AbstractMessageNode):
_immutable_fields_ = ['_dispatch?']
_child_nodes_ = ['_dispatch']
def __init__(self, selector, universe, rcvr_expr, arg_exprs,
source_section = None):
AbstractMessageNode.__init__(self, selector, universe, rcvr_expr,
arg_exprs, source_section)
if rcvr_expr.is_super_node():
dispatch = SuperDispatchNode(selector, rcvr_expr.get_super_class(),
universe)
else:
dispatch = UninitializedDispatchNode(selector, universe)
self._dispatch = self.adopt_child(dispatch)
def replace_dispatch_list_head(self, node):
self._dispatch.replace(node)
def execute(self, frame):
rcvr, args = self._evaluate_rcvr_and_args(frame)
return self.execute_evaluated(frame, rcvr, args)
def execute_evaluated(self, frame, rcvr, args):
assert frame is not None
assert rcvr is not None
assert args is not None
make_sure_not_resized(args)
if we_are_jitted():
return self._direct_dispatch(rcvr, args)
else:
return self._dispatch.execute_dispatch(rcvr, args)
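    # Added commentary (an inference from the code above, not original
    # documentation): in JIT-compiled traces the polymorphic dispatch chain is
    # bypassed in favor of a direct method lookup that the JIT can optimize;
    # the dispatch chain is only consulted (and grown) while interpreting.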
def _direct_dispatch(self, rcvr, args):
method = self._lookup_method(rcvr)
if method:
return method.invoke(rcvr, args)
else:
return send_does_not_understand(rcvr, self._selector, args, self._universe)
def _lookup_method(self, rcvr):
rcvr_class = self._class_of_receiver(rcvr)
return rcvr_class.lookup_invokable(self._selector)
def _class_of_receiver(self, rcvr):
if self._rcvr_expr.is_super_node():
return self._rcvr_expr.get_super_class()
return rcvr.get_class(self._universe)
def __str__(self):
return "%s(%s, %s)" % (self.__class__.__name__,
self._selector,
self._source_section)
| [
"[email protected]"
] | |
e1f076a8b40ac225debbdfe4c6812b58dabf08a9 | ef74d9ad851021bcb0ed12880e14269b6ed7f617 | /Sample/Doudizhu/Server/src/ZyGames.Doudizhu.HostServer/PyScript/Action/Action12001.py | 7d60a50f0c7331ca1c254a61ca9b33c5de93279d | [
"BSD-2-Clause-Views",
"MIT"
] | permissive | sunyuping/Scut | b5e5798e9b519941f0ac3a08a3263dc0f45beb47 | ec2ea35c0e4de1f2da49c50d14e119a4f17cd93a | refs/heads/master | 2020-12-25T23:19:26.597830 | 2013-11-16T07:50:01 | 2013-11-16T07:50:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,188 | py | import clr, sys
from action import *
from System import *
from mathUtils import MathUtils
clr.AddReference('ZyGames.Framework');
clr.AddReference('ZyGames.Framework.Common');
clr.AddReference('ZyGames.Framework.Game');
clr.AddReference('ZyGames.Doudizhu.Bll');
clr.AddReference('ZyGames.Doudizhu.Model');
clr.AddReference('ZyGames.Doudizhu.Lang');
from System.Collections.Generic import *
from ZyGames.Framework.SyncThreading import *
from ZyGames.Framework.Common import *
from ZyGames.Framework.Game.Cache import *
from ZyGames.Framework.Game.Com.Rank import *
from ZyGames.Framework.Game.Service import *
from ZyGames.Doudizhu.Bll import *
from ZyGames.Doudizhu.Bll.Logic import *
from ZyGames.Doudizhu.Bll.Com.Chat import *
from ZyGames.Doudizhu.Lang import *
from ZyGames.Doudizhu.Model import *
from ZyGames.Framework.Cache.Generic import *
from ZyGames.Framework.Game.Runtime import *
from ZyGames.Framework.Cache import *
# 12001 - spin-the-wheel (dial) screen interface action
class UrlParam(HttpParam):
def __init__(self):
HttpParam.__init__(self)
class ActionResult(DataResult):
def __init__(self):
DataResult.__init__(self)
self.IsFree = 0
self.FreeNum = 0
        self.DailList = List[DialInfo]()  # instantiate an empty list (the original assigned the generic type itself)
self.UserCoin = 0
self.UserGold = 0
def getUrlElement(httpGet, parent):
urlParam = UrlParam()
if True:
urlParam.Result = True
else:
urlParam.Result = False
return urlParam
def takeAction(urlParam, parent):
actionResult = ActionResult()
    userId = parent.Current.User.PersonalId
user = parent.Current.User
gameRoom = GameRoom.Current
dailyFreeNum = ConfigEnvSet.GetInt("User.DailyFreeNum", 3);
useNum = 0
userRestrain = GameDataCacheSet[UserDailyRestrain]().FindKey(userId)
    if userRestrain is not None:
        gameRoom.RefreshRestrain(userRestrain)
        if userRestrain.RestrainProperty is not None:
useNum = userRestrain.RestrainProperty.DialFreeNum
if dailyFreeNum > useNum:
        actionResult.FreeNum = MathUtils.Subtraction(dailyFreeNum, useNum)
    else:
        actionResult.IsFree = 1
    actionResult.DailList = ConfigCacheSet[DialInfo]().FindAll()
actionResult.UserCoin = user.GameCoin
gameHall = GameHall(user)
actionResult.UserGold = gameHall.UserGold
    # TODO: needs implementation
return actionResult
def buildPacket(writer, urlParam, actionResult):
    position = 0
writer.PushShortIntoStack(actionResult.IsFree)
writer.PushIntoStack(actionResult.FreeNum)
writer.PushIntoStack(len(actionResult.DailList))
for info in actionResult.DailList:
postion = MathUtils.Addition(postion, 1);
Probability = PythonHelper.TransformString(info.Probability)
dsItem = DataStruct()
dsItem.PushIntoStack(postion)
dsItem.PushIntoStack(MathUtils.ToNotNullString(info.HeadID))
dsItem.PushIntoStack(MathUtils.ToNotNullString(Probability))
dsItem.PushIntoStack(MathUtils.ToNotNullString(info.ItemDesc))
dsItem.PushIntoStack(info.GameCoin)
writer.PushIntoStack(dsItem)
writer.PushIntoStack(actionResult.UserCoin)
writer.PushIntoStack(actionResult.UserGold)
return True | [
"[email protected]"
] | |
c7b9c6378a3dd842cabaa7d5fb31214631d710ee | c02b157399f2ede41abf5119e57f94bfe18c713d | /merc/__init__.py | d8fed445f0d9439793cf5d9c80b0de7600943748 | [
"MIT"
] | permissive | merc-devel/merc | b366befb6285af984c2da7eabdd1063f16e0414e | 15e010db2474b5d9f9720fc83983b03c95063a02 | refs/heads/master | 2021-01-18T17:15:33.553125 | 2014-11-08T03:02:30 | 2014-11-08T03:02:30 | 25,289,852 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 57 | py | from merc import util
__version__ = util.get_version()
| [
"[email protected]"
] | |
33984d775374f698a16233b294ee3e505d447c22 | 75519d2a9bf55e2d9376ea08a36676948a8b232c | /ui/uikits/TextSteam.py | 222dfb2dcd7959a0cc728b523b9bf881ec8afbf0 | [
"MIT"
] | permissive | CGFanTuan/damgteam | 9c32d59cbd0ecb9d3acffd9b902b918c40797e14 | aec414f084f6ab6ec5897314390605aaa8380d62 | refs/heads/master | 2020-09-17T00:29:24.832648 | 2019-11-25T09:51:13 | 2019-11-25T09:51:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,118 | py | # -*- coding: utf-8 -*-
"""
Script Name: TextSteam.py
Author: Do Trinh/Jimmy - 3D artist.
Description:
"""
# -------------------------------------------------------------------------------------------------------------
from __future__ import absolute_import, unicode_literals
from PyQt5.QtCore import QTextStream
from appData import __copyright__
class TextStream(QTextStream):
Type = 'DAMGSTREAM'
key = 'TextStream'
_name = 'DAMG Text Stream'
_copyright = __copyright__
@property
def copyright(self):
return self._copyright
@property
def name(self):
return self._name
@name.setter
def name(self, newName):
self._name = newName
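# Illustrative usage sketch (added; not part of the original module):
#   stream = TextStream()
#   stream.name = 'damg text stream'
#   print(stream.name, stream.copyright)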
# -------------------------------------------------------------------------------------------------------------
# Created by panda on 15/11/2019 - 5:43 PM
# © 2017 - 2018 DAMGteam. All rights reserved | [
"[email protected]"
] | |
5bc9f7cb725e608b584db5bb260968104795a451 | 8aefdf04c115c6c6ab64997576ced97d4727dd06 | /curation-api/src/users/migrations/0003_auto_20170809_0921.py | b1d063c42c10db300647e9e67f63a3b2095bfcd5 | [] | no_license | mohanj1919/django_app_test | a0d47bc98c604d81253c74488dcdbc2ccd039863 | 5d5bc4c1eecbf627d38260e4d314d8451d67a4f5 | refs/heads/master | 2021-05-08T06:01:21.712986 | 2017-10-11T12:12:07 | 2017-10-11T12:12:07 | 106,544,537 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 485 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-08-09 09:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0002_auto_20170703_1555'),
]
operations = [
migrations.AlterField(
model_name='curationuser',
name='phone_number',
field=models.CharField(max_length=15, null=True, unique=True),
),
]
| [
"[email protected]"
] | |
59919a9d9900991467fcaabb4cc8e2acaff0e9e0 | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/aphotomanager/testcase/firstcases/testcase5_028.py | 6856a16cc6fb6a518aa1c467766e72d1e3596a1c | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,391 | py | #coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
desired_caps = {
'platformName' : 'Android',
'deviceName' : 'Android Emulator',
'platformVersion' : '4.4',
'appPackage' : 'de.k3b.android.androFotoFinder',
'appActivity' : 'de.k3b.android.androFotoFinder.FotoGalleryActivity',
'resetKeyboard' : True,
'androidCoverage' : 'de.k3b.android.androFotoFinder/de.k3b.android.androFotoFinder.JacocoInstrumentation',
'noReset' : True
}
def command(cmd, timeout=5):
p = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
time.sleep(timeout)
p.terminate()
return
def getElememt(driver, str) :
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str)
return element
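# Added commentary (an assumption, not documented in the original test): the
# "adb shell input tap 50 50" is a last-resort workaround that pokes the
# screen (for example to dismiss an overlay) before one final lookup attempt.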
def getElememtBack(driver, str1, str2) :
for i in range(0, 2, 1):
try:
element = driver.find_element_by_android_uiautomator(str1)
except NoSuchElementException:
time.sleep(1)
else:
return element
for i in range(0, 5, 1):
try:
element = driver.find_element_by_android_uiautomator(str2)
except NoSuchElementException:
time.sleep(1)
else:
return element
os.popen("adb shell input tap 50 50")
element = driver.find_element_by_android_uiautomator(str2)
return element
def swipe(driver, startxper, startyper, endxper, endyper) :
size = driver.get_window_size()
width = size["width"]
height = size["height"]
try:
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
except WebDriverException:
time.sleep(1)
driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
end_y=int(height * endyper), duration=2000)
return
# testcase028
try :
starttime = time.time()
driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
element = getElememt(driver, "new UiSelector().className(\"android.widget.ImageButton\").description(\"More options\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"Show in new gallery\")", "new UiSelector().className(\"android.widget.TextView\").instance(5)")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"android:id/home\").className(\"android.widget.ImageView\")")
TouchAction(driver).tap(element).perform()
element = getElememt(driver, "new UiSelector().resourceId(\"android:id/home\").className(\"android.widget.ImageView\")")
TouchAction(driver).tap(element).perform()
element = getElememtBack(driver, "new UiSelector().text(\"/storage/sdcard/Pictures/Wikipedia/Michael Mosman District Judge.jpg\")", "new UiSelector().className(\"android.widget.TextView\").instance(3)")
TouchAction(driver).long_press(element).release().perform()
element = getElememtBack(driver, "new UiSelector().text(\"/storage/sdcard/pic4.jpg\")", "new UiSelector().className(\"android.widget.TextView\").instance(7)")
TouchAction(driver).long_press(element).release().perform()
element = getElememt(driver, "new UiSelector().resourceId(\"de.k3b.android.androFotoFinder:id/action_edit\").className(\"android.widget.TextView\")")
TouchAction(driver).long_press(element).release().perform()
element = getElememt(driver, "new UiSelector().resourceId(\"de.k3b.android.androFotoFinder:id/menu_item_share\").className(\"android.widget.TextView\")")
TouchAction(driver).tap(element).perform()
except Exception, e:
print 'FAIL'
print 'str(e):\t\t', str(e)
print 'repr(e):\t', repr(e)
print traceback.format_exc()
else:
print 'OK'
finally:
cpackage = driver.current_package
endtime = time.time()
print 'consumed time:', str(endtime - starttime), 's'
command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"5_028\"")
jacocotime = time.time()
print 'jacoco time:', str(jacocotime - endtime), 's'
driver.quit()
if (cpackage != 'de.k3b.android.androFotoFinder'):
cpackage = "adb shell am force-stop " + cpackage
os.popen(cpackage) | [
"[email protected]"
] | |
b15a144177a3426684ef389cecaaf365fc24dcb7 | f54070cd3048a3645cb25f301592a904d387a1c9 | /python_prgrams/testpython/file7.py | d8e98c05bbd14af3e9bf261e2d23c7dc207b2a22 | [] | no_license | mak705/Python_interview | 02bded60417f1e6e2d81e1f6cde6961d95da2a8e | aff2d6018fd539dbcde9e3a6b3f8a69167ffca0d | refs/heads/master | 2020-03-22T21:03:34.018919 | 2019-11-15T08:51:34 | 2019-11-15T08:51:34 | 140,653,056 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 138 | py | ##counting lines in a file
fhand = open('mul.py')
for line in fhand:
line = line.rstrip()
if not line.startswith('#From'):
print line
| [
"[email protected]"
] | |
63d97a4042ea1c94875bb42957b33061db5ac700 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnreggio.py | 32b325c4e4eef33665e12e96b01b39fc616f374c | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 216 | py | ii = [('ClarGE2.py', 1), ('RoscTTI3.py', 1), ('RoscTTI2.py', 2), ('MedwTAI.py', 1), ('HogaGMM.py', 3), ('MartHRW.py', 1), ('WestJIT.py', 1), ('RoscTTI.py', 1), ('BrewDTO.py', 2), ('ClarGE3.py', 2), ('RogeSIP.py', 1)] | [
"[email protected]"
] | |
fe616439df2cf983c744ea323919525c2e94cbb2 | 814fd0bea5bc063a4e34ebdd0a5597c9ff67532b | /chrome/common/extensions/docs/server2/refresh_tracker_test.py | f1f596f1afefe93317d8fa365571a158aa4abe97 | [
"BSD-3-Clause"
] | permissive | rzr/chromium-crosswalk | 1b22208ff556d69c009ad292bc17dca3fe15c493 | d391344809adf7b4f39764ac0e15c378169b805f | refs/heads/master | 2021-01-21T09:11:07.316526 | 2015-02-16T11:52:21 | 2015-02-16T11:52:21 | 38,887,985 | 0 | 0 | NOASSERTION | 2019-08-07T21:59:20 | 2015-07-10T15:35:50 | C++ | UTF-8 | Python | false | false | 1,941 | py | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from object_store_creator import ObjectStoreCreator
from refresh_tracker import RefreshTracker
class RefreshTrackerTest(unittest.TestCase):
def setUp(self):
self._refresh_tracker = RefreshTracker(ObjectStoreCreator.ForTest())
def testNonExistentRefreshIsIncomplete(self):
self.assertFalse(self._refresh_tracker.GetRefreshComplete('unicorns').Get())
def testEmptyRefreshIsComplete(self):
refresh_id = 'abcdefghijklmnopqrstuvwxyz'
self._refresh_tracker.StartRefresh(refresh_id, []).Get()
self.assertTrue(self._refresh_tracker.GetRefreshComplete(refresh_id).Get())
def testRefreshCompletion(self):
refresh_id = 'this is fun'
self._refresh_tracker.StartRefresh(refresh_id, ['/do/foo', '/do/bar']).Get()
self._refresh_tracker.MarkTaskComplete(refresh_id, '/do/foo').Get()
self.assertFalse(self._refresh_tracker.GetRefreshComplete(refresh_id).Get())
self._refresh_tracker.MarkTaskComplete(refresh_id, '/do/bar').Get()
self.assertTrue(self._refresh_tracker.GetRefreshComplete(refresh_id).Get())
def testUnknownTasksAreIrrelevant(self):
refresh_id = 'i am a banana'
self._refresh_tracker.StartRefresh(refresh_id, ['a', 'b', 'c', 'd']).Get()
self._refresh_tracker.MarkTaskComplete(refresh_id, 'a').Get()
self._refresh_tracker.MarkTaskComplete(refresh_id, 'b').Get()
self._refresh_tracker.MarkTaskComplete(refresh_id, 'c').Get()
self._refresh_tracker.MarkTaskComplete(refresh_id, 'q').Get()
self.assertFalse(self._refresh_tracker.GetRefreshComplete(refresh_id).Get())
self._refresh_tracker.MarkTaskComplete(refresh_id, 'd').Get()
self.assertTrue(self._refresh_tracker.GetRefreshComplete(refresh_id).Get())
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
a13bd7f9da7ea032c84dac021788da7cb8446ba9 | ac2c3e8c278d0aac250d31fd023c645fa3984a1b | /saleor/saleor/wishlist/error_codes.py | 5f77c477ea3948543085f5817a1d759cf6bc6e85 | [
"BSD-3-Clause",
"CC-BY-4.0"
] | permissive | jonndoe/saleor-test-shop | 152bc8bef615382a45ca5f4f86f3527398bd1ef9 | 1e83176684f418a96260c276f6a0d72adf7dcbe6 | refs/heads/master | 2023-01-21T16:54:36.372313 | 2020-12-02T10:19:13 | 2020-12-02T10:19:13 | 316,514,489 | 1 | 1 | BSD-3-Clause | 2020-11-27T23:29:20 | 2020-11-27T13:52:33 | TypeScript | UTF-8 | Python | false | false | 196 | py | from enum import Enum
class WishlistErrorCode(str, Enum):
GRAPHQL_ERROR = "graphql_error"
INVALID = "invalid"
NOT_FOUND = "not_found"
REQUIRED = "required"
UNIQUE = "unique"
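# Minimal sketch of a call site (an assumption mirroring Saleor's other error
# enums, not code from this file):
#   raise ValidationError("Wishlist not found.",
#                         code=WishlistErrorCode.NOT_FOUND)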
| [
"[email protected]"
] | |
b9a387605d577d71f54a61961bb4e49480104471 | 0180b1a8e19c0a02e7c00ebe1a58e17347ad1996 | /BCR2000/consts.py | a1a23805ec9ecae2ff31a2bf1a642c416c9ebe69 | [] | no_license | cce/buttons | e486af364c6032b4be75ab9de26f42b8d882c5b0 | 7d4936c91df99f4c6e08f7e347de64361c75e652 | refs/heads/master | 2021-01-17T06:56:55.859306 | 2014-12-22T05:03:00 | 2015-11-25T03:42:28 | 46,657,841 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,666 | py | # Embedded file name: /Users/versonator/Jenkins/live/Binary/Core_Release_64_static/midi-remote-scripts/BCR2000/consts.py
""" The following consts should be substituted with the Sys Ex messages for requesting
a controller's ID response and that response to allow for automatic lookup"""
ID_REQUEST = 0
ID_RESP = 0
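# Assumption for a concrete port: ID_REQUEST would hold the controller's SysEx
# identity-request bytes and ID_RESP the expected reply, enabling the automatic
# lookup mentioned above; the -1 values below mark controls left unmapped.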
GENERIC_STOP = 105
GENERIC_PLAY = 106
GENERIC_REC = 107
GENERIC_LOOP = 108
GENERIC_RWD = -1
GENERIC_FFWD = -1
GENERIC_TRANSPORT = (GENERIC_STOP,
GENERIC_PLAY,
GENERIC_REC,
GENERIC_LOOP,
GENERIC_RWD,
GENERIC_FFWD)
GENERIC_ENC1 = 1
GENERIC_ENC2 = 2
GENERIC_ENC3 = 3
GENERIC_ENC4 = 4
GENERIC_ENC5 = 5
GENERIC_ENC6 = 6
GENERIC_ENC7 = 7
GENERIC_ENC8 = 8
GENERIC_ENCODERS = (GENERIC_ENC1,
GENERIC_ENC2,
GENERIC_ENC3,
GENERIC_ENC4,
GENERIC_ENC5,
GENERIC_ENC6,
GENERIC_ENC7,
GENERIC_ENC8)
GENERIC_SLI1 = 81
GENERIC_SLI2 = 82
GENERIC_SLI3 = 83
GENERIC_SLI4 = 84
GENERIC_SLI5 = 85
GENERIC_SLI6 = 86
GENERIC_SLI7 = 87
GENERIC_SLI8 = 88
GENERIC_SLIDERS = (GENERIC_SLI1,
GENERIC_SLI2,
GENERIC_SLI3,
GENERIC_SLI4,
GENERIC_SLI5,
GENERIC_SLI6,
GENERIC_SLI7,
GENERIC_SLI8)
GENERIC_BUT1 = 73
GENERIC_BUT2 = 74
GENERIC_BUT3 = 75
GENERIC_BUT4 = 76
GENERIC_BUT5 = 77
GENERIC_BUT6 = 78
GENERIC_BUT7 = 79
GENERIC_BUT8 = 80
GENERIC_BUT9 = -1
GENERIC_BUTTONS = (GENERIC_BUT1,
GENERIC_BUT2,
GENERIC_BUT3,
GENERIC_BUT4,
GENERIC_BUT5,
GENERIC_BUT6,
GENERIC_BUT7,
GENERIC_BUT8)
GENERIC_PAD1 = 65
GENERIC_PAD2 = 66
GENERIC_PAD3 = 67
GENERIC_PAD4 = 68
GENERIC_PAD5 = 69
GENERIC_PAD6 = 70
GENERIC_PAD7 = 71
GENERIC_PAD8 = 72
GENERIC_PADS = (GENERIC_PAD1,
GENERIC_PAD2,
GENERIC_PAD3,
GENERIC_PAD4,
GENERIC_PAD5,
GENERIC_PAD6,
GENERIC_PAD7,
GENERIC_PAD8) | [
"[email protected]"
] | |
62fed4f8d716eb544aca34dbe492a0dfcc899225 | 4da57c6e9efb0a884449e019ce5c9b5d516d2bb1 | /exp/kernel_benchmark/bin_clean/amarel_aggr_data.py | 6d0a278193addea1d73a624d1f74908838af8828 | [] | no_license | radical-experiments/affinity_model | dc848fe1666b2f017d37ba041890462890eba9b5 | fc67420a2278020eee770680fa7ccef76ed2dfa5 | refs/heads/master | 2021-04-06T16:56:26.847920 | 2018-09-25T03:15:47 | 2018-09-25T03:15:47 | 83,361,464 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,431 | py | import os
import sys
import csv
from pprint import pprint
from files_dir_mod import *
def amarel_aggregate(src_path, dst_path):
for dirname in dirnames:
if not os.path.isdir(src_path+'/'+dirname):
print "{0} does not exist".format(dirname)
continue
dir_keywords = dirname.split('/')
pprint(dir_keywords)
machine = dir_keywords[1]
if machine != "amarel":
continue
dir_list = os.listdir(src_path+'/'+dirname)
if dir_list:
kernel = dir_keywords[0]
node_type = dir_keywords[2]
usage = dir_keywords[3]
for meas in measurements:
fd_out = open(dst_path+'/'+dirname+'/'+meas+'.csv', 'w')
writer = csv.writer(fd_out)
for session in dir_list:
with open(src_path+'/'+dirname+'/'+session+'/'+meas+'.csv') as fd_in:
reader = csv.reader(fd_in)
for row in reader:
cleaned_row = row
cleaned_row[0] = session + "__" + cleaned_row[0]
writer.writerow(cleaned_row)
fd_out.close()
pprint(dirname)
pprint(dir_list)
if __name__ == "__main__":
src_path = sys.argv[1]
dst_path = sys.argv[2]
amarel_aggregate(src_path, dst_path)
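# Example invocation (placeholder paths):
#   python amarel_aggr_data.py /data/raw_sessions /data/aggregated_csvs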
| [
"[email protected]"
] | |
0255e46bd31fd1ecc2393fdf7322e84db39abf47 | 97e60d0ca572d0dc3fc80f8719cd57a707ab6069 | /bias_zebra_print/stock.py | dd94d374c932338a87ab830754b76fb7b1fe5b94 | [] | no_license | josepato/bias_trunk_v6 | 0c7c86493c88f015c049a139360478cabec7f698 | b6ab6fc2ff3dc832f26effdba421bcc76d5cabac | refs/heads/master | 2020-06-12T14:18:31.101513 | 2016-12-15T22:55:54 | 2016-12-15T22:55:54 | 75,803,957 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,033 | py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#
#Bias Product / PriceList
#
from osv import osv
from osv import fields
import time
import netsvc
#----------------------------------------------------------
# Price List
#----------------------------------------------------------
OLDPRINTSTR = """^XA~TA000~JSN^LT0^MNW^MTT^PON^PMN^LH0,0^JMA^PR4,4^MD0^JUS^LRN^CI0^XZ
^XA
^MMT
^LL0142
^LS0
^FT246,119^A0N,17,16^FH\^FD%s^FS
^FT164,18^A0N,17,16^FH\^FD%s^FS
^FT286,110^A0B,17,16^FH\^FD%s^FS
^FT21,136^A0N,17,16^FH\^FD%s^FS
^FT4,123^A0N,17,16^FH\^FD%s^FS
^FT193,51^A0N,17,16^FH\^FD%s^FS
^FT4,67^A0N,17,16^FH\^FD%s/%s^FS
^FT3,51^A0N,17,16^FH\^FD%s/%s^FS
^FT3,34^A0N,17,16^FH\^FD%s^FS
^FT8,18^A0N,17,16^FH\^FD%s^FS
^PQ%i,0,1,Y^XZ"""
PRINTSTR = """^XA~TA000~JSN^LT0^MNW^MTT^PON^PMN^LH0,0^JMA^PR4,4^MD0^JUS^LRN^CI0^XZ
^XA
^MMT
^LL0850
^LS0
^FT48,731^A0I,17,16^FH\^FD%s^FS
^FT131,831^A0I,17,16^FH\^FD%s^FS
^FT8,740^A0R,17,16^FH\^FD%s^FS
^FT273,713^A0I,17,16^FH\^FD%s^FS
^FT290,727^A0I,17,16^FH\^FD%s^FS
^FT101,799^A0I,17,16^FH\^FD%s^FS
^FT291,782^A0I,17,16^FH\^FD%s/%s^FS
^FT291,799^A0I,17,16^FH\^FD%s/%s^FS
^FT291,815^A0I,17,16^FH\^FD%s^FS
^FT287,832^A0I,17,16^FH\^FD%s^FS
^BY1,3,22^FT291,755^BCI,,Y,N
^FD>:LA>50001>6BB^FS
^PQ%i,0,1,Y^XZ
"""
class stock_picking(osv.osv):
_inherit = "stock.picking"
def getZebraData(self, cr, uid, ids):
if isinstance(ids, (int, long)):
ids = [ids]
res = []
move_obj = self.pool.get('stock.move')
for picking in self.browse(cr, uid, ids):
mydict = {'id': picking.id}
mylines = []
for move in picking.move_lines:
mystr = PRINTSTR %(move.product_id.product_writing_kind_id.name,
move.product_id.product_colection_id.name,
move.product_id.default_code,
move.product_id.product_tmpl_id.categ_id.parent_id.name,
move.product_id.product_writing_metaerial_id.name,
(move.product_id.product_hardware_ids and move.product_id.product_hardware_ids[0].name) or "-",
(move.product_id.product_top_material_ids and move.product_id.product_top_material_ids[0].name) or "-",
(move.product_id.product_bottom_material_ids and move.product_id.product_bottom_material_ids[0].name) or "-",
(move.product_id.product_top_color_ids and move.product_id.product_top_color_ids[0].name) or "-",
(move.product_id.product_bottom_color_ids and move.product_id.product_bottom_color_ids[0].name) or "-",
move.product_id.product_line_id.name,
move.product_id.product_brand_id.name,
move.product_qty)
mylines.append(mystr)
mydict['lines'] = mylines
res.append(mydict)
return res
stock_picking()
| [
"[email protected]"
] | |
b563672c1f0906584832778d726b6ba3cac18c7f | 060ce17de7b5cdbd5f7064d1fceb4ded17a23649 | /fn_microsoft_defender/fn_microsoft_defender/util/customize.py | bb2e546adca2b9b9f81794d806d0518c8a1f2dd2 | [
"MIT"
] | permissive | ibmresilient/resilient-community-apps | 74bbd770062a22801cef585d4415c29cbb4d34e2 | 6878c78b94eeca407998a41ce8db2cc00f2b6758 | refs/heads/main | 2023-06-26T20:47:15.059297 | 2023-06-23T16:33:58 | 2023-06-23T16:33:58 | 101,410,006 | 81 | 107 | MIT | 2023-03-29T20:40:31 | 2017-08-25T14:07:33 | Python | UTF-8 | Python | false | false | 6,691 | py | # -*- coding: utf-8 -*-
"""Generate the Resilient customizations required for fn_microsoft_defender"""
import base64
import os
import io
try:
from resilient import ImportDefinition
except ImportError:
# Support Apps running on resilient-circuits < v35.0.195
from resilient_circuits.util import ImportDefinition
RES_FILE = "data/export.res"
def codegen_reload_data():
"""
Parameters required reload codegen for the fn_microsoft_defender package
"""
return {
"package": u"fn_microsoft_defender",
"message_destinations": [u"fn_microsoft_defender"],
"functions": [u"defender_alert_search", u"defender_app_execution", u"defender_collect_machine_investigation_package", u"defender_delete_indicator", u"defender_find_machines", u"defender_find_machines_by_file", u"defender_find_machines_by_filter", u"defender_get_file_information", u"defender_get_incident", u"defender_get_related_alert_information", u"defender_list_indicators", u"defender_machine_isolation", u"defender_machine_scan", u"defender_machine_vulnerabilities", u"defender_quarantine_file", u"defender_set_indicator", u"defender_update_alert", u"defender_update_incident"],
"workflows": [u"defender_atp_app_execution", u"defender_atp_collect_machine_investigation_package", u"defender_atp_delete_indicator", u"defender_atp_find_machines", u"defender_atp_find_machines_by_file_hash", u"defender_atp_get_file_information", u"defender_atp_machine_isolation", u"defender_atp_machine_scan", u"defender_atp_machine_vulnerabilities", u"defender_atp_set_indicator", u"defender_atp_update_alert", u"defender_atp_update_indicator", u"defender_close_incident", u"defender_find_machines_by_filter", u"defender_get_incident", u"defender_get_updated_machine_information", u"defender_list_indicators", u"defender_quarantine_file", u"defender_refresh_incident", u"defender_sync_comment", u"defender_sync_incident"],
"actions": [u"Create Artifact from Indicator", u"Defender Close Incident", u"Defender Find Machine by DNS name", u"Defender Find Machines by File Hash", u"Defender Find Machines by Internal IP Address", u"Defender Get File Information", u"Defender Get Incident", u"Defender List Indicators", u"Defender Machine App Execution Restriction", u"Defender Machine Collect Investigation Package", u"Defender Machine Isolate Action", u"Defender Machine Quarantine File", u"Defender Machine Refresh Information", u"Defender Machine Scan", u"Defender Machine Update Information", u"Defender Machine Vulnerabilities", u"Defender Refresh Incident", u"Defender Set Indicator", u"Defender Sync Comment", u"Defender Sync Incident", u"Defender Update Alert", u"Delete Indicator", u"Update Indicator"],
"incident_fields": [u"defender_classification", u"defender_determination", u"defender_incident_createtime", u"defender_incident_id", u"defender_incident_lastupdatetime", u"defender_incident_url", u"defender_tags"],
"incident_artifact_types": [],
"incident_types": [],
"datatables": [u"defender_alerts", u"defender_indicators", u"defender_machines"],
"automatic_tasks": [],
"scripts": [u"Create Artifact from Indicator"],
}
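# Assumption: this mapping is what `resilient-sdk codegen --reload` consumes to
# regenerate the package definition from the export file named in RES_FILE.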
def customization_data(client=None):
"""
Returns a Generator of ImportDefinitions (Customizations).
Install them using `resilient-circuits customize`
IBM Resilient Platform Version: 39.0.6328
Contents:
- Message Destinations:
- fn_microsoft_defender
- Functions:
- defender_alert_search
- defender_app_execution
- defender_collect_machine_investigation_package
- defender_delete_indicator
- defender_find_machines
- defender_find_machines_by_file
- defender_find_machines_by_filter
- defender_get_file_information
- defender_get_incident
- defender_get_related_alert_information
- defender_list_indicators
- defender_machine_isolation
- defender_machine_scan
- defender_machine_vulnerabilities
- defender_quarantine_file
- defender_set_indicator
- defender_update_alert
- defender_update_incident
- Workflows:
- defender_atp_app_execution
- defender_atp_collect_machine_investigation_package
- defender_atp_delete_indicator
- defender_atp_find_machines
- defender_atp_find_machines_by_file_hash
- defender_atp_get_file_information
- defender_atp_machine_isolation
- defender_atp_machine_scan
- defender_atp_machine_vulnerabilities
- defender_atp_set_indicator
- defender_atp_update_alert
- defender_atp_update_indicator
- defender_close_incident
- defender_find_machines_by_filter
- defender_get_incident
- defender_get_updated_machine_information
- defender_list_indicators
- defender_quarantine_file
- defender_refresh_incident
- defender_sync_comment
- defender_sync_incident
- Rules:
- Create Artifact from Indicator
- Defender Close Incident
- Defender Find Machine by DNS name
- Defender Find Machines by File Hash
- Defender Find Machines by Internal IP Address
- Defender Get File Information
- Defender Get Incident
- Defender List Indicators
- Defender Machine App Execution Restriction
- Defender Machine Collect Investigation Package
- Defender Machine Isolate Action
- Defender Machine Quarantine File
- Defender Machine Refresh Information
- Defender Machine Scan
- Defender Machine Update Information
- Defender Machine Vulnerabilities
- Defender Refresh Incident
- Defender Set Indicator
- Defender Sync Comment
- Defender Sync Incident
- Defender Update Alert
- Delete Indicator
- Update Indicator
- Incident Fields:
- defender_classification
- defender_determination
- defender_incident_createtime
- defender_incident_id
- defender_incident_lastupdatetime
- defender_incident_url
- defender_tags
- Data Tables:
- defender_alerts
- defender_indicators
- defender_machines
- Scripts:
- Create Artifact from Indicator
"""
res_file = os.path.join(os.path.dirname(__file__), RES_FILE)
if not os.path.isfile(res_file):
raise FileNotFoundError("{} not found".format(RES_FILE))
with io.open(res_file, mode='rt') as f:
b64_data = base64.b64encode(f.read().encode('utf-8'))
yield ImportDefinition(b64_data) | [
"[email protected]"
] | |
5f5e98e0204db775e5b06fd86453f2a62c41f96b | 6dc685fdb6f4a556225f13a1d26170ee203e9eb6 | /Windows2016Lab/scripts/Service_Windows2016_Action___create___Task_set_parameters.py | 57d63f95d0ebaa657302006a67576086a8cb18df | [
"MIT"
] | permissive | amaniai/calm | dffe6227af4c9aa3d95a08b059eac619b2180889 | fefc8b9f75e098daa4c88c7c4570495ce6be9ee4 | refs/heads/master | 2023-08-15T17:52:50.555026 | 2021-10-10T08:33:01 | 2021-10-10T08:33:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 549 | py | username = 'user-{}'.format(_construct_random_password(8,numDigits=4, numLetters=4, numPuncs=0, numCaps=0).lower())
password = _construct_random_password(10,upper=14, numDigits=4)
print('ACCESS_USERNAME={}'.format(username))
print('ACCESS_PASSWORD={}'.format(password))
calm_index = int('@@{calm_array_index}@@')
email_list = '''@@{EMAIL_LIST}@@'''
clean_list = [x for x in email_list.splitlines() if x.strip(' ')]
if calm_index < len(clean_list):
print('EMAIL={}'.format(clean_list[calm_index]))
else:
print('EMAIL={}'.format(clean_list[0]))
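# Assumption: Calm captures these printed KEY=VALUE lines as task output
# variables (hence the "set_parameters" task name), making ACCESS_USERNAME,
# ACCESS_PASSWORD and EMAIL available to later steps in the blueprint.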
| [
"[email protected]"
] | |
dd0eb441e105f56c21813d7d9263c17466d46938 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/217/usersdata/274/113684/submittedfiles/av2_p3_m2.py | 56a351331cc54ba12f7e3c1497129b302fa40d64 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 916 | py | # -*- coding: utf-8 -*
n=int(input("Dimensão do Quadrado: "))
while notn>=3:
n=int(input("Dimensão do Quadrado: "))
M=[]
for i in range(0,n,1):
L=[]
for j in range(o,n,1):
L.append(int(input("Elemento da Linha: "))
M.append(L)
somaL=[]
for i in range(0,n,1):
somaL.append(sum(M[i]))
somaC=[]
for j in range(0,n,1):
C=0
for i in range (0,n,1):
C=C+M[i][j]
somaC.append(C)
b=[somaL[0]]
cont=0
k=0
VE=0
VC=0
for i in range(0,n,1):
if somaL[i]in b:
continue
else:
ct+ct=1
k=1
if ct==1:
VE=somaL[k]
VC+somaL[0]
if ct!=1:
VE=somaL[0]
VC+somaL[1]
k=0
b2=[somaC[0]]
cont2=0
k2=0
VE2=0
for i in range(0,n,1):
if somaC[i]in b2:
continue
else:
ct2=ct2+1
k2=i
if cont2==1:
VE2=somaC[k2]
if ct!=1:
VE2=somaC[0]
k2=0
O=VC-(VE-M[k][k2])
P=M[k][k2]
print(O)
print(P)
| [
"[email protected]"
] | |
b284c2b20a27edfd46ff6f14ba59bcd5aff733d3 | be026334d457b1f78050f8262cd693922c6c8579 | /onnxruntime/python/tools/transformers/fusion_gpt_attention_megatron.py | 5418ccf513c770d3ec626ac6520e367c249eaa37 | [
"MIT"
] | permissive | ConnectionMaster/onnxruntime | 953c34c6599c9426043a8e5cd2dba05424084e3b | bac9c0eb50ed5f0361f00707dd6434061ef6fcfe | refs/heads/master | 2023-04-05T00:01:50.750871 | 2022-03-16T15:49:42 | 2022-03-16T15:49:42 | 183,019,796 | 1 | 0 | MIT | 2023-04-04T02:03:14 | 2019-04-23T13:21:11 | C++ | UTF-8 | Python | false | false | 10,803 | py | #-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
#--------------------------------------------------------------------------
import numpy as np
from logging import getLogger
from onnx import helper, numpy_helper, TensorProto
from onnx_model import OnnxModel
from fusion_base import Fusion
from fusion_utils import FusionUtils
from fusion_gpt_attention import FusionGptAttentionPastBase
logger = getLogger(__name__)
def is_close(value, expected_value):
return abs(value - expected_value) <= 1e-6
class FusionGptAttentionMegatron(FusionGptAttentionPastBase):
"""
Fuse GPT-2 Attention with past state subgraph from Megatron into one Attention node.
"""
def __init__(self, model: OnnxModel, num_heads: int):
super().__init__(model, num_heads)
def fuse_attention_node(self, matmul_before_split, add_before_split, past, present, input, reshape_qkv, mask):
attention_node_name = self.model.create_node_name('GptAttention')
int32_mask = self.cast_attention_mask(mask)
output = reshape_qkv.output[0]
i = 1 if (add_before_split.input[0] == matmul_before_split.output[0]) else 0
attention_node = helper.make_node(
'Attention',
inputs=[input, matmul_before_split.input[1], add_before_split.input[i], int32_mask, past],
outputs=[output, present],
name=attention_node_name)
attention_node.domain = "com.microsoft"
attention_node.attribute.extend([
helper.make_attribute("num_heads", self.num_heads),
helper.make_attribute("unidirectional", 0) # unidirectional shall not be ON for 4D attention mask
])
nodes_to_add = [attention_node]
self.nodes_to_add.extend(nodes_to_add)
for node in nodes_to_add:
self.node_name_to_graph_name[node.name] = self.this_graph_name
self.nodes_to_remove.append(reshape_qkv)
# we rely on prune_graph() to clean old subgraph nodes
self.prune_graph = True
def match_mask(self, sub_qk, mul_qk, matmul_qk, layernorm_before_attention):
mask_nodes = self.model.match_parent_path(
sub_qk,
['Mul', 'Sub', 'Slice', 'Slice'],
[1, 0, 1, 0]) # yapf: disable
if mask_nodes is None:
logger.debug("fuse_attention: failed to match unidirectional mask path")
return None
(mul_mask, sub_mask, last_slice_mask, slice_mask) = mask_nodes
if mul_qk.input[1] != last_slice_mask.output[0]:
logger.debug("fuse_attention failed: mul_qk.input[1] != last_slice_mask.output[0]")
return None
if not self.utils.check_node_input_value(mul_mask, 1, 10000.0):
logger.debug("fuse_attention failed: mul_mask input 1 is not constant 10000.0")
return None
if not self.utils.check_node_input_value(sub_mask, 0, 1.0):
logger.debug("fuse_attention failed: sub_mask input 0 is not constant 1.0")
return None
if not self.model.find_graph_input(slice_mask.input[0]):
logger.info("expect slick_mask input 0 to be graph input")
return None
if not self.utils.check_node_input_value(last_slice_mask, 1, [0]):
logger.debug("fuse_attention failed: last_slice_mask input 1 (starts) is not constant [0]")
return None
if not self.utils.check_node_input_value(last_slice_mask, 3, [3]):
logger.debug("fuse_attention failed: last_slice_mask input 3 (axes) is not constant [3]")
return False
if not self.utils.check_node_input_value(last_slice_mask, 4, [1]):
logger.debug("fuse_attention failed: last_slice_mask input 4 (steps) is not constant [1]")
return False
if not self.utils.check_node_input_value(slice_mask, 3, [2]):
logger.debug("fuse_attention failed: slice_mask input 3 (axes) is not constant [2]")
return None
if not self.utils.check_node_input_value(slice_mask, 4, [1]):
logger.debug("fuse_attention failed: slice_mask input 4 (steps) is not constant [1]")
return None
last_slice_path = self.model.match_parent_path(last_slice_mask, ['Unsqueeze', 'Gather', 'Shape', 'MatMul'],
[2, 0, 0, 0])
if last_slice_path is None or last_slice_path[-1] != matmul_qk:
logger.debug("fuse_attention: failed to match last slice path")
return None
first_slice_path = self.model.match_parent_path(slice_mask, ['Unsqueeze', 'Gather', 'Shape', 'MatMul'],
[2, 0, 0, 0])
if first_slice_path is None or first_slice_path[-1] != matmul_qk:
logger.debug("fuse_attention: failed to match first slice path")
return None
first_slice_sub = self.model.match_parent_path(slice_mask, ['Unsqueeze', 'Sub', 'Gather', 'Shape', 'MatMul'],
[1, 0, 0, 0, 0])
if first_slice_sub is None or first_slice_sub[-1] != matmul_qk:
logger.debug("fuse_attention: failed to match last slice sub path")
return None
first_slice_sub_1 = self.model.match_parent_path(slice_mask,
['Unsqueeze', 'Sub', 'Gather', 'Shape', 'LayerNormalization'],
[1, 0, 1, 0, 0])
if first_slice_sub_1 is None or first_slice_sub_1[-1] != layernorm_before_attention:
logger.debug("fuse_attention: failed to match last slice sub path 1")
return None
return slice_mask.input[0]
def fuse(self, normalize_node, input_name_to_nodes, output_name_to_node):
past = None
present = None
qkv_nodes = self.model.match_parent_path(
normalize_node,
['Add', 'Add', 'MatMul', 'Reshape', 'Transpose', 'MatMul'],
[ 0, 1, None, 0, 0, 0],
output_name_to_node=output_name_to_node,
) # yapf: disable
if qkv_nodes is None:
return
(add_skip, add_after_attention, matmul_after_attention, reshape_qkv, transpose_qkv, matmul_qkv) = qkv_nodes
skip_input = add_skip.input[0]
v_nodes = self.model.match_parent_path(
matmul_qkv,
['Concat', 'Transpose', 'Reshape', 'Split', 'Add', 'MatMul', 'LayerNormalization'],
[1, 1, 0, 0, 0, None, 0]) # yapf: disable
if v_nodes is None:
logger.debug("fuse_attention: failed to match v path")
return
(concat_v, transpose_v, reshape_v, split_v, add_before_split, matmul_before_split,
layernorm_before_attention) = v_nodes
if skip_input != layernorm_before_attention.input[0]:
logger.debug("fuse_attention: skip_input != layernorm_before_attention.input[0]")
return
qk_nodes = self.model.match_parent_path(matmul_qkv, ['Softmax', 'Sub', 'Mul', 'MatMul'], [0, 0, 0, 0])
if qk_nodes is None:
logger.debug("fuse_attention: failed to match qk path")
return None
(softmax_qk, sub_qk, mul_qk, matmul_qk) = qk_nodes
if self.model.get_node_attribute(softmax_qk, "axis") != 3:
logger.debug("fuse_attention failed: softmax_qk axis != 3")
return None
attention_mask = self.match_mask(sub_qk, mul_qk, matmul_qk, layernorm_before_attention)
q_nodes = self.model.match_parent_path(matmul_qk, ['Div', 'Transpose', 'Reshape', 'Split'], [0, 0, 0, 0])
if q_nodes is None:
logger.debug("fuse_attention: failed to match q path")
return
(div_q, transpose_q, reshape_q, split_q) = q_nodes
if split_v != split_q:
logger.debug("fuse_attention: skip since split_v != split_q")
return
k_nodes = self.model.match_parent_path(matmul_qk,
['Div', 'Transpose', 'Concat', 'Transpose', 'Reshape', 'Split'],
[1, 0, 0, 1, 0, 0])
if k_nodes is None:
logger.debug("fuse_attention: failed to match k path")
return
(div_k, _, concat_k, transpose_k, reshape_k, split_k) = k_nodes
if split_v != split_k:
logger.debug("fuse_attention: skip since split_v != split_k")
return
i, value = self.model.get_constant_input(reshape_k)
if not (isinstance(value, np.ndarray) and list(value.shape) == [4] and value[0] == 0 and value[1] == 0
and value[2] > 0 and value[3] > 0):
logger.debug("fuse_attention: reshape constant input is not [0, 0, N, H]")
return
num_heads = value[2]
if num_heads != self.num_heads:
logger.info(f"Detected num_heads={num_heads}. Ignore user specified value {self.num_heads}")
self.num_heads = num_heads
hidden_size_per_head = value[3]
i, value = self.model.get_constant_input(div_k)
expected_value = float(np.sqrt(np.sqrt(hidden_size_per_head)))
if not is_close(value, expected_value):
logger.debug(f"fuse_attention: div_k value={value} expected={expected_value}")
return
i, value = self.model.get_constant_input(div_q)
if not is_close(value, expected_value):
logger.debug(f"fuse_attention: div_q value={value} expected={expected_value}")
return
# Match past and present paths
past = self.match_past_pattern_2(concat_k, concat_v, output_name_to_node)
if past is None:
logger.debug("fuse_attention: match past failed")
return
if not self.model.find_graph_input(past):
logger.debug("fuse_attention: past is not graph input.")
# For GPT2LMHeadModel_BeamSearchStep, there is an extra Gather node to select beam index so it is not graph input.
present = self.match_present(concat_v, input_name_to_nodes)
if present is None:
logger.debug("fuse_attention: match present failed")
return
if not self.model.find_graph_output(present):
logger.info("fuse_attention: expect present to be graph output")
return
self.fuse_attention_node(matmul_before_split, add_before_split, past, present,
layernorm_before_attention.output[0], reshape_qkv, attention_mask)
| [
"[email protected]"
] | |
db889d7c5e5cba1d1b2ed71e137b42acf283c13f | b89ec2839b4a6bd4e2d774f64be9138f4b71a97e | /dataent/patches/v7_2/set_doctype_engine.py | 6de22a5c653dc5755560998976ce23c246a2026d | [
"MIT"
] | permissive | dataent/dataent | ec0e9a21d864bc0f7413ea39670584109c971855 | c41bd5942ffe5513f4d921c4c0595c84bbc422b4 | refs/heads/master | 2022-12-14T08:33:48.008587 | 2019-07-09T18:49:21 | 2019-07-09T18:49:21 | 195,729,981 | 0 | 0 | MIT | 2022-12-09T17:23:49 | 2019-07-08T03:26:28 | Python | UTF-8 | Python | false | false | 231 | py | from __future__ import unicode_literals
import dataent
def execute():
for t in dataent.db.sql('show table status'):
if t[0].startswith('tab'):
dataent.db.sql('update tabDocType set engine=%s where name=%s', (t[1], t[0][3:])) | [
"[email protected]"
] | |
6a4c16868431e1e23eb5da001f0272c6e45ae97e | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /7ECZC8CBEhy5QkvN3_15.py | b7cee2eac0f62400c8ad19d3b56c9c8b2daff2e8 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 151 | py |
def how_many_walls(n, w, h):
sum_ = 0
count = 0
wallSquare = w * h
while sum_ <= n:
sum_ += wallSquare
count += 1
return count - 1
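# Worked example (assumed reading: n blocks are available and each wall takes
# w * h of them): how_many_walls(100, 4, 5) == 5 -- five complete 20-block
# walls; the final loop iteration overshoots n, hence the `count - 1`.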
| [
"[email protected]"
] | |
ac9322695c8338f7c5b6352dc885ec393d8b1b9a | ca66a4283c5137f835377c3ed9a37128fcaed037 | /Lib/site-packages/pandas/tests/indexes/test_base.py | 48214ef4e92a8217fc8c6c342ae4de28f448658f | [] | no_license | NamithaKonda09/majorProject | f377f7a77d40939a659a3e59f5f1b771d88889ad | 4eff4ff18fa828c6278b00244ff2e66522e0cd51 | refs/heads/master | 2023-06-04T20:25:38.450271 | 2021-06-24T19:03:46 | 2021-06-24T19:03:46 | 370,240,034 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104,196 | py | # -*- coding: utf-8 -*-
from collections import defaultdict
from datetime import datetime, timedelta
import math
import operator
import sys
import numpy as np
import pytest
from pandas._libs.tslib import Timestamp
from pandas.compat import (
PY3, PY35, PY36, StringIO, lrange, lzip, range, text_type, u, zip)
from pandas.compat.numpy import np_datetime64_compat
from pandas.core.dtypes.common import is_unsigned_integer_dtype
from pandas.core.dtypes.generic import ABCIndex
import pandas as pd
from pandas import (
CategoricalIndex, DataFrame, DatetimeIndex, Float64Index, Int64Index,
PeriodIndex, RangeIndex, Series, TimedeltaIndex, UInt64Index, date_range,
isna, period_range)
import pandas.core.config as cf
from pandas.core.index import _get_combined_index, ensure_index_from_sequences
from pandas.core.indexes.api import Index, MultiIndex
from pandas.core.sorting import safe_sort
from pandas.tests.indexes.common import Base
import pandas.util.testing as tm
from pandas.util.testing import assert_almost_equal
class TestIndex(Base):
_holder = Index
def setup_method(self, method):
self.indices = dict(unicodeIndex=tm.makeUnicodeIndex(100),
strIndex=tm.makeStringIndex(100),
dateIndex=tm.makeDateIndex(100),
periodIndex=tm.makePeriodIndex(100),
tdIndex=tm.makeTimedeltaIndex(100),
intIndex=tm.makeIntIndex(100),
uintIndex=tm.makeUIntIndex(100),
rangeIndex=tm.makeRangeIndex(100),
floatIndex=tm.makeFloatIndex(100),
boolIndex=Index([True, False]),
catIndex=tm.makeCategoricalIndex(100),
empty=Index([]),
tuples=MultiIndex.from_tuples(lzip(
['foo', 'bar', 'baz'], [1, 2, 3])),
repeats=Index([0, 0, 1, 1, 2, 2]))
self.setup_indices()
def create_index(self):
return Index(list('abcde'))
def generate_index_types(self, skip_index_keys=[]):
"""
Return a generator of the various index types, leaving
out the ones with a key in skip_index_keys
"""
for key, index in self.indices.items():
if key not in skip_index_keys:
yield key, index
def test_can_hold_identifiers(self):
index = self.create_index()
key = index[0]
assert index._can_hold_identifiers_and_holds_name(key) is True
def test_new_axis(self):
new_index = self.dateIndex[None, :]
assert new_index.ndim == 2
assert isinstance(new_index, np.ndarray)
def test_copy_and_deepcopy(self):
new_copy2 = self.intIndex.copy(dtype=int)
assert new_copy2.dtype.kind == 'i'
@pytest.mark.parametrize("attr", ['strIndex', 'dateIndex'])
def test_constructor_regular(self, attr):
# regular instance creation
index = getattr(self, attr)
tm.assert_contains_all(index, index)
def test_constructor_casting(self):
# casting
arr = np.array(self.strIndex)
index = Index(arr)
tm.assert_contains_all(arr, index)
tm.assert_index_equal(self.strIndex, index)
def test_constructor_copy(self):
# copy
arr = np.array(self.strIndex)
index = Index(arr, copy=True, name='name')
assert isinstance(index, Index)
assert index.name == 'name'
tm.assert_numpy_array_equal(arr, index.values)
arr[0] = "SOMEBIGLONGSTRING"
assert index[0] != "SOMEBIGLONGSTRING"
# what to do here?
# arr = np.array(5.)
# pytest.raises(Exception, arr.view, Index)
def test_constructor_corner(self):
# corner case
pytest.raises(TypeError, Index, 0)
@pytest.mark.parametrize("index_vals", [
[('A', 1), 'B'], ['B', ('A', 1)]])
def test_construction_list_mixed_tuples(self, index_vals):
# see gh-10697: if we are constructing from a mixed list of tuples,
# make sure that we are independent of the sorting order.
index = Index(index_vals)
assert isinstance(index, Index)
assert not isinstance(index, MultiIndex)
@pytest.mark.parametrize('na_value', [None, np.nan])
@pytest.mark.parametrize('vtype', [list, tuple, iter])
def test_construction_list_tuples_nan(self, na_value, vtype):
# GH 18505 : valid tuples containing NaN
values = [(1, 'two'), (3., na_value)]
result = Index(vtype(values))
expected = MultiIndex.from_tuples(values)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("cast_as_obj", [True, False])
@pytest.mark.parametrize("index", [
pd.date_range('2015-01-01 10:00', freq='D', periods=3,
tz='US/Eastern', name='Green Eggs & Ham'), # DTI with tz
pd.date_range('2015-01-01 10:00', freq='D', periods=3), # DTI no tz
pd.timedelta_range('1 days', freq='D', periods=3), # td
pd.period_range('2015-01-01', freq='D', periods=3) # period
])
def test_constructor_from_index_dtlike(self, cast_as_obj, index):
if cast_as_obj:
result = pd.Index(index.astype(object))
else:
result = pd.Index(index)
tm.assert_index_equal(result, index)
if isinstance(index, pd.DatetimeIndex):
assert result.tz == index.tz
if cast_as_obj:
# GH#23524 check that Index(dti, dtype=object) does not
# incorrectly raise ValueError, and that nanoseconds are not
# dropped
index += pd.Timedelta(nanoseconds=50)
result = pd.Index(index, dtype=object)
assert result.dtype == np.object_
assert list(result) == list(index)
@pytest.mark.parametrize("index,has_tz", [
(pd.date_range('2015-01-01 10:00', freq='D', periods=3,
tz='US/Eastern'), True), # datetimetz
(pd.timedelta_range('1 days', freq='D', periods=3), False), # td
(pd.period_range('2015-01-01', freq='D', periods=3), False) # period
])
def test_constructor_from_series_dtlike(self, index, has_tz):
result = pd.Index(pd.Series(index))
tm.assert_index_equal(result, index)
if has_tz:
assert result.tz == index.tz
@pytest.mark.parametrize("klass", [Index, DatetimeIndex])
def test_constructor_from_series(self, klass):
expected = DatetimeIndex([Timestamp('20110101'), Timestamp('20120101'),
Timestamp('20130101')])
s = Series([Timestamp('20110101'), Timestamp('20120101'),
Timestamp('20130101')])
result = klass(s)
tm.assert_index_equal(result, expected)
def test_constructor_from_series_freq(self):
# GH 6273
# create from a series, passing a freq
dts = ['1-1-1990', '2-1-1990', '3-1-1990', '4-1-1990', '5-1-1990']
expected = DatetimeIndex(dts, freq='MS')
s = Series(pd.to_datetime(dts))
result = DatetimeIndex(s, freq='MS')
tm.assert_index_equal(result, expected)
def test_constructor_from_frame_series_freq(self):
# GH 6273
# create from a series, passing a freq
dts = ['1-1-1990', '2-1-1990', '3-1-1990', '4-1-1990', '5-1-1990']
expected = DatetimeIndex(dts, freq='MS')
df = pd.DataFrame(np.random.rand(5, 3))
df['date'] = dts
result = DatetimeIndex(df['date'], freq='MS')
assert df['date'].dtype == object
expected.name = 'date'
tm.assert_index_equal(result, expected)
expected = pd.Series(dts, name='date')
tm.assert_series_equal(df['date'], expected)
# GH 6274
# infer freq of same
freq = pd.infer_freq(df['date'])
assert freq == 'MS'
@pytest.mark.parametrize("array", [
np.arange(5), np.array(['a', 'b', 'c']), date_range(
'2000-01-01', periods=3).values
])
def test_constructor_ndarray_like(self, array):
# GH 5460#issuecomment-44474502
# it should be possible to convert any object that satisfies the numpy
# ndarray interface directly into an Index
class ArrayLike(object):
def __init__(self, array):
self.array = array
def __array__(self, dtype=None):
return self.array
expected = pd.Index(array)
result = pd.Index(ArrayLike(array))
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize('dtype', [
int, 'int64', 'int32', 'int16', 'int8', 'uint64', 'uint32',
'uint16', 'uint8'])
def test_constructor_int_dtype_float(self, dtype):
# GH 18400
if is_unsigned_integer_dtype(dtype):
index_type = UInt64Index
else:
index_type = Int64Index
expected = index_type([0, 1, 2, 3])
result = Index([0., 1., 2., 3.], dtype=dtype)
tm.assert_index_equal(result, expected)
def test_constructor_int_dtype_nan(self):
# see gh-15187
data = [np.nan]
expected = Float64Index(data)
result = Index(data, dtype='float')
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("dtype", ['int64', 'uint64'])
def test_constructor_int_dtype_nan_raises(self, dtype):
# see gh-15187
data = [np.nan]
msg = "cannot convert"
with pytest.raises(ValueError, match=msg):
Index(data, dtype=dtype)
def test_constructor_no_pandas_array(self):
ser = pd.Series([1, 2, 3])
result = pd.Index(ser.array)
expected = pd.Index([1, 2, 3])
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("klass,dtype,na_val", [
(pd.Float64Index, np.float64, np.nan),
(pd.DatetimeIndex, 'datetime64[ns]', pd.NaT)
])
def test_index_ctor_infer_nan_nat(self, klass, dtype, na_val):
# GH 13467
na_list = [na_val, na_val]
expected = klass(na_list)
assert expected.dtype == dtype
result = Index(na_list)
tm.assert_index_equal(result, expected)
result = Index(np.array(na_list))
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("pos", [0, 1])
@pytest.mark.parametrize("klass,dtype,ctor", [
(pd.DatetimeIndex, 'datetime64[ns]', np.datetime64('nat')),
(pd.TimedeltaIndex, 'timedelta64[ns]', np.timedelta64('nat'))
])
def test_index_ctor_infer_nat_dt_like(self, pos, klass, dtype, ctor,
nulls_fixture):
expected = klass([pd.NaT, pd.NaT])
assert expected.dtype == dtype
data = [ctor]
data.insert(pos, nulls_fixture)
result = Index(data)
tm.assert_index_equal(result, expected)
result = Index(np.array(data, dtype=object))
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("swap_objs", [True, False])
def test_index_ctor_nat_result(self, swap_objs):
# mixed np.datetime64/timedelta64 nat results in object
data = [np.datetime64('nat'), np.timedelta64('nat')]
if swap_objs:
data = data[::-1]
expected = pd.Index(data, dtype=object)
tm.assert_index_equal(Index(data), expected)
tm.assert_index_equal(Index(np.array(data, dtype=object)), expected)
def test_index_ctor_infer_periodindex(self):
xp = period_range('2012-1-1', freq='M', periods=3)
rs = Index(xp)
tm.assert_index_equal(rs, xp)
assert isinstance(rs, PeriodIndex)
@pytest.mark.parametrize("vals,dtype", [
([1, 2, 3, 4, 5], 'int'), ([1.1, np.nan, 2.2, 3.0], 'float'),
(['A', 'B', 'C', np.nan], 'obj')
])
def test_constructor_simple_new(self, vals, dtype):
index = Index(vals, name=dtype)
result = index._simple_new(index.values, dtype)
tm.assert_index_equal(result, index)
@pytest.mark.parametrize("vals", [
[1, 2, 3], np.array([1, 2, 3]), np.array([1, 2, 3], dtype=int),
# below should coerce
[1., 2., 3.], np.array([1., 2., 3.], dtype=float)
])
def test_constructor_dtypes_to_int64(self, vals):
index = Index(vals, dtype=int)
assert isinstance(index, Int64Index)
@pytest.mark.parametrize("vals", [
[1, 2, 3], [1., 2., 3.], np.array([1., 2., 3.]),
np.array([1, 2, 3], dtype=int), np.array([1., 2., 3.], dtype=float)
])
def test_constructor_dtypes_to_float64(self, vals):
index = Index(vals, dtype=float)
assert isinstance(index, Float64Index)
@pytest.mark.parametrize("cast_index", [True, False])
@pytest.mark.parametrize("vals", [
[True, False, True], np.array([True, False, True], dtype=bool)
])
def test_constructor_dtypes_to_object(self, cast_index, vals):
if cast_index:
index = Index(vals, dtype=bool)
else:
index = Index(vals)
assert isinstance(index, Index)
assert index.dtype == object
@pytest.mark.parametrize("vals", [
[1, 2, 3], np.array([1, 2, 3], dtype=int),
np.array([np_datetime64_compat('2011-01-01'),
np_datetime64_compat('2011-01-02')]),
[datetime(2011, 1, 1), datetime(2011, 1, 2)]
])
def test_constructor_dtypes_to_categorical(self, vals):
index = Index(vals, dtype='category')
assert isinstance(index, CategoricalIndex)
@pytest.mark.parametrize("cast_index", [True, False])
@pytest.mark.parametrize("vals", [
Index(np.array([np_datetime64_compat('2011-01-01'),
np_datetime64_compat('2011-01-02')])),
Index([datetime(2011, 1, 1), datetime(2011, 1, 2)])
])
def test_constructor_dtypes_to_datetime(self, cast_index, vals):
if cast_index:
index = Index(vals, dtype=object)
assert isinstance(index, Index)
assert index.dtype == object
else:
index = Index(vals)
assert isinstance(index, DatetimeIndex)
@pytest.mark.parametrize("cast_index", [True, False])
@pytest.mark.parametrize("vals", [
np.array([np.timedelta64(1, 'D'), np.timedelta64(1, 'D')]),
[timedelta(1), timedelta(1)]
])
def test_constructor_dtypes_to_timedelta(self, cast_index, vals):
if cast_index:
index = Index(vals, dtype=object)
assert isinstance(index, Index)
assert index.dtype == object
else:
index = Index(vals)
assert isinstance(index, TimedeltaIndex)
@pytest.mark.parametrize("attr, utc", [
['values', False],
['asi8', True]])
@pytest.mark.parametrize("klass", [pd.Index, pd.DatetimeIndex])
def test_constructor_dtypes_datetime(self, tz_naive_fixture, attr, utc,
klass):
# Test constructing with a datetimetz dtype
# .values produces numpy datetimes, so these are considered naive
# .asi8 produces integers, so these are considered epoch timestamps
# ^the above will be true in a later version. Right now we `.view`
# the i8 values as NS_DTYPE, effectively treating them as wall times.
index = pd.date_range('2011-01-01', periods=5)
arg = getattr(index, attr)
index = index.tz_localize(tz_naive_fixture)
dtype = index.dtype
# TODO(GH-24559): Remove the sys.modules and warnings
# not sure what this is from. It's Py2 only.
modules = [sys.modules['pandas.core.indexes.base']]
if (tz_naive_fixture and attr == "asi8" and
str(tz_naive_fixture) not in ('UTC', 'tzutc()')):
ex_warn = FutureWarning
else:
ex_warn = None
# stacklevel is checked elsewhere. We don't do it here since
# Index will have an frame, throwing off the expected.
with tm.assert_produces_warning(ex_warn, check_stacklevel=False,
clear=modules):
result = klass(arg, tz=tz_naive_fixture)
tm.assert_index_equal(result, index)
with tm.assert_produces_warning(ex_warn, check_stacklevel=False):
result = klass(arg, dtype=dtype)
tm.assert_index_equal(result, index)
with tm.assert_produces_warning(ex_warn, check_stacklevel=False):
result = klass(list(arg), tz=tz_naive_fixture)
tm.assert_index_equal(result, index)
with tm.assert_produces_warning(ex_warn, check_stacklevel=False):
result = klass(list(arg), dtype=dtype)
tm.assert_index_equal(result, index)
@pytest.mark.parametrize("attr", ['values', 'asi8'])
@pytest.mark.parametrize("klass", [pd.Index, pd.TimedeltaIndex])
def test_constructor_dtypes_timedelta(self, attr, klass):
index = pd.timedelta_range('1 days', periods=5)
dtype = index.dtype
values = getattr(index, attr)
result = klass(values, dtype=dtype)
tm.assert_index_equal(result, index)
result = klass(list(values), dtype=dtype)
tm.assert_index_equal(result, index)
@pytest.mark.parametrize("value", [[], iter([]), (x for x in [])])
@pytest.mark.parametrize("klass",
[Index, Float64Index, Int64Index, UInt64Index,
CategoricalIndex, DatetimeIndex, TimedeltaIndex])
def test_constructor_empty(self, value, klass):
empty = klass(value)
assert isinstance(empty, klass)
assert not len(empty)
@pytest.mark.parametrize("empty,klass", [
(PeriodIndex([], freq='B'), PeriodIndex),
(PeriodIndex(iter([]), freq='B'), PeriodIndex),
(PeriodIndex((x for x in []), freq='B'), PeriodIndex),
(RangeIndex(step=1), pd.RangeIndex),
(MultiIndex(levels=[[1, 2], ['blue', 'red']],
codes=[[], []]), MultiIndex)
])
def test_constructor_empty_special(self, empty, klass):
assert isinstance(empty, klass)
assert not len(empty)
def test_constructor_overflow_int64(self):
# see gh-15832
msg = ("The elements provided in the data cannot "
"all be casted to the dtype int64")
with pytest.raises(OverflowError, match=msg):
Index([np.iinfo(np.uint64).max - 1], dtype="int64")
@pytest.mark.xfail(reason="see GH#21311: Index "
"doesn't enforce dtype argument")
def test_constructor_cast(self):
msg = "could not convert string to float"
with pytest.raises(ValueError, match=msg):
Index(["a", "b", "c"], dtype=float)
def test_view_with_args(self):
restricted = ['unicodeIndex', 'strIndex', 'catIndex', 'boolIndex',
'empty']
for i in restricted:
ind = self.indices[i]
# with arguments
pytest.raises(TypeError, lambda: ind.view('i8'))
# these are ok
for i in list(set(self.indices.keys()) - set(restricted)):
ind = self.indices[i]
# with arguments
ind.view('i8')
def test_astype(self):
casted = self.intIndex.astype('i8')
# it works!
casted.get_loc(5)
# pass on name
self.intIndex.name = 'foobar'
casted = self.intIndex.astype('i8')
assert casted.name == 'foobar'
def test_equals_object(self):
# same
assert Index(['a', 'b', 'c']).equals(Index(['a', 'b', 'c']))
@pytest.mark.parametrize("comp", [
Index(['a', 'b']), Index(['a', 'b', 'd']), ['a', 'b', 'c']])
def test_not_equals_object(self, comp):
assert not Index(['a', 'b', 'c']).equals(comp)
def test_insert(self):
# GH 7256
# validate neg/pos inserts
result = Index(['b', 'c', 'd'])
# test 0th element
tm.assert_index_equal(Index(['a', 'b', 'c', 'd']),
result.insert(0, 'a'))
# test Nth element that follows Python list behavior
tm.assert_index_equal(Index(['b', 'c', 'e', 'd']),
result.insert(-1, 'e'))
# test loc +/- neq (0, -1)
tm.assert_index_equal(result.insert(1, 'z'), result.insert(-2, 'z'))
# test empty
null_index = Index([])
tm.assert_index_equal(Index(['a']), null_index.insert(0, 'a'))
def test_insert_missing(self, nulls_fixture):
# GH 22295
# test there is no mangling of NA values
expected = Index(['a', nulls_fixture, 'b', 'c'])
result = Index(list('abc')).insert(1, nulls_fixture)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("pos,expected", [
(0, Index(['b', 'c', 'd'], name='index')),
(-1, Index(['a', 'b', 'c'], name='index'))
])
def test_delete(self, pos, expected):
index = Index(['a', 'b', 'c', 'd'], name='index')
result = index.delete(pos)
tm.assert_index_equal(result, expected)
assert result.name == expected.name
def test_delete_raises(self):
index = Index(['a', 'b', 'c', 'd'], name='index')
with pytest.raises((IndexError, ValueError)):
# either depending on numpy version
index.delete(5)
def test_identical(self):
# index
i1 = Index(['a', 'b', 'c'])
i2 = Index(['a', 'b', 'c'])
assert i1.identical(i2)
i1 = i1.rename('foo')
assert i1.equals(i2)
assert not i1.identical(i2)
i2 = i2.rename('foo')
assert i1.identical(i2)
i3 = Index([('a', 'a'), ('a', 'b'), ('b', 'a')])
i4 = Index([('a', 'a'), ('a', 'b'), ('b', 'a')], tupleize_cols=False)
assert not i3.identical(i4)
def test_is_(self):
ind = Index(range(10))
assert ind.is_(ind)
assert ind.is_(ind.view().view().view().view())
assert not ind.is_(Index(range(10)))
assert not ind.is_(ind.copy())
assert not ind.is_(ind.copy(deep=False))
assert not ind.is_(ind[:])
assert not ind.is_(np.array(range(10)))
# quasi-implementation dependent
assert ind.is_(ind.view())
ind2 = ind.view()
ind2.name = 'bob'
assert ind.is_(ind2)
assert ind2.is_(ind)
# doesn't matter if Indices are *actually* views of underlying data,
assert not ind.is_(Index(ind.values))
arr = np.array(range(1, 11))
ind1 = Index(arr, copy=False)
ind2 = Index(arr, copy=False)
assert not ind1.is_(ind2)
def test_asof(self):
d = self.dateIndex[0]
assert self.dateIndex.asof(d) == d
assert isna(self.dateIndex.asof(d - timedelta(1)))
d = self.dateIndex[-1]
assert self.dateIndex.asof(d + timedelta(1)) == d
d = self.dateIndex[0].to_pydatetime()
assert isinstance(self.dateIndex.asof(d), Timestamp)
def test_asof_datetime_partial(self):
index = pd.date_range('2010-01-01', periods=2, freq='m')
expected = Timestamp('2010-02-28')
result = index.asof('2010-02')
assert result == expected
assert not isinstance(result, Index)
def test_nanosecond_index_access(self):
s = Series([Timestamp('20130101')]).values.view('i8')[0]
r = DatetimeIndex([s + 50 + i for i in range(100)])
x = Series(np.random.randn(100), index=r)
first_value = x.asof(x.index[0])
# this does not yet work, as parsing strings is done via dateutil
# assert first_value == x['2013-01-01 00:00:00.000000050+0000']
expected_ts = np_datetime64_compat('2013-01-01 00:00:00.000000050+'
'0000', 'ns')
assert first_value == x[Timestamp(expected_ts)]
def test_booleanindex(self):
boolIndex = np.repeat(True, len(self.strIndex)).astype(bool)
boolIndex[5:30:2] = False
subIndex = self.strIndex[boolIndex]
for i, val in enumerate(subIndex):
assert subIndex.get_loc(val) == i
subIndex = self.strIndex[list(boolIndex)]
for i, val in enumerate(subIndex):
assert subIndex.get_loc(val) == i
def test_fancy(self):
sl = self.strIndex[[1, 2, 3]]
for i in sl:
assert i == sl[sl.get_loc(i)]
@pytest.mark.parametrize("attr", [
'strIndex', 'intIndex', 'floatIndex'])
@pytest.mark.parametrize("dtype", [np.int_, np.bool_])
def test_empty_fancy(self, attr, dtype):
empty_arr = np.array([], dtype=dtype)
index = getattr(self, attr)
empty_index = index.__class__([])
assert index[[]].identical(empty_index)
assert index[empty_arr].identical(empty_index)
@pytest.mark.parametrize("attr", [
'strIndex', 'intIndex', 'floatIndex'])
def test_empty_fancy_raises(self, attr):
# pd.DatetimeIndex is excluded, because it overrides getitem and should
# be tested separately.
empty_farr = np.array([], dtype=np.float_)
index = getattr(self, attr)
empty_index = index.__class__([])
assert index[[]].identical(empty_index)
# np.ndarray only accepts ndarray of int & bool dtypes, so should Index
pytest.raises(IndexError, index.__getitem__, empty_farr)
@pytest.mark.parametrize("sort", [None, False])
def test_intersection(self, sort):
first = self.strIndex[:20]
second = self.strIndex[:10]
intersect = first.intersection(second, sort=sort)
if sort is None:
tm.assert_index_equal(intersect, second.sort_values())
assert tm.equalContents(intersect, second)
# Corner cases
inter = first.intersection(first, sort=sort)
assert inter is first
@pytest.mark.parametrize("index2,keeps_name", [
(Index([3, 4, 5, 6, 7], name="index"), True), # preserve same name
(Index([3, 4, 5, 6, 7], name="other"), False), # drop diff names
(Index([3, 4, 5, 6, 7]), False)])
@pytest.mark.parametrize("sort", [None, False])
def test_intersection_name_preservation(self, index2, keeps_name, sort):
index1 = Index([1, 2, 3, 4, 5], name='index')
expected = Index([3, 4, 5])
result = index1.intersection(index2, sort)
if keeps_name:
expected.name = 'index'
assert result.name == expected.name
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("first_name,second_name,expected_name", [
('A', 'A', 'A'), ('A', 'B', None), (None, 'B', None)])
@pytest.mark.parametrize("sort", [None, False])
def test_intersection_name_preservation2(self, first_name, second_name,
expected_name, sort):
first = self.strIndex[5:20]
second = self.strIndex[:10]
first.name = first_name
second.name = second_name
intersect = first.intersection(second, sort=sort)
assert intersect.name == expected_name
@pytest.mark.parametrize("index2,keeps_name", [
(Index([4, 7, 6, 5, 3], name='index'), True),
(Index([4, 7, 6, 5, 3], name='other'), False)])
@pytest.mark.parametrize("sort", [None, False])
def test_intersection_monotonic(self, index2, keeps_name, sort):
index1 = Index([5, 3, 2, 4, 1], name='index')
expected = Index([5, 3, 4])
if keeps_name:
expected.name = "index"
result = index1.intersection(index2, sort=sort)
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("index2,expected_arr", [
(Index(['B', 'D']), ['B']),
(Index(['B', 'D', 'A']), ['A', 'B', 'A'])])
@pytest.mark.parametrize("sort", [None, False])
def test_intersection_non_monotonic_non_unique(self, index2, expected_arr,
sort):
# non-monotonic non-unique
index1 = Index(['A', 'B', 'A', 'C'])
expected = Index(expected_arr, dtype='object')
result = index1.intersection(index2, sort=sort)
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("sort", [None, False])
def test_intersect_str_dates(self, sort):
dt_dates = [datetime(2012, 2, 9), datetime(2012, 2, 22)]
i1 = Index(dt_dates, dtype=object)
i2 = Index(['aa'], dtype=object)
result = i2.intersection(i1, sort=sort)
assert len(result) == 0
def test_intersect_nosort(self):
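# The default intersection does not sort; order follows the calling Index.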
result = pd.Index(['c', 'b', 'a']).intersection(['b', 'a'])
expected = pd.Index(['b', 'a'])
tm.assert_index_equal(result, expected)
def test_intersection_equal_sort(self):
idx = pd.Index(['c', 'a', 'b'])
tm.assert_index_equal(idx.intersection(idx, sort=False), idx)
tm.assert_index_equal(idx.intersection(idx, sort=None), idx)
@pytest.mark.xfail(reason="Not implemented")
def test_intersection_equal_sort_true(self):
# TODO decide on True behaviour
idx = pd.Index(['c', 'a', 'b'])
sorted_ = pd.Index(['a', 'b', 'c'])
tm.assert_index_equal(idx.intersection(idx, sort=True), sorted_)
@pytest.mark.parametrize("sort", [None, False])
def test_chained_union(self, sort):
# Chained unions handle names correctly
i1 = Index([1, 2], name='i1')
i2 = Index([5, 6], name='i2')
i3 = Index([3, 4], name='i3')
union = i1.union(i2.union(i3, sort=sort), sort=sort)
expected = i1.union(i2, sort=sort).union(i3, sort=sort)
tm.assert_index_equal(union, expected)
j1 = Index([1, 2], name='j1')
j2 = Index([], name='j2')
j3 = Index([], name='j3')
union = j1.union(j2.union(j3, sort=sort), sort=sort)
expected = j1.union(j2, sort=sort).union(j3, sort=sort)
tm.assert_index_equal(union, expected)
@pytest.mark.parametrize("sort", [None, False])
def test_union(self, sort):
# TODO: Replace with fixture
first = self.strIndex[5:20]
second = self.strIndex[:10]
everything = self.strIndex[:20]
union = first.union(second, sort=sort)
if sort is None:
tm.assert_index_equal(union, everything.sort_values())
assert tm.equalContents(union, everything)
@pytest.mark.parametrize('slice_', [slice(None), slice(0)])
def test_union_sort_other_special(self, slice_):
# https://github.com/pandas-dev/pandas/issues/24959
idx = pd.Index([1, 0, 2])
# default, sort=None
other = idx[slice_]
tm.assert_index_equal(idx.union(other), idx)
tm.assert_index_equal(other.union(idx), idx)
# sort=False
tm.assert_index_equal(idx.union(other, sort=False), idx)
@pytest.mark.xfail(reason="Not implemented")
@pytest.mark.parametrize('slice_', [slice(None), slice(0)])
def test_union_sort_special_true(self, slice_):
# TODO decide on True behaviour
# sort=True
idx = pd.Index([1, 0, 2])
# default, sort=None
other = idx[slice_]
result = idx.union(other, sort=True)
expected = pd.Index([0, 1, 2])
tm.assert_index_equal(result, expected)
def test_union_sort_other_incomparable(self):
# https://github.com/pandas-dev/pandas/issues/24959
idx = pd.Index([1, pd.Timestamp('2000')])
# default (sort=None)
with tm.assert_produces_warning(RuntimeWarning):
result = idx.union(idx[:1])
tm.assert_index_equal(result, idx)
# sort=None
with tm.assert_produces_warning(RuntimeWarning):
result = idx.union(idx[:1], sort=None)
tm.assert_index_equal(result, idx)
# sort=False
result = idx.union(idx[:1], sort=False)
tm.assert_index_equal(result, idx)
@pytest.mark.xfail(reason="Not implemented")
def test_union_sort_other_incomparable_true(self):
# TODO decide on True behaviour
# sort=True
idx = pd.Index([1, pd.Timestamp('2000')])
with pytest.raises(TypeError, match='.*'):
idx.union(idx[:1], sort=True)
@pytest.mark.parametrize("klass", [
np.array, Series, list])
@pytest.mark.parametrize("sort", [None, False])
def test_union_from_iterables(self, klass, sort):
# GH 10149
# TODO: Replace with fixture
first = self.strIndex[5:20]
second = self.strIndex[:10]
everything = self.strIndex[:20]
case = klass(second.values)
result = first.union(case, sort=sort)
if sort is None:
tm.assert_index_equal(result, everything.sort_values())
assert tm.equalContents(result, everything)
@pytest.mark.parametrize("sort", [None, False])
def test_union_identity(self, sort):
# TODO: replace with fixture
first = self.strIndex[5:20]
union = first.union(first, sort=sort)
# i.e. identity is not preserved when sort is True
assert (union is first) is (not sort)
union = first.union([], sort=sort)
assert (union is first) is (not sort)
union = Index([]).union(first, sort=sort)
assert (union is first) is (not sort)
@pytest.mark.parametrize("first_list", [list('ba'), list()])
@pytest.mark.parametrize("second_list", [list('ab'), list()])
@pytest.mark.parametrize("first_name, second_name, expected_name", [
('A', 'B', None), (None, 'B', None), ('A', None, None)])
@pytest.mark.parametrize("sort", [None, False])
def test_union_name_preservation(self, first_list, second_list, first_name,
second_name, expected_name, sort):
first = Index(first_list, name=first_name)
second = Index(second_list, name=second_name)
union = first.union(second, sort=sort)
vals = set(first_list).union(second_list)
if sort is None and len(first_list) > 0 and len(second_list) > 0:
expected = Index(sorted(vals), name=expected_name)
tm.assert_index_equal(union, expected)
else:
expected = Index(vals, name=expected_name)
assert tm.equalContents(union, expected)
@pytest.mark.parametrize("sort", [None, False])
def test_union_dt_as_obj(self, sort):
# TODO: Replace with fixture
firstCat = self.strIndex.union(self.dateIndex)
secondCat = self.strIndex.union(self.strIndex)
if self.dateIndex.dtype == np.object_:
appended = np.append(self.strIndex, self.dateIndex)
else:
appended = np.append(self.strIndex, self.dateIndex.astype('O'))
assert tm.equalContents(firstCat, appended)
assert tm.equalContents(secondCat, self.strIndex)
tm.assert_contains_all(self.strIndex, firstCat)
tm.assert_contains_all(self.strIndex, secondCat)
tm.assert_contains_all(self.dateIndex, firstCat)
@pytest.mark.parametrize("method", ['union', 'intersection', 'difference',
'symmetric_difference'])
def test_setops_disallow_true(self, method):
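# sort=True is rejected for set operations in this version; only
# None or False are valid values for the keyword.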
idx1 = pd.Index(['a', 'b'])
idx2 = pd.Index(['b', 'c'])
with pytest.raises(ValueError, match="The 'sort' keyword only takes"):
getattr(idx1, method)(idx2, sort=True)
def test_map_identity_mapping(self):
# GH 12766
# TODO: replace with fixture
for name, cur_index in self.indices.items():
tm.assert_index_equal(cur_index, cur_index.map(lambda x: x))
def test_map_with_tuples(self):
# GH 12766
# Test that returning a single tuple from an Index
# returns an Index.
index = tm.makeIntIndex(3)
result = tm.makeIntIndex(3).map(lambda x: (x,))
expected = Index([(i,) for i in index])
tm.assert_index_equal(result, expected)
# Test that returning a tuple from a map of a single index
# returns a MultiIndex object.
result = index.map(lambda x: (x, x == 1))
expected = MultiIndex.from_tuples([(i, i == 1) for i in index])
tm.assert_index_equal(result, expected)
def test_map_with_tuples_mi(self):
# Test that returning a single object from a MultiIndex
# returns an Index.
first_level = ['foo', 'bar', 'baz']
multi_index = MultiIndex.from_tuples(lzip(first_level, [1, 2, 3]))
reduced_index = multi_index.map(lambda x: x[0])
tm.assert_index_equal(reduced_index, Index(first_level))
@pytest.mark.parametrize("attr", [
'makeDateIndex', 'makePeriodIndex', 'makeTimedeltaIndex'])
def test_map_tseries_indices_return_index(self, attr):
index = getattr(tm, attr)(10)
expected = Index([1] * 10)
result = index.map(lambda x: 1)
tm.assert_index_equal(expected, result)
def test_map_tseries_indices_accsr_return_index(self):
date_index = tm.makeDateIndex(24, freq='h', name='hourly')
expected = Index(range(24), name='hourly')
tm.assert_index_equal(expected, date_index.map(lambda x: x.hour))
@pytest.mark.parametrize(
"mapper",
[
lambda values, index: {i: e for e, i in zip(values, index)},
lambda values, index: pd.Series(values, index)])
def test_map_dictlike(self, mapper):
# GH 12756
expected = Index(['foo', 'bar', 'baz'])
index = tm.makeIntIndex(3)
result = index.map(mapper(expected.values, index))
tm.assert_index_equal(result, expected)
# TODO: replace with fixture
for name in self.indices.keys():
if name == 'catIndex':
# Tested in test_categorical
continue
elif name == 'repeats':
# Cannot map duplicated index
continue
index = self.indices[name]
expected = Index(np.arange(len(index), 0, -1))
# to match proper result coercion for uints
if name == 'empty':
expected = Index([])
result = index.map(mapper(expected, index))
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("mapper", [
Series(['foo', 2., 'baz'], index=[0, 2, -1]),
{0: 'foo', 2: 2.0, -1: 'baz'}])
def test_map_with_non_function_missing_values(self, mapper):
# GH 12756
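# Labels absent from the mapper (here 1) map to NaN instead of raising.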
expected = Index([2., np.nan, 'foo'])
result = Index([2, 1, 0]).map(mapper)
tm.assert_index_equal(expected, result)
def test_map_na_exclusion(self):
index = Index([1.5, np.nan, 3, np.nan, 5])
result = index.map(lambda x: x * 2, na_action='ignore')
expected = index * 2
tm.assert_index_equal(result, expected)
def test_map_defaultdict(self):
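# A defaultdict supplies its factory value for unmapped labels,
# so no NaN appears in the result.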
index = Index([1, 2, 3])
default_dict = defaultdict(lambda: 'blank')
default_dict[1] = 'stuff'
result = index.map(default_dict)
expected = Index(['stuff', 'blank', 'blank'])
tm.assert_index_equal(result, expected)
def test_append_multiple(self):
index = Index(['a', 'b', 'c', 'd', 'e', 'f'])
foos = [index[:2], index[2:4], index[4:]]
result = foos[0].append(foos[1:])
tm.assert_index_equal(result, index)
# empty
result = index.append([])
tm.assert_index_equal(result, index)
@pytest.mark.parametrize("name,expected", [
('foo', 'foo'), ('bar', None)])
def test_append_empty_preserve_name(self, name, expected):
left = Index([], name='foo')
right = Index([1, 2, 3], name=name)
result = left.append(right)
assert result.name == expected
@pytest.mark.parametrize("second_name,expected", [
(None, None), ('name', 'name')])
@pytest.mark.parametrize("sort", [None, False])
def test_difference_name_preservation(self, second_name, expected, sort):
# TODO: replace with fixture
first = self.strIndex[5:20]
second = self.strIndex[:10]
answer = self.strIndex[10:20]
first.name = 'name'
second.name = second_name
result = first.difference(second, sort=sort)
assert tm.equalContents(result, answer)
if expected is None:
assert result.name is None
else:
assert result.name == expected
@pytest.mark.parametrize("sort", [None, False])
def test_difference_empty_arg(self, sort):
first = self.strIndex[5:20]
first.name = 'name'
result = first.difference([], sort)
assert tm.equalContents(result, first)
assert result.name == first.name
@pytest.mark.parametrize("sort", [None, False])
def test_difference_identity(self, sort):
first = self.strIndex[5:20]
first.name = 'name'
result = first.difference(first, sort)
assert len(result) == 0
assert result.name == first.name
@pytest.mark.parametrize("sort", [None, False])
def test_difference_sort(self, sort):
first = self.strIndex[5:20]
second = self.strIndex[:10]
result = first.difference(second, sort)
expected = self.strIndex[10:20]
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("sort", [None, False])
def test_symmetric_difference(self, sort):
# smoke
index1 = Index([5, 2, 3, 4], name='index1')
index2 = Index([2, 3, 4, 1])
result = index1.symmetric_difference(index2, sort=sort)
expected = Index([5, 1])
assert tm.equalContents(result, expected)
assert result.name is None
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
# __xor__ syntax
expected = index1 ^ index2
assert tm.equalContents(result, expected)
assert result.name is None
@pytest.mark.parametrize('opname', ['difference', 'symmetric_difference'])
def test_difference_incomparable(self, opname):
a = pd.Index([3, pd.Timestamp('2000'), 1])
b = pd.Index([2, pd.Timestamp('1999'), 1])
op = operator.methodcaller(opname, b)
# sort=None, the default
result = op(a)
expected = pd.Index([3, pd.Timestamp('2000'), 2, pd.Timestamp('1999')])
if opname == 'difference':
expected = expected[:2]
tm.assert_index_equal(result, expected)
# sort=False
op = operator.methodcaller(opname, b, sort=False)
result = op(a)
tm.assert_index_equal(result, expected)
@pytest.mark.xfail(reason="Not implemented")
@pytest.mark.parametrize('opname', ['difference', 'symmetric_difference'])
def test_difference_incomparable_true(self, opname):
# TODO decide on True behaviour
# # sort=True, raises
a = pd.Index([3, pd.Timestamp('2000'), 1])
b = pd.Index([2, pd.Timestamp('1999'), 1])
op = operator.methodcaller(opname, b, sort=True)
with pytest.raises(TypeError, match='Cannot compare'):
op(a)
@pytest.mark.parametrize("sort", [None, False])
def test_symmetric_difference_mi(self, sort):
index1 = MultiIndex.from_tuples(self.tuples)
index2 = MultiIndex.from_tuples([('foo', 1), ('bar', 3)])
result = index1.symmetric_difference(index2, sort=sort)
expected = MultiIndex.from_tuples([('bar', 2), ('baz', 3), ('bar', 3)])
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
assert tm.equalContents(result, expected)
@pytest.mark.parametrize("index2,expected", [
(Index([0, 1, np.nan]), Index([2.0, 3.0, 0.0])),
(Index([0, 1]), Index([np.nan, 2.0, 3.0, 0.0]))])
@pytest.mark.parametrize("sort", [None, False])
def test_symmetric_difference_missing(self, index2, expected, sort):
# GH 13514 change: {nan} - {nan} == {}
# (GH 6444, sorting of nans, is no longer an issue)
index1 = Index([1, np.nan, 2, 3])
result = index1.symmetric_difference(index2, sort=sort)
if sort is None:
expected = expected.sort_values()
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("sort", [None, False])
def test_symmetric_difference_non_index(self, sort):
index1 = Index([1, 2, 3, 4], name='index1')
index2 = np.array([2, 3, 4, 5])
expected = Index([1, 5])
result = index1.symmetric_difference(index2, sort=sort)
assert tm.equalContents(result, expected)
assert result.name == 'index1'
result = index1.symmetric_difference(index2, result_name='new_name',
sort=sort)
assert tm.equalContents(result, expected)
assert result.name == 'new_name'
@pytest.mark.parametrize("sort", [None, False])
def test_difference_type(self, sort):
# GH 20040
# If taking difference of a set and itself, it
# needs to preserve the type of the index
skip_index_keys = ['repeats']
for key, index in self.generate_index_types(skip_index_keys):
result = index.difference(index, sort=sort)
expected = index.drop(index)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("sort", [None, False])
def test_intersection_difference(self, sort):
# GH 20040
# Test that the intersection of an index with an
# empty index produces the same index as the difference
# of an index with itself. Test for all types
skip_index_keys = ['repeats']
for key, index in self.generate_index_types(skip_index_keys):
inter = index.intersection(index.drop(index))
diff = index.difference(index, sort=sort)
tm.assert_index_equal(inter, diff)
@pytest.mark.parametrize("attr,expected", [
('strIndex', False), ('boolIndex', False), ('catIndex', False),
('intIndex', True), ('dateIndex', False), ('floatIndex', True)])
def test_is_numeric(self, attr, expected):
assert getattr(self, attr).is_numeric() == expected
@pytest.mark.parametrize("attr,expected", [
('strIndex', True), ('boolIndex', True), ('catIndex', False),
('intIndex', False), ('dateIndex', False), ('floatIndex', False)])
def test_is_object(self, attr, expected):
assert getattr(self, attr).is_object() == expected
@pytest.mark.parametrize("attr,expected", [
('strIndex', False), ('boolIndex', False), ('catIndex', False),
('intIndex', False), ('dateIndex', True), ('floatIndex', False)])
def test_is_all_dates(self, attr, expected):
assert getattr(self, attr).is_all_dates == expected
def test_summary(self):
self._check_method_works(Index._summary)
# GH3869
ind = Index(['{other}%s', "~:{range}:0"], name='A')
result = ind._summary()
# shouldn't be formatted accidentally.
assert '~:{range}:0' in result
assert '{other}%s' in result
# GH18217
def test_summary_deprecated(self):
ind = Index(['{other}%s', "~:{range}:0"], name='A')
with tm.assert_produces_warning(FutureWarning):
ind.summary()
def test_format(self):
self._check_method_works(Index.format)
# GH 14626
# Windows has lower precision for datetime.datetime.now (it doesn't
# include microseconds); the default Timestamp repr shows them while
# Index formatting does not, so we skip the comparison in that case.
now = datetime.now()
if not str(now).endswith("000"):
index = Index([now])
formatted = index.format()
expected = [str(index[0])]
assert formatted == expected
self.strIndex[:0].format()
@pytest.mark.parametrize("vals", [
[1, 2.0 + 3.0j, 4.], ['a', 'b', 'c']])
def test_format_missing(self, vals, nulls_fixture):
# 2845
vals = list(vals) # Copy for each iteration
vals.append(nulls_fixture)
index = Index(vals)
formatted = index.format()
expected = [str(index[0]), str(index[1]), str(index[2]), u('NaN')]
assert formatted == expected
assert index[3] is nulls_fixture
def test_format_with_name_time_info(self):
# bug I fixed 12/20/2011
inc = timedelta(hours=4)
dates = Index([dt + inc for dt in self.dateIndex], name='something')
formatted = dates.format(name=True)
assert formatted[0] == 'something'
def test_format_datetime_with_time(self):
t = Index([datetime(2012, 2, 7), datetime(2012, 2, 7, 23)])
result = t.format()
expected = ['2012-02-07 00:00:00', '2012-02-07 23:00:00']
assert len(result) == 2
assert result == expected
@pytest.mark.parametrize("op", ['any', 'all'])
def test_logical_compat(self, op):
index = self.create_index()
assert getattr(index, op)() == getattr(index.values, op)()
def _check_method_works(self, method):
# TODO: make this a dedicated test with parametrized methods
method(self.empty)
method(self.dateIndex)
method(self.unicodeIndex)
method(self.strIndex)
method(self.intIndex)
method(self.tuples)
method(self.catIndex)
def test_get_indexer(self):
index1 = Index([1, 2, 3, 4, 5])
index2 = Index([2, 4, 6])
r1 = index1.get_indexer(index2)
e1 = np.array([1, 3, -1], dtype=np.intp)
assert_almost_equal(r1, e1)
@pytest.mark.parametrize("reverse", [True, False])
@pytest.mark.parametrize("expected,method", [
(np.array([-1, 0, 0, 1, 1], dtype=np.intp), 'pad'),
(np.array([-1, 0, 0, 1, 1], dtype=np.intp), 'ffill'),
(np.array([0, 0, 1, 1, 2], dtype=np.intp), 'backfill'),
(np.array([0, 0, 1, 1, 2], dtype=np.intp), 'bfill')])
def test_get_indexer_methods(self, reverse, expected, method):
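# 'pad'/'ffill' match the closest label at or before the target,
# 'backfill'/'bfill' the one at or after; -1 means no match.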
index1 = Index([1, 2, 3, 4, 5])
index2 = Index([2, 4, 6])
if reverse:
index1 = index1[::-1]
expected = expected[::-1]
result = index2.get_indexer(index1, method=method)
assert_almost_equal(result, expected)
def test_get_indexer_invalid(self):
# GH10411
index = Index(np.arange(10))
with pytest.raises(ValueError, match='tolerance argument'):
index.get_indexer([1, 0], tolerance=1)
with pytest.raises(ValueError, match='limit argument'):
index.get_indexer([1, 0], limit=1)
@pytest.mark.parametrize(
'method, tolerance, indexer, expected',
[
('pad', None, [0, 5, 9], [0, 5, 9]),
('backfill', None, [0, 5, 9], [0, 5, 9]),
('nearest', None, [0, 5, 9], [0, 5, 9]),
('pad', 0, [0, 5, 9], [0, 5, 9]),
('backfill', 0, [0, 5, 9], [0, 5, 9]),
('nearest', 0, [0, 5, 9], [0, 5, 9]),
('pad', None, [0.2, 1.8, 8.5], [0, 1, 8]),
('backfill', None, [0.2, 1.8, 8.5], [1, 2, 9]),
('nearest', None, [0.2, 1.8, 8.5], [0, 2, 9]),
('pad', 1, [0.2, 1.8, 8.5], [0, 1, 8]),
('backfill', 1, [0.2, 1.8, 8.5], [1, 2, 9]),
('nearest', 1, [0.2, 1.8, 8.5], [0, 2, 9]),
('pad', 0.2, [0.2, 1.8, 8.5], [0, -1, -1]),
('backfill', 0.2, [0.2, 1.8, 8.5], [-1, 2, -1]),
('nearest', 0.2, [0.2, 1.8, 8.5], [0, 2, -1])])
def test_get_indexer_nearest(self, method, tolerance, indexer, expected):
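# Targets without an index label inside the given tolerance come back as -1.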
index = Index(np.arange(10))
actual = index.get_indexer(indexer, method=method, tolerance=tolerance)
tm.assert_numpy_array_equal(actual, np.array(expected,
dtype=np.intp))
@pytest.mark.parametrize('listtype', [list, tuple, Series, np.array])
@pytest.mark.parametrize(
'tolerance, expected',
list(zip([[0.3, 0.3, 0.1], [0.2, 0.1, 0.1],
[0.1, 0.5, 0.5]],
[[0, 2, -1], [0, -1, -1],
[-1, 2, 9]])))
def test_get_indexer_nearest_listlike_tolerance(self, tolerance,
expected, listtype):
index = Index(np.arange(10))
actual = index.get_indexer([0.2, 1.8, 8.5], method='nearest',
tolerance=listtype(tolerance))
tm.assert_numpy_array_equal(actual, np.array(expected,
dtype=np.intp))
def test_get_indexer_nearest_error(self):
index = Index(np.arange(10))
with pytest.raises(ValueError, match='limit argument'):
index.get_indexer([1, 0], method='nearest', limit=1)
with pytest.raises(ValueError, match='tolerance size must match'):
index.get_indexer([1, 0], method='nearest',
tolerance=[1, 2, 3])
@pytest.mark.parametrize("method,expected", [
('pad', [8, 7, 0]), ('backfill', [9, 8, 1]), ('nearest', [9, 7, 0])])
def test_get_indexer_nearest_decreasing(self, method, expected):
index = Index(np.arange(10))[::-1]
actual = index.get_indexer([0, 5, 9], method=method)
tm.assert_numpy_array_equal(actual, np.array([9, 4, 0], dtype=np.intp))
actual = index.get_indexer([0.2, 1.8, 8.5], method=method)
tm.assert_numpy_array_equal(actual, np.array(expected, dtype=np.intp))
@pytest.mark.parametrize("method,expected", [
('pad', np.array([-1, 0, 1, 1], dtype=np.intp)),
('backfill', np.array([0, 0, 1, -1], dtype=np.intp))])
def test_get_indexer_strings(self, method, expected):
index = pd.Index(['b', 'c'])
actual = index.get_indexer(['a', 'b', 'c', 'd'], method=method)
tm.assert_numpy_array_equal(actual, expected)
def test_get_indexer_strings_raises(self):
index = pd.Index(['b', 'c'])
with pytest.raises(TypeError):
index.get_indexer(['a', 'b', 'c', 'd'], method='nearest')
with pytest.raises(TypeError):
index.get_indexer(['a', 'b', 'c', 'd'], method='pad', tolerance=2)
with pytest.raises(TypeError):
index.get_indexer(['a', 'b', 'c', 'd'], method='pad',
tolerance=[2, 2, 2, 2])
def test_get_indexer_numeric_index_boolean_target(self):
# GH 16877
numeric_index = pd.Index(range(4))
result = numeric_index.get_indexer([True, False, True])
expected = np.array([-1, -1, -1], dtype=np.intp)
tm.assert_numpy_array_equal(result, expected)
def test_get_indexer_with_NA_values(self, unique_nulls_fixture,
unique_nulls_fixture2):
# GH 22332
# check pairwise, that no pair of na values
# is mangled
if unique_nulls_fixture is unique_nulls_fixture2:
return # skip it, values are not unique
arr = np.array([unique_nulls_fixture,
unique_nulls_fixture2], dtype=np.object)
index = pd.Index(arr, dtype=np.object)
result = index.get_indexer([unique_nulls_fixture,
unique_nulls_fixture2, 'Unknown'])
expected = np.array([0, 1, -1], dtype=np.intp)
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("method", [None, 'pad', 'backfill', 'nearest'])
def test_get_loc(self, method):
index = pd.Index([0, 1, 2])
assert index.get_loc(1, method=method) == 1
if method:
assert index.get_loc(1, method=method, tolerance=0) == 1
@pytest.mark.parametrize("method", [None, 'pad', 'backfill', 'nearest'])
def test_get_loc_raises_bad_label(self, method):
index = pd.Index([0, 1, 2])
if method:
# Messages vary across versions
if PY36:
msg = 'not supported between'
elif PY35:
msg = 'unorderable types'
else:
if method == 'nearest':
msg = 'unsupported operand'
else:
msg = 'requires scalar valued input'
else:
msg = 'invalid key'
with pytest.raises(TypeError, match=msg):
index.get_loc([1, 2], method=method)
@pytest.mark.parametrize("method,loc", [
('pad', 1), ('backfill', 2), ('nearest', 1)])
def test_get_loc_tolerance(self, method, loc):
index = pd.Index([0, 1, 2])
assert index.get_loc(1.1, method) == loc
assert index.get_loc(1.1, method, tolerance=1) == loc
@pytest.mark.parametrize("method", ['pad', 'backfill', 'nearest'])
def test_get_loc_outside_tolerance_raises(self, method):
index = pd.Index([0, 1, 2])
with pytest.raises(KeyError, match='1.1'):
index.get_loc(1.1, method, tolerance=0.05)
def test_get_loc_bad_tolerance_raises(self):
index = pd.Index([0, 1, 2])
with pytest.raises(ValueError, match='must be numeric'):
index.get_loc(1.1, 'nearest', tolerance='invalid')
def test_get_loc_tolerance_no_method_raises(self):
index = pd.Index([0, 1, 2])
with pytest.raises(ValueError, match='tolerance .* valid if'):
index.get_loc(1.1, tolerance=1)
def test_get_loc_raises_missized_tolerance(self):
index = pd.Index([0, 1, 2])
with pytest.raises(ValueError, match='tolerance size must match'):
index.get_loc(1.1, 'nearest', tolerance=[1, 1])
def test_get_loc_raises_object_nearest(self):
index = pd.Index(['a', 'c'])
with pytest.raises(TypeError, match='unsupported operand type'):
index.get_loc('a', method='nearest')
def test_get_loc_raises_object_tolerance(self):
index = pd.Index(['a', 'c'])
with pytest.raises(TypeError, match='unsupported operand type'):
index.get_loc('a', method='pad', tolerance='invalid')
@pytest.mark.parametrize("dtype", [int, float])
def test_slice_locs(self, dtype):
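# slice_locs maps label bounds to positional (start, stop); the
# bounds themselves need not exist in the index.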
index = Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=dtype))
n = len(index)
assert index.slice_locs(start=2) == (2, n)
assert index.slice_locs(start=3) == (3, n)
assert index.slice_locs(3, 8) == (3, 6)
assert index.slice_locs(5, 10) == (3, n)
assert index.slice_locs(end=8) == (0, 6)
assert index.slice_locs(end=9) == (0, 7)
# reversed
index2 = index[::-1]
assert index2.slice_locs(8, 2) == (2, 6)
assert index2.slice_locs(7, 3) == (2, 5)
def test_slice_float_locs(self):
index = Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=float))
n = len(index)
assert index.slice_locs(5.0, 10.0) == (3, n)
assert index.slice_locs(4.5, 10.5) == (3, 8)
index2 = index[::-1]
assert index2.slice_locs(8.5, 1.5) == (2, 6)
assert index2.slice_locs(10.5, -1) == (0, n)
@pytest.mark.xfail(reason="Assertions were not correct - see GH#20915")
def test_slice_ints_with_floats_raises(self):
# int slicing with floats
# GH 4892, these are all TypeErrors
index = Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=int))
pytest.raises(TypeError,
lambda: index.slice_locs(5.0, 10.0))
pytest.raises(TypeError,
lambda: index.slice_locs(4.5, 10.5))
index2 = index[::-1]
pytest.raises(TypeError,
lambda: index2.slice_locs(8.5, 1.5))
pytest.raises(TypeError,
lambda: index2.slice_locs(10.5, -1))
def test_slice_locs_dup(self):
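# With duplicates, the slice spans from the first occurrence of
# start to just past the last occurrence of stop.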
index = Index(['a', 'a', 'b', 'c', 'd', 'd'])
assert index.slice_locs('a', 'd') == (0, 6)
assert index.slice_locs(end='d') == (0, 6)
assert index.slice_locs('a', 'c') == (0, 4)
assert index.slice_locs('b', 'd') == (2, 6)
index2 = index[::-1]
assert index2.slice_locs('d', 'a') == (0, 6)
assert index2.slice_locs(end='a') == (0, 6)
assert index2.slice_locs('d', 'b') == (0, 4)
assert index2.slice_locs('c', 'a') == (2, 6)
@pytest.mark.parametrize("dtype", [int, float])
def test_slice_locs_dup_numeric(self, dtype):
index = Index(np.array([10, 12, 12, 14], dtype=dtype))
assert index.slice_locs(12, 12) == (1, 3)
assert index.slice_locs(11, 13) == (1, 3)
index2 = index[::-1]
assert index2.slice_locs(12, 12) == (1, 3)
assert index2.slice_locs(13, 11) == (1, 3)
def test_slice_locs_na(self):
index = Index([np.nan, 1, 2])
assert index.slice_locs(1) == (1, 3)
assert index.slice_locs(np.nan) == (0, 3)
index = Index([0, np.nan, np.nan, 1, 2])
assert index.slice_locs(np.nan) == (1, 5)
def test_slice_locs_na_raises(self):
index = Index([np.nan, 1, 2])
with pytest.raises(KeyError, match=''):
index.slice_locs(start=1.5)
with pytest.raises(KeyError, match=''):
index.slice_locs(end=1.5)
@pytest.mark.parametrize("in_slice,expected", [
(pd.IndexSlice[::-1], 'yxdcb'), (pd.IndexSlice['b':'y':-1], ''),
(pd.IndexSlice['b'::-1], 'b'), (pd.IndexSlice[:'b':-1], 'yxdcb'),
(pd.IndexSlice[:'y':-1], 'y'), (pd.IndexSlice['y'::-1], 'yxdcb'),
(pd.IndexSlice['y'::-4], 'yb'),
# absent labels
(pd.IndexSlice[:'a':-1], 'yxdcb'), (pd.IndexSlice[:'a':-2], 'ydb'),
(pd.IndexSlice['z'::-1], 'yxdcb'), (pd.IndexSlice['z'::-3], 'yc'),
(pd.IndexSlice['m'::-1], 'dcb'), (pd.IndexSlice[:'m':-1], 'yx'),
(pd.IndexSlice['a':'a':-1], ''), (pd.IndexSlice['z':'z':-1], ''),
(pd.IndexSlice['m':'m':-1], '')
])
def test_slice_locs_negative_step(self, in_slice, expected):
index = Index(list('bcdxy'))
s_start, s_stop = index.slice_locs(in_slice.start, in_slice.stop,
in_slice.step)
result = index[s_start:s_stop:in_slice.step]
expected = pd.Index(list(expected))
tm.assert_index_equal(result, expected)
def test_drop_by_str_label(self):
# TODO: Parametrize these after replacing self.strIndex with fixture
n = len(self.strIndex)
drop = self.strIndex[lrange(5, 10)]
dropped = self.strIndex.drop(drop)
expected = self.strIndex[lrange(5) + lrange(10, n)]
tm.assert_index_equal(dropped, expected)
dropped = self.strIndex.drop(self.strIndex[0])
expected = self.strIndex[1:]
tm.assert_index_equal(dropped, expected)
@pytest.mark.parametrize("keys", [['foo', 'bar'], ['1', 'bar']])
def test_drop_by_str_label_raises_missing_keys(self, keys):
with pytest.raises(KeyError, match=''):
self.strIndex.drop(keys)
def test_drop_by_str_label_errors_ignore(self):
# TODO: Parametrize these after replacing self.strIndex with fixture
# errors='ignore'
n = len(self.strIndex)
drop = self.strIndex[lrange(5, 10)]
mixed = drop.tolist() + ['foo']
dropped = self.strIndex.drop(mixed, errors='ignore')
expected = self.strIndex[lrange(5) + lrange(10, n)]
tm.assert_index_equal(dropped, expected)
dropped = self.strIndex.drop(['foo', 'bar'], errors='ignore')
expected = self.strIndex[lrange(n)]
tm.assert_index_equal(dropped, expected)
def test_drop_by_numeric_label_loc(self):
# TODO: Parametrize numeric and str tests after self.strIndex fixture
index = Index([1, 2, 3])
dropped = index.drop(1)
expected = Index([2, 3])
tm.assert_index_equal(dropped, expected)
def test_drop_by_numeric_label_raises_missing_keys(self):
index = Index([1, 2, 3])
with pytest.raises(KeyError, match=''):
index.drop([3, 4])
@pytest.mark.parametrize("key,expected", [
(4, Index([1, 2, 3])), ([3, 4, 5], Index([1, 2]))])
def test_drop_by_numeric_label_errors_ignore(self, key, expected):
index = Index([1, 2, 3])
dropped = index.drop(key, errors='ignore')
tm.assert_index_equal(dropped, expected)
@pytest.mark.parametrize("values", [['a', 'b', ('c', 'd')],
['a', ('c', 'd'), 'b'],
[('c', 'd'), 'a', 'b']])
@pytest.mark.parametrize("to_drop", [[('c', 'd'), 'a'], ['a', ('c', 'd')]])
def test_drop_tuple(self, values, to_drop):
# GH 18304
index = pd.Index(values)
expected = pd.Index(['b'])
result = index.drop(to_drop)
tm.assert_index_equal(result, expected)
removed = index.drop(to_drop[0])
for drop_me in to_drop[1], [to_drop[1]]:
result = removed.drop(drop_me)
tm.assert_index_equal(result, expected)
removed = index.drop(to_drop[1])
for drop_me in to_drop[1], [to_drop[1]]:
pytest.raises(KeyError, removed.drop, drop_me)
@pytest.mark.parametrize("method,expected,sort", [
('intersection', np.array([(1, 'A'), (2, 'A'), (1, 'B'), (2, 'B')],
dtype=[('num', int), ('let', 'a1')]),
False),
('intersection', np.array([(1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')],
dtype=[('num', int), ('let', 'a1')]),
None),
('union', np.array([(1, 'A'), (1, 'B'), (1, 'C'), (2, 'A'), (2, 'B'),
(2, 'C')], dtype=[('num', int), ('let', 'a1')]),
None)
])
def test_tuple_union_bug(self, method, expected, sort):
index1 = Index(np.array([(1, 'A'), (2, 'A'), (1, 'B'), (2, 'B')],
dtype=[('num', int), ('let', 'a1')]))
index2 = Index(np.array([(1, 'A'), (2, 'A'), (1, 'B'),
(2, 'B'), (1, 'C'), (2, 'C')],
dtype=[('num', int), ('let', 'a1')]))
result = getattr(index1, method)(index2, sort=sort)
assert result.ndim == 1
expected = Index(expected)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("attr", [
'is_monotonic_increasing', 'is_monotonic_decreasing',
'_is_strictly_monotonic_increasing',
'_is_strictly_monotonic_decreasing'])
def test_is_monotonic_incomparable(self, attr):
index = Index([5, datetime.now(), 7])
assert not getattr(index, attr)
def test_get_set_value(self):
# TODO: Remove function? GH 19728
values = np.random.randn(100)
date = self.dateIndex[67]
assert_almost_equal(self.dateIndex.get_value(values, date), values[67])
self.dateIndex.set_value(values, date, 10)
assert values[67] == 10
@pytest.mark.parametrize("values", [
['foo', 'bar', 'quux'], {'foo', 'bar', 'quux'}])
@pytest.mark.parametrize("index,expected", [
(Index(['qux', 'baz', 'foo', 'bar']),
np.array([False, False, True, True])),
(Index([]), np.array([], dtype=bool)) # empty
])
def test_isin(self, values, index, expected):
result = index.isin(values)
tm.assert_numpy_array_equal(result, expected)
def test_isin_nan_common_object(self, nulls_fixture, nulls_fixture2):
# Test cartesian product of null fixtures and ensure that we don't
# mangle the various types (save a corner case with PyPy)
# all nans are the same
if (isinstance(nulls_fixture, float) and
isinstance(nulls_fixture2, float) and
math.isnan(nulls_fixture) and
math.isnan(nulls_fixture2)):
tm.assert_numpy_array_equal(Index(['a', nulls_fixture]).isin(
[nulls_fixture2]), np.array([False, True]))
elif nulls_fixture is nulls_fixture2: # should preserve NA type
tm.assert_numpy_array_equal(Index(['a', nulls_fixture]).isin(
[nulls_fixture2]), np.array([False, True]))
else:
tm.assert_numpy_array_equal(Index(['a', nulls_fixture]).isin(
[nulls_fixture2]), np.array([False, False]))
def test_isin_nan_common_float64(self, nulls_fixture):
if nulls_fixture is pd.NaT:
pytest.skip("pd.NaT not compatible with Float64Index")
# Float64Index overrides isin, so must be checked separately
tm.assert_numpy_array_equal(Float64Index([1.0, nulls_fixture]).isin(
[np.nan]), np.array([False, True]))
# we cannot compare NaT with NaN
tm.assert_numpy_array_equal(Float64Index([1.0, nulls_fixture]).isin(
[pd.NaT]), np.array([False, False]))
@pytest.mark.parametrize("level", [0, -1])
@pytest.mark.parametrize("index", [
Index(['qux', 'baz', 'foo', 'bar']),
# Float64Index overrides isin, so must be checked separately
Float64Index([1.0, 2.0, 3.0, 4.0])])
def test_isin_level_kwarg(self, level, index):
values = index.tolist()[-2:] + ['nonexisting']
expected = np.array([False, False, True, True])
tm.assert_numpy_array_equal(expected, index.isin(values, level=level))
index.name = 'foobar'
tm.assert_numpy_array_equal(expected,
index.isin(values, level='foobar'))
@pytest.mark.parametrize("level", [1, 10, -2])
@pytest.mark.parametrize("index", [
Index(['qux', 'baz', 'foo', 'bar']),
# Float64Index overrides isin, so must be checked separately
Float64Index([1.0, 2.0, 3.0, 4.0])])
def test_isin_level_kwarg_raises_bad_index(self, level, index):
with pytest.raises(IndexError, match='Too many levels'):
index.isin([], level=level)
@pytest.mark.parametrize("level", [1.0, 'foobar', 'xyzzy', np.nan])
@pytest.mark.parametrize("index", [
Index(['qux', 'baz', 'foo', 'bar']),
Float64Index([1.0, 2.0, 3.0, 4.0])])
def test_isin_level_kwarg_raises_key(self, level, index):
with pytest.raises(KeyError, match='must be same as name'):
index.isin([], level=level)
@pytest.mark.parametrize("empty", [[], Series(), np.array([])])
def test_isin_empty(self, empty):
# see gh-16991
index = Index(["a", "b"])
expected = np.array([False, False])
result = index.isin(empty)
tm.assert_numpy_array_equal(expected, result)
@pytest.mark.parametrize("values", [
[1, 2, 3, 4],
[1., 2., 3., 4.],
[True, True, True, True],
["foo", "bar", "baz", "qux"],
pd.date_range('2018-01-01', freq='D', periods=4)])
def test_boolean_cmp(self, values):
index = Index(values)
result = (index == values)
expected = np.array([True, True, True, True], dtype=bool)
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("name,level", [
(None, 0), ('a', 'a')])
def test_get_level_values(self, name, level):
expected = self.strIndex.copy()
if name:
expected.name = name
result = expected.get_level_values(level)
tm.assert_index_equal(result, expected)
def test_slice_keep_name(self):
index = Index(['a', 'b'], name='asdf')
assert index.name == index[1:].name
# instance attributes of the form self.<name>Index
@pytest.mark.parametrize('index_kind',
['unicode', 'str', 'date', 'int', 'float'])
def test_join_self(self, join_type, index_kind):
res = getattr(self, '{0}Index'.format(index_kind))
joined = res.join(res, how=join_type)
assert res is joined
@pytest.mark.parametrize("method", ['strip', 'rstrip', 'lstrip'])
def test_str_attribute(self, method):
# GH9068
index = Index([' jack', 'jill ', ' jesse ', 'frank'])
expected = Index([getattr(str, method)(x) for x in index.values])
result = getattr(index.str, method)()
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("index", [
Index(range(5)), tm.makeDateIndex(10),
MultiIndex.from_tuples([('foo', '1'), ('bar', '3')]),
period_range(start='2000', end='2010', freq='A')])
def test_str_attribute_raises(self, index):
with pytest.raises(AttributeError, match='only use .str accessor'):
index.str.repeat(2)
@pytest.mark.parametrize("expand,expected", [
(None, Index([['a', 'b', 'c'], ['d', 'e'], ['f']])),
(False, Index([['a', 'b', 'c'], ['d', 'e'], ['f']])),
(True, MultiIndex.from_tuples([('a', 'b', 'c'), ('d', 'e', np.nan),
('f', np.nan, np.nan)]))])
def test_str_split(self, expand, expected):
index = Index(['a b c', 'd e', 'f'])
if expand is not None:
result = index.str.split(expand=expand)
else:
result = index.str.split()
tm.assert_index_equal(result, expected)
def test_str_bool_return(self):
# test boolean case, should return np.array instead of boolean Index
index = Index(['a1', 'a2', 'b1', 'b2'])
result = index.str.startswith('a')
expected = np.array([True, True, False, False])
tm.assert_numpy_array_equal(result, expected)
assert isinstance(result, np.ndarray)
def test_str_bool_series_indexing(self):
index = Index(['a1', 'a2', 'b1', 'b2'])
s = Series(range(4), index=index)
result = s[s.index.str.startswith('a')]
expected = Series(range(2), index=['a1', 'a2'])
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("index,expected", [
(Index(list('abcd')), True), (Index(range(4)), False)])
def test_tab_completion(self, index, expected):
# GH 9910
result = 'str' in dir(index)
assert result == expected
def test_indexing_doesnt_change_class(self):
index = Index([1, 2, 3, 'a', 'b', 'c'])
assert index[1:3].identical(pd.Index([2, 3], dtype=np.object_))
assert index[[0, 1]].identical(pd.Index([1, 2], dtype=np.object_))
def test_outer_join_sort(self):
left_index = Index(np.random.permutation(15))
right_index = tm.makeDateIndex(10)
with tm.assert_produces_warning(RuntimeWarning):
result = left_index.join(right_index, how='outer')
# right_index in this case because DatetimeIndex has join precedence
# over Int64Index
with tm.assert_produces_warning(RuntimeWarning):
expected = right_index.astype(object).union(
left_index.astype(object))
tm.assert_index_equal(result, expected)
def test_nan_first_take_datetime(self):
index = Index([pd.NaT, Timestamp('20130101'), Timestamp('20130102')])
result = index.take([-1, 0, 1])
expected = Index([index[-1], index[0], index[1]])
tm.assert_index_equal(result, expected)
def test_take_fill_value(self):
# GH 12631
index = pd.Index(list('ABC'), name='xxx')
result = index.take(np.array([1, 0, -1]))
expected = pd.Index(list('BAC'), name='xxx')
tm.assert_index_equal(result, expected)
# fill_value
result = index.take(np.array([1, 0, -1]), fill_value=True)
expected = pd.Index(['B', 'A', np.nan], name='xxx')
tm.assert_index_equal(result, expected)
# allow_fill=False
result = index.take(np.array([1, 0, -1]), allow_fill=False,
fill_value=True)
expected = pd.Index(['B', 'A', 'C'], name='xxx')
tm.assert_index_equal(result, expected)
def test_take_fill_value_none_raises(self):
index = pd.Index(list('ABC'), name='xxx')
msg = ('When allow_fill=True and fill_value is not None, '
'all indices must be >= -1')
with pytest.raises(ValueError, match=msg):
index.take(np.array([1, 0, -2]), fill_value=True)
with pytest.raises(ValueError, match=msg):
index.take(np.array([1, 0, -5]), fill_value=True)
def test_take_bad_bounds_raises(self):
index = pd.Index(list('ABC'), name='xxx')
with pytest.raises(IndexError, match='out of bounds'):
index.take(np.array([1, -5]))
@pytest.mark.parametrize("name", [None, 'foobar'])
@pytest.mark.parametrize("labels", [
[], np.array([]), ['A', 'B', 'C'], ['C', 'B', 'A'],
np.array(['A', 'B', 'C']), np.array(['C', 'B', 'A']),
# Must preserve name even if dtype changes
pd.date_range('20130101', periods=3).values,
pd.date_range('20130101', periods=3).tolist()])
def test_reindex_preserves_name_if_target_is_list_or_ndarray(self, name,
labels):
# GH6552
index = pd.Index([0, 1, 2])
index.name = name
assert index.reindex(labels)[0].name == name
@pytest.mark.parametrize("labels", [
[], np.array([]), np.array([], dtype=np.int64)])
def test_reindex_preserves_type_if_target_is_empty_list_or_array(self,
labels):
# GH7774
index = pd.Index(list('abc'))
assert index.reindex(labels)[0].dtype.type == np.object_
@pytest.mark.parametrize("labels,dtype", [
(pd.Int64Index([]), np.int64),
(pd.Float64Index([]), np.float64),
(pd.DatetimeIndex([]), np.datetime64)])
def test_reindex_doesnt_preserve_type_if_target_is_empty_index(self,
labels,
dtype):
# GH7774
index = pd.Index(list('abc'))
assert index.reindex(labels)[0].dtype.type == dtype
def test_reindex_no_type_preserve_target_empty_mi(self):
index = pd.Index(list('abc'))
result = index.reindex(pd.MultiIndex(
[pd.Int64Index([]), pd.Float64Index([])], [[], []]))[0]
assert result.levels[0].dtype.type == np.int64
assert result.levels[1].dtype.type == np.float64
def test_groupby(self):
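# Index.groupby groups the index values by the passed array and
# returns a plain dict of group -> Index.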
index = Index(range(5))
result = index.groupby(np.array([1, 1, 2, 2, 2]))
expected = {1: pd.Index([0, 1]), 2: pd.Index([2, 3, 4])}
tm.assert_dict_equal(result, expected)
@pytest.mark.parametrize("mi,expected", [
(MultiIndex.from_tuples([(1, 2), (4, 5)]), np.array([True, True])),
(MultiIndex.from_tuples([(1, 2), (4, 6)]), np.array([True, False]))])
def test_equals_op_multiindex(self, mi, expected):
# GH9785
# test comparisons of multiindex
df = pd.read_csv(StringIO('a,b,c\n1,2,3\n4,5,6'), index_col=[0, 1])
result = df.index == mi
tm.assert_numpy_array_equal(result, expected)
def test_equals_op_multiindex_identify(self):
df = pd.read_csv(StringIO('a,b,c\n1,2,3\n4,5,6'), index_col=[0, 1])
result = df.index == df.index
expected = np.array([True, True])
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("index", [
MultiIndex.from_tuples([(1, 2), (4, 5), (8, 9)]),
Index(['foo', 'bar', 'baz'])])
def test_equals_op_mismatched_multiindex_raises(self, index):
df = pd.read_csv(StringIO('a,b,c\n1,2,3\n4,5,6'), index_col=[0, 1])
with pytest.raises(ValueError, match="Lengths must match"):
df.index == index
def test_equals_op_index_vs_mi_same_length(self):
mi = MultiIndex.from_tuples([(1, 2), (4, 5), (8, 9)])
index = Index(['foo', 'bar', 'baz'])
result = mi == index
expected = np.array([False, False, False])
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize("dt_conv", [
pd.to_datetime, pd.to_timedelta])
def test_dt_conversion_preserves_name(self, dt_conv):
# GH 10875
index = pd.Index(['01:02:03', '01:02:04'], name='label')
assert index.name == dt_conv(index).name
@pytest.mark.skipif(not PY3, reason="compat test")
@pytest.mark.parametrize("index,expected", [
# ASCII
# short
(pd.Index(['a', 'bb', 'ccc']),
u"""Index(['a', 'bb', 'ccc'], dtype='object')"""),
# multiple lines
(pd.Index(['a', 'bb', 'ccc'] * 10),
u"""\
Index(['a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc',
'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc',
'a', 'bb', 'ccc', 'a', 'bb', 'ccc'],
dtype='object')"""),
# truncated
(pd.Index(['a', 'bb', 'ccc'] * 100),
u"""\
Index(['a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a',
...
'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc', 'a', 'bb', 'ccc'],
dtype='object', length=300)"""),
# Non-ASCII
# short
(pd.Index([u'あ', u'いい', u'ううう']),
u"""Index(['あ', 'いい', 'ううう'], dtype='object')"""),
# multiple lines
(pd.Index([u'あ', u'いい', u'ううう'] * 10),
(u"Index(['あ', 'いい', 'ううう', 'あ', 'いい', 'ううう', "
u"'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう',\n"
u" 'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう', "
u"'あ', 'いい', 'ううう', 'あ', 'いい', 'ううう',\n"
u" 'あ', 'いい', 'ううう', 'あ', 'いい', "
u"'ううう'],\n"
u" dtype='object')")),
# truncated
(pd.Index([u'あ', u'いい', u'ううう'] * 100),
(u"Index(['あ', 'いい', 'ううう', 'あ', 'いい', 'ううう', "
u"'あ', 'いい', 'ううう', 'あ',\n"
u" ...\n"
u" 'ううう', 'あ', 'いい', 'ううう', 'あ', 'いい', "
u"'ううう', 'あ', 'いい', 'ううう'],\n"
u" dtype='object', length=300)"))])
def test_string_index_repr(self, index, expected):
result = repr(index)
assert result == expected
@pytest.mark.skipif(PY3, reason="compat test")
@pytest.mark.parametrize("index,expected", [
# ASCII
# short
(pd.Index(['a', 'bb', 'ccc']),
u"""Index([u'a', u'bb', u'ccc'], dtype='object')"""),
# multiple lines
(pd.Index(['a', 'bb', 'ccc'] * 10),
u"""\
Index([u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a',
u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb',
u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc'],
dtype='object')"""),
# truncated
(pd.Index(['a', 'bb', 'ccc'] * 100),
u"""\
Index([u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a',
...
u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc', u'a', u'bb', u'ccc'],
dtype='object', length=300)"""),
# Non-ASCII
# short
(pd.Index([u'あ', u'いい', u'ううう']),
u"""Index([u'あ', u'いい', u'ううう'], dtype='object')"""),
# multiple lines
(pd.Index([u'あ', u'いい', u'ううう'] * 10),
(u"Index([u'あ', u'いい', u'ううう', u'あ', u'いい', "
u"u'ううう', u'あ', u'いい', u'ううう', u'あ',\n"
u" u'いい', u'ううう', u'あ', u'いい', u'ううう', "
u"u'あ', u'いい', u'ううう', u'あ', u'いい',\n"
u" u'ううう', u'あ', u'いい', u'ううう', u'あ', "
u"u'いい', u'ううう', u'あ', u'いい', u'ううう'],\n"
u" dtype='object')")),
# truncated
(pd.Index([u'あ', u'いい', u'ううう'] * 100),
(u"Index([u'あ', u'いい', u'ううう', u'あ', u'いい', "
u"u'ううう', u'あ', u'いい', u'ううう', u'あ',\n"
u" ...\n"
u" u'ううう', u'あ', u'いい', u'ううう', u'あ', "
u"u'いい', u'ううう', u'あ', u'いい', u'ううう'],\n"
u" dtype='object', length=300)"))])
def test_string_index_repr_compat(self, index, expected):
result = unicode(index) # noqa
assert result == expected
@pytest.mark.skipif(not PY3, reason="compat test")
@pytest.mark.parametrize("index,expected", [
# short
(pd.Index([u'あ', u'いい', u'ううう']),
(u"Index(['あ', 'いい', 'ううう'], "
u"dtype='object')")),
# multiple lines
(pd.Index([u'あ', u'いい', u'ううう'] * 10),
(u"Index(['あ', 'いい', 'ううう', 'あ', 'いい', "
u"'ううう', 'あ', 'いい', 'ううう',\n"
u" 'あ', 'いい', 'ううう', 'あ', 'いい', "
u"'ううう', 'あ', 'いい', 'ううう',\n"
u" 'あ', 'いい', 'ううう', 'あ', 'いい', "
u"'ううう', 'あ', 'いい', 'ううう',\n"
u" 'あ', 'いい', 'ううう'],\n"
u" dtype='object')""")),
# truncated
(pd.Index([u'あ', u'いい', u'ううう'] * 100),
(u"Index(['あ', 'いい', 'ううう', 'あ', 'いい', "
u"'ううう', 'あ', 'いい', 'ううう',\n"
u" 'あ',\n"
u" ...\n"
u" 'ううう', 'あ', 'いい', 'ううう', 'あ', "
u"'いい', 'ううう', 'あ', 'いい',\n"
u" 'ううう'],\n"
u" dtype='object', length=300)"))])
def test_string_index_repr_with_unicode_option(self, index, expected):
# Enable Unicode option -----------------------------------------
with cf.option_context('display.unicode.east_asian_width', True):
result = repr(index)
assert result == expected
@pytest.mark.skipif(PY3, reason="compat test")
@pytest.mark.parametrize("index,expected", [
# short
(pd.Index([u'あ', u'いい', u'ううう']),
(u"Index([u'あ', u'いい', u'ううう'], "
u"dtype='object')")),
# multiple lines
(pd.Index([u'あ', u'いい', u'ううう'] * 10),
(u"Index([u'あ', u'いい', u'ううう', u'あ', u'いい', "
u"u'ううう', u'あ', u'いい',\n"
u" u'ううう', u'あ', u'いい', u'ううう', "
u"u'あ', u'いい', u'ううう', u'あ',\n"
u" u'いい', u'ううう', u'あ', u'いい', "
u"u'ううう', u'あ', u'いい',\n"
u" u'ううう', u'あ', u'いい', u'ううう', "
u"u'あ', u'いい', u'ううう'],\n"
u" dtype='object')")),
# truncated
(pd.Index([u'あ', u'いい', u'ううう'] * 100),
(u"Index([u'あ', u'いい', u'ううう', u'あ', u'いい', "
u"u'ううう', u'あ', u'いい',\n"
u" u'ううう', u'あ',\n"
u" ...\n"
u" u'ううう', u'あ', u'いい', u'ううう', "
u"u'あ', u'いい', u'ううう', u'あ',\n"
u" u'いい', u'ううう'],\n"
u" dtype='object', length=300)"))])
def test_string_index_repr_with_unicode_option_compat(self, index,
expected):
# Enable Unicode option -----------------------------------------
with cf.option_context('display.unicode.east_asian_width', True):
result = unicode(index) # noqa
assert result == expected
def test_cached_properties_not_settable(self):
index = pd.Index([1, 2, 3])
with pytest.raises(AttributeError, match="Can't set attribute"):
index.is_unique = False
def test_get_duplicates_deprecated(self):
index = pd.Index([1, 2, 3])
with tm.assert_produces_warning(FutureWarning):
index.get_duplicates()
def test_tab_complete_warning(self, ip):
# https://github.com/pandas-dev/pandas/issues/16409
pytest.importorskip('IPython', minversion="6.0.0")
from IPython.core.completer import provisionalcompleter
code = "import pandas as pd; idx = pd.Index([1, 2])"
ip.run_code(code)
with tm.assert_produces_warning(None):
with provisionalcompleter('ignore'):
list(ip.Completer.completions('idx.', 4))
class TestMixedIntIndex(Base):
# Mostly the tests from common.py for which the results differ
# in py2 and py3 because ints and strings are not comparable in py3
# (GH 13514)
_holder = Index
def setup_method(self, method):
self.indices = dict(mixedIndex=Index([0, 'a', 1, 'b', 2, 'c']))
self.setup_indices()
def create_index(self):
return self.mixedIndex
def test_argsort(self):
index = self.create_index()
if PY36:
with pytest.raises(TypeError, match="'>|<' not supported"):
result = index.argsort()
elif PY3:
with pytest.raises(TypeError, match="unorderable types"):
result = index.argsort()
else:
result = index.argsort()
expected = np.array(index).argsort()
tm.assert_numpy_array_equal(result, expected, check_dtype=False)
def test_numpy_argsort(self):
index = self.create_index()
if PY36:
with pytest.raises(TypeError, match="'>|<' not supported"):
result = np.argsort(index)
elif PY3:
with pytest.raises(TypeError, match="unorderable types"):
result = np.argsort(index)
else:
result = np.argsort(index)
expected = index.argsort()
tm.assert_numpy_array_equal(result, expected)
def test_copy_name(self):
# Check that "name" argument passed at initialization is honoured
# GH12309
index = self.create_index()
first = index.__class__(index, copy=True, name='mario')
second = first.__class__(first, copy=False)
# Even though "copy=False", we want a new object.
assert first is not second
tm.assert_index_equal(first, second)
assert first.name == 'mario'
assert second.name == 'mario'
s1 = Series(2, index=first)
s2 = Series(3, index=second[:-1])
s3 = s1 * s2
assert s3.index.name == 'mario'
def test_copy_name2(self):
# Check that adding a "name" parameter to the copy is honored
# GH14302
index = pd.Index([1, 2], name='MyName')
index1 = index.copy()
tm.assert_index_equal(index, index1)
index2 = index.copy(name='NewName')
tm.assert_index_equal(index, index2, check_names=False)
assert index.name == 'MyName'
assert index2.name == 'NewName'
index3 = index.copy(names=['NewName'])
tm.assert_index_equal(index, index3, check_names=False)
assert index.name == 'MyName'
assert index.names == ['MyName']
assert index3.name == 'NewName'
assert index3.names == ['NewName']
def test_union_base(self):
index = self.create_index()
first = index[3:]
second = index[:5]
result = first.union(second)
expected = Index([0, 1, 2, 'a', 'b', 'c'])
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("klass", [
np.array, Series, list])
def test_union_different_type_base(self, klass):
# GH 10149
index = self.create_index()
first = index[3:]
second = index[:5]
result = first.union(klass(second.values))
assert tm.equalContents(result, index)
def test_unique_na(self):
idx = pd.Index([2, np.nan, 2, 1], name='my_index')
expected = pd.Index([2, np.nan, 1], name='my_index')
result = idx.unique()
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("sort", [None, False])
def test_intersection_base(self, sort):
# (same results for py2 and py3 but sortedness not tested elsewhere)
index = self.create_index()
first = index[:5]
second = index[:3]
expected = Index([0, 1, 'a']) if sort is None else Index([0, 'a', 1])
result = first.intersection(second, sort=sort)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("klass", [
np.array, Series, list])
@pytest.mark.parametrize("sort", [None, False])
def test_intersection_different_type_base(self, klass, sort):
# GH 10149
index = self.create_index()
first = index[:5]
second = index[:3]
result = first.intersection(klass(second.values), sort=sort)
assert tm.equalContents(result, second)
@pytest.mark.parametrize("sort", [None, False])
def test_difference_base(self, sort):
# (same results for py2 and py3 but sortedness not tested elsewhere)
index = self.create_index()
first = index[:4]
second = index[3:]
result = first.difference(second, sort)
expected = Index([0, 'a', 1])
if sort is None:
expected = Index(safe_sort(expected))
tm.assert_index_equal(result, expected)
def test_symmetric_difference(self):
# (same results for py2 and py3 but sortedness not tested elsewhere)
index = self.create_index()
first = index[:4]
second = index[3:]
result = first.symmetric_difference(second)
expected = Index([0, 1, 2, 'a', 'c'])
tm.assert_index_equal(result, expected)
def test_logical_compat(self):
index = self.create_index()
assert index.all() == index.values.all()
assert index.any() == index.values.any()
@pytest.mark.parametrize("how", ['any', 'all'])
@pytest.mark.parametrize("dtype", [
None, object, 'category'])
@pytest.mark.parametrize("vals,expected", [
([1, 2, 3], [1, 2, 3]), ([1., 2., 3.], [1., 2., 3.]),
([1., 2., np.nan, 3.], [1., 2., 3.]),
(['A', 'B', 'C'], ['A', 'B', 'C']),
(['A', np.nan, 'B', 'C'], ['A', 'B', 'C'])])
def test_dropna(self, how, dtype, vals, expected):
# GH 6194
index = pd.Index(vals, dtype=dtype)
result = index.dropna(how=how)
expected = pd.Index(expected, dtype=dtype)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("how", ['any', 'all'])
@pytest.mark.parametrize("index,expected", [
(pd.DatetimeIndex(['2011-01-01', '2011-01-02', '2011-01-03']),
pd.DatetimeIndex(['2011-01-01', '2011-01-02', '2011-01-03'])),
(pd.DatetimeIndex(['2011-01-01', '2011-01-02', '2011-01-03', pd.NaT]),
pd.DatetimeIndex(['2011-01-01', '2011-01-02', '2011-01-03'])),
(pd.TimedeltaIndex(['1 days', '2 days', '3 days']),
pd.TimedeltaIndex(['1 days', '2 days', '3 days'])),
(pd.TimedeltaIndex([pd.NaT, '1 days', '2 days', '3 days', pd.NaT]),
pd.TimedeltaIndex(['1 days', '2 days', '3 days'])),
(pd.PeriodIndex(['2012-02', '2012-04', '2012-05'], freq='M'),
pd.PeriodIndex(['2012-02', '2012-04', '2012-05'], freq='M')),
(pd.PeriodIndex(['2012-02', '2012-04', 'NaT', '2012-05'], freq='M'),
pd.PeriodIndex(['2012-02', '2012-04', '2012-05'], freq='M'))])
def test_dropna_dt_like(self, how, index, expected):
result = index.dropna(how=how)
tm.assert_index_equal(result, expected)
def test_dropna_invalid_how_raises(self):
msg = "invalid how option: xxx"
with pytest.raises(ValueError, match=msg):
pd.Index([1, 2, 3]).dropna(how='xxx')
def test_get_combined_index(self):
result = _get_combined_index([])
expected = Index([])
tm.assert_index_equal(result, expected)
def test_repeat(self):
repeats = 2
index = pd.Index([1, 2, 3])
expected = pd.Index([1, 1, 2, 2, 3, 3])
result = index.repeat(repeats)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize("index", [
pd.Index([np.nan]), pd.Index([np.nan, 1]),
pd.Index([1, 2, np.nan]), pd.Index(['a', 'b', np.nan]),
pd.to_datetime(['NaT']), pd.to_datetime(['NaT', '2000-01-01']),
pd.to_datetime(['2000-01-01', 'NaT', '2000-01-02']),
pd.to_timedelta(['1 day', 'NaT'])])
def test_is_monotonic_na(self, index):
assert index.is_monotonic_increasing is False
assert index.is_monotonic_decreasing is False
assert index._is_strictly_monotonic_increasing is False
assert index._is_strictly_monotonic_decreasing is False
def test_repr_summary(self):
with cf.option_context('display.max_seq_items', 10):
result = repr(pd.Index(np.arange(1000)))
assert len(result) < 200
assert "..." in result
@pytest.mark.parametrize("klass", [Series, DataFrame])
def test_int_name_format(self, klass):
index = Index(['a', 'b', 'c'], name=0)
result = klass(lrange(3), index=index)
assert '0' in repr(result)
def test_print_unicode_columns(self):
df = pd.DataFrame({u("\u05d0"): [1, 2, 3],
"\u05d1": [4, 5, 6],
"c": [7, 8, 9]})
repr(df.columns) # should not raise UnicodeDecodeError
@pytest.mark.parametrize("func,compat_func", [
(str, text_type), # unicode string
(bytes, str) # byte string
])
def test_with_unicode(self, func, compat_func):
index = Index(lrange(1000))
if PY3:
func(index)
else:
compat_func(index)
def test_intersect_str_dates(self):
dt_dates = [datetime(2012, 2, 9), datetime(2012, 2, 22)]
index1 = Index(dt_dates, dtype=object)
index2 = Index(['aa'], dtype=object)
result = index2.intersection(index1)
expected = Index([], dtype=object)
tm.assert_index_equal(result, expected)
class TestIndexUtils(object):
@pytest.mark.parametrize('data, names, expected', [
([[1, 2, 3]], None, Index([1, 2, 3])),
([[1, 2, 3]], ['name'], Index([1, 2, 3], name='name')),
([['a', 'a'], ['c', 'd']], None,
MultiIndex([['a'], ['c', 'd']], [[0, 0], [0, 1]])),
([['a', 'a'], ['c', 'd']], ['L1', 'L2'],
MultiIndex([['a'], ['c', 'd']], [[0, 0], [0, 1]],
names=['L1', 'L2'])),
])
def test_ensure_index_from_sequences(self, data, names, expected):
result = ensure_index_from_sequences(data, names)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize('opname', ['eq', 'ne', 'le', 'lt', 'ge', 'gt',
'add', 'radd', 'sub', 'rsub',
'mul', 'rmul', 'truediv', 'rtruediv',
'floordiv', 'rfloordiv',
'pow', 'rpow', 'mod', 'divmod'])
def test_generated_op_names(opname, indices):
index = indices
if isinstance(index, ABCIndex) and opname == 'rsub':
# pd.Index.__rsub__ does not exist; though the method does exist
# for subclasses. see GH#19723
return
opname = '__{name}__'.format(name=opname)
method = getattr(index, opname)
assert method.__name__ == opname
@pytest.mark.parametrize('index_maker', tm.index_subclass_makers_generator())
def test_index_subclass_constructor_wrong_kwargs(index_maker):
# GH #19348
with pytest.raises(TypeError, match='unexpected keyword argument'):
index_maker(foo='bar')
def test_deprecated_fastpath():
with tm.assert_produces_warning(FutureWarning):
idx = pd.Index(
np.array(['a', 'b'], dtype=object), name='test', fastpath=True)
expected = pd.Index(['a', 'b'], name='test')
tm.assert_index_equal(idx, expected)
with tm.assert_produces_warning(FutureWarning):
idx = pd.Int64Index(
np.array([1, 2, 3], dtype='int64'), name='test', fastpath=True)
expected = pd.Index([1, 2, 3], name='test', dtype='int64')
tm.assert_index_equal(idx, expected)
with tm.assert_produces_warning(FutureWarning):
idx = pd.RangeIndex(0, 5, 2, name='test', fastpath=True)
expected = pd.RangeIndex(0, 5, 2, name='test')
tm.assert_index_equal(idx, expected)
with tm.assert_produces_warning(FutureWarning):
idx = pd.CategoricalIndex(['a', 'b', 'c'], name='test', fastpath=True)
expected = pd.CategoricalIndex(['a', 'b', 'c'], name='test')
tm.assert_index_equal(idx, expected)
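# A quick illustrative sketch (not part of the original test module) of
# the dropna behavior exercised above:
# >>> pd.Index(['A', np.nan, 'B', 'C']).dropna(how='any')
# Index(['A', 'B', 'C'], dtype='object')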
| [
"[email protected]"
] | |
677d22f42d470e7e6fab11f89b82637deaaa0fb6 | be80a2468706ab99c838fa85555c75db8f38bdeb | /app/reward/migrations/0002_auto_20180822_0903.py | 2e25721da289ed95493031d61d3ce8c3cf1f9c9a | [] | no_license | kimdohwan/Wadiz | 5468d218ba069387deabf83376b42a4f69360881 | 91f85f09a7c9a59864b69990127911a112d4bdbd | refs/heads/master | 2021-06-24T06:41:04.111305 | 2019-07-03T12:51:18 | 2019-07-03T12:51:18 | 143,955,968 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,105 | py | # Generated by Django 2.1 on 2018-08-22 00:03
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('reward', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='FundingOrder',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=20)),
('phone_number', models.CharField(blank=True, max_length=11, validators=[django.core.validators.RegexValidator(message='Phone number must be 11 numbers', regex='\\d{11}')])),
('address1', models.CharField(max_length=30)),
('address2', models.CharField(max_length=30)),
('comment', models.TextField()),
('requested_at', models.DateTimeField(auto_now_add=True)),
('cancel_at', models.DateTimeField(null=True)),
],
),
migrations.RemoveField(
model_name='funding',
name='address1',
),
migrations.RemoveField(
model_name='funding',
name='address2',
),
migrations.RemoveField(
model_name='funding',
name='cancel_at',
),
migrations.RemoveField(
model_name='funding',
name='comment',
),
migrations.RemoveField(
model_name='funding',
name='phone_number',
),
migrations.RemoveField(
model_name='funding',
name='requested_at',
),
migrations.RemoveField(
model_name='funding',
name='username',
),
migrations.AddField(
model_name='funding',
name='order',
field=models.ForeignKey(default='', on_delete=django.db.models.deletion.CASCADE, related_name='order', to='reward.FundingOrder'),
preserve_default=False,
),
]
| [
"[email protected]"
] | |
f44f6d9972814a4e7a1f84001a60cf2ac08ac418 | 5c26eafece0ee85a7ed4b6a34ee52753d7c86e49 | /polyaxon/estimators/hooks/step_hooks.py | 0e177575b29f1a02195d3439137b45db2c0d2a1a | [
"MIT"
] | permissive | StetHD/polyaxon | 345257076d484b2267ba20d9d346f1367cdd92d3 | dabddb9b6ea922a0549e3c6fd7711231f7462fa3 | refs/heads/master | 2021-03-19T06:45:51.806485 | 2017-09-26T14:31:26 | 2017-09-26T14:36:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,184 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from collections import OrderedDict
from tensorflow.python.training import basic_session_run_hooks
from polyaxon.estimators.hooks.utils import can_run_hook
class StepLoggingTensorHook(basic_session_run_hooks.LoggingTensorHook):
"""Prints the given tensors once every N local steps or once every N seconds.
A modified version of tensorflow.python.training.basic_session_run_hooks LoggingTensorHook.
Checks the context for `no_run_hooks_op` before calling the the hook.
The tensors will be printed to the log, with `INFO` severity.
Args:
tensors: `dict` that maps string-valued tags to tensors/tensor names,
or `iterable` of tensors/tensor names.
every_n_iter: `int`, print the values of `tensors` once every N local
steps taken on the current worker.
every_n_secs: `int` or `float`, print the values of `tensors` once every N
seconds. Exactly one of `every_n_iter` and `every_n_secs` should be
provided.
formatter: function, takes dict of `tag`->`Tensor` and returns a string.
If `None` uses default printing all tensors.
Raises:
ValueError: if `every_n_iter` is non-positive.
"""
def __init__(self, tensors, every_n_iter=None, every_n_secs=None, formatter=None):
super(StepLoggingTensorHook, self).__init__(tensors, every_n_iter, every_n_secs, formatter)
def before_run(self, run_context): # pylint: disable=unused-argument
self._should_trigger = can_run_hook(run_context)
if self._should_trigger:
return super(StepLoggingTensorHook, self).before_run(run_context)
else:
return None
class StopAtStepHook(basic_session_run_hooks.StopAtStepHook):
"""Monitor to request stop at a specified step.
(A mirror to tensorflow.python.training.basic_session_run_hooks StopAtStepHook.)
This hook requests stop after either a number of steps have been
executed or a last step has been reached. Only one of the two options can be
specified.
if `num_steps` is specified, it indicates the number of steps to execute
after `begin()` is called. If instead `last_step` is specified, it
indicates the last step we want to execute, as passed to the `after_run()`
call.
Args:
num_steps: Number of steps to execute.
last_step: Step after which to stop.
Raises:
ValueError: If one of the arguments is invalid.
"""
def __init__(self, num_steps=None, last_step=None):
super(StopAtStepHook, self).__init__(num_steps, last_step)
class StepCheckpointSaverHook(basic_session_run_hooks.CheckpointSaverHook):
"""Saves checkpoints every N steps or seconds.
(A mirror to tensorflow.python.training.basic_session_run_hooks CheckpointSaverHook.)
Args:
checkpoint_dir: `str`, base directory for the checkpoint files.
save_secs: `int`, save every N secs.
save_steps: `int`, save every N steps.
saver: `Saver` object, used for saving.
checkpoint_basename: `str`, base name for the checkpoint files.
scaffold: `Scaffold`, use to get saver object.
listeners: List of `CheckpointSaverListener` subclass instances.
Used for callbacks that run immediately after the corresponding
CheckpointSaverHook callbacks, only in steps where the
CheckpointSaverHook was triggered.
Raises:
ValueError: One of `save_steps` or `save_secs` should be set.
ValueError: Exactly one of saver or scaffold should be set.
"""
def __init__(self, checkpoint_dir, save_secs=None, save_steps=None, saver=None,
checkpoint_basename="model.ckpt", scaffold=None, listeners=None):
super(StepCheckpointSaverHook, self).__init__(checkpoint_dir, save_secs, save_steps, saver,
checkpoint_basename, scaffold, listeners)
class StepCounterHook(basic_session_run_hooks.StepCounterHook):
"""Steps per second monitor.
(A mirror to tensorflow.python.training.basic_session_run_hooks CheckpointSaverHook.)
"""
def __init__(self, every_n_steps=100, every_n_secs=None, output_dir=None, summary_writer=None):
super(StepCounterHook, self).__init__(
every_n_steps, every_n_secs, output_dir, summary_writer)
class StepSummarySaverHook(basic_session_run_hooks.SummarySaverHook):
"""Saves summaries every N steps.
(A mirror to tensorflow.python.training.basic_session_run_hooks NanTensorHook.)
Args:
save_steps: `int`, save summaries every N steps. Exactly one of
`save_secs` and `save_steps` should be set.
save_secs: `int`, save summaries every N seconds.
output_dir: `string`, the directory to save the summaries to. Only used
if no `summary_writer` is supplied.
summary_writer: `SummaryWriter`. If `None` and an `output_dir` was passed,
one will be created accordingly.
scaffold: `Scaffold` to get summary_op if it's not provided.
summary_op: `Tensor` of type `string` containing the serialized `Summary`
protocol buffer or a list of `Tensor`. They are most likely an output
by TF summary methods like `tf.summary.scalar` or
`tf.summary.merge_all`. It can be passed in as one tensor; if more
than one, they must be passed in as a list.
Raises:
ValueError: Exactly one of scaffold or summary_op should be set.
"""
def __init__(self, save_steps=None, save_secs=None, output_dir=None, summary_writer=None,
scaffold=None, summary_op=None):
super(StepSummarySaverHook, self).__init__(
save_steps, save_secs, output_dir, summary_writer, scaffold, summary_op)
STEP_HOOKS = OrderedDict([
('StepLoggingTensorHook', StepLoggingTensorHook),
('StopAtStepHook', StopAtStepHook),
('StepCheckpointSaverHook', StepCheckpointSaverHook),
('StepCounterHook', StepCounterHook),
('StepSummarySaverHook', StepSummarySaverHook),
])
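# A minimal usage sketch (illustrative only; the hook settings shown are
# assumptions, not part of this module): hook classes can be looked up
# by name in STEP_HOOKS and instantiated with their keyword arguments.
# hook_cls = STEP_HOOKS['StepCounterHook']
# hook = hook_cls(every_n_steps=100, output_dir='/tmp/model_logs')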
| [
"[email protected]"
] | |
1e30a64ef30c526d7e94b66f205c369d97dd8da2 | fa04309288a0f8b2daae2fd73c8224a1c0ad4d95 | /eventkit_cloud/tasks/tests/test_task_factory.py | b02b4477e9ad630dbbdb95b91ae28bb1c39b5c47 | [] | no_license | jj0hns0n/eventkit-cloud | 7bb828c57f29887621e47fe7ce0baa14071ef39e | 2f749090baf796b507e79251a4c4b30cb0b4e126 | refs/heads/master | 2021-01-01T19:45:32.464729 | 2017-07-24T19:01:24 | 2017-07-24T19:01:24 | 98,675,805 | 0 | 0 | null | 2017-07-28T18:16:34 | 2017-07-28T18:16:34 | null | UTF-8 | Python | false | false | 7,545 | py | # -*- coding: utf-8 -*-
import logging
import os
import uuid
from django.contrib.auth.models import Group, User
from django.contrib.gis.geos import GEOSGeometry, Polygon
from django.db import DatabaseError
from django.test import TestCase
from eventkit_cloud.jobs.models import Job, Region, ProviderTask, ExportProvider, License, UserLicense
from eventkit_cloud.tasks.models import ExportRun
from eventkit_cloud.tasks.task_factory import (TaskFactory, create_run, create_finalize_run_task_collection,
get_invalid_licenses)
from mock import patch, Mock, MagicMock
logger = logging.getLogger(__name__)
class TestExportTaskFactory(TestCase):
"""
Test cases for the TaskFactory.
"""
fixtures = ('insert_provider_types.json', 'osm_provider.json',)
def setUp(self,):
self.path = os.path.dirname(os.path.realpath(__file__))
Group.objects.create(name='TestDefaultExportExtentGroup')
self.user = User.objects.create(username='demo', email='[email protected]', password='demo')
bbox = Polygon.from_bbox((-10.85, 6.25, -10.62, 6.40))
the_geom = GEOSGeometry(bbox, srid=4326)
self.job = Job.objects.create(name='TestJob', description='Test description', user=self.user,
the_geom=the_geom)
provider = ExportProvider.objects.get(slug='osm')
self.license = License.objects.create(slug='odbl-test', name='test_osm_license')
provider.license = self.license
provider.save()
UserLicense.objects.create(license=self.license, user=self.user)
provider_task = ProviderTask.objects.create(provider=provider)
self.job.provider_tasks.add(provider_task)
self.region = Region.objects.get(name='Africa')
self.job.region = self.region
self.uid = str(provider_task.uid)
self.job.save()
def test_create_run_success(self):
run_uid = create_run(job_uid=self.job.uid)
self.assertIsNotNone(run_uid)
self.assertIsNotNone(ExportRun.objects.get(uid=run_uid))
@patch('eventkit_cloud.tasks.task_factory.ExportRun')
def test_create_run_failure(self, ExportRun):
ExportRun.objects.create.side_effect = DatabaseError('FAIL')
with self.assertRaises(DatabaseError):
run_uid = create_run(job_uid=self.job.uid)
self.assertIsNone(run_uid)
@patch('eventkit_cloud.tasks.task_factory.get_invalid_licenses')
@patch('eventkit_cloud.tasks.task_factory.finalize_export_provider_task')
@patch('eventkit_cloud.tasks.task_factory.create_task')
@patch('eventkit_cloud.tasks.task_factory.chain')
def test_task_factory(self, task_factory_chain, create_task,
finalize_task, mock_invalid_licenses):
mock_invalid_licenses.return_value = []
run_uid = create_run(job_uid=self.job.uid)
self.assertIsNotNone(run_uid)
self.assertIsNotNone(ExportRun.objects.get(uid=run_uid))
worker = "some_worker"
provider_uuid = uuid.uuid4()
task_runner = MagicMock()
task = Mock()
task_runner().run_task.return_value = (provider_uuid, task)
create_task.return_value = task
task_factory = TaskFactory()
task_factory.type_task_map = {'osm-generic': task_runner, 'osm': task_runner}
task_factory.parse_tasks(run_uid=run_uid, worker=worker)
task_factory_chain.assert_called()
create_task.assert_called()
finalize_task.s.assert_called()
# Test that run is prevented and deleted if the user has not agreed to the licenses.
mock_invalid_licenses.return_value = ['invalid-licenses']
with self.assertRaises(Exception):
task_factory.parse_tasks(run_uid=run_uid, worker=worker)
run = ExportRun.objects.filter(uid=run_uid).first()
self.assertIsNone(run)
def test_get_invalid_licenses(self):
# The license should not be returned if the user has agreed to it.
expected_invalid_licenses = []
invalid_licenses = get_invalid_licenses(self.job)
self.assertEquals(invalid_licenses, expected_invalid_licenses)
# A license should be returned if the user has not agreed to it.
UserLicense.objects.get(license=self.license, user=self.user).delete()
expected_invalid_licenses = [self.license.name]
invalid_licenses = get_invalid_licenses(self.job)
self.assertEquals(invalid_licenses, expected_invalid_licenses)
UserLicense.objects.create(license=self.license, user=self.user)
class CreateFinalizeRunTaskCollectionTests(TestCase):
@patch('eventkit_cloud.tasks.task_factory.example_finalize_run_hook_task')
@patch('eventkit_cloud.tasks.task_factory.prepare_for_export_zip_task')
@patch('eventkit_cloud.tasks.task_factory.zip_file_task')
@patch('eventkit_cloud.tasks.task_factory.finalize_run_task_as_errback')
@patch('eventkit_cloud.tasks.task_factory.finalize_run_task')
@patch('eventkit_cloud.tasks.task_factory.chain')
def test_create_finalize_run_task_collection(
self, chain, finalize_run_task, finalize_run_task_as_errback, zip_file_task, prepare_for_export_zip_task, example_finalize_run_hook_task):
""" Checks that all of the expected tasks were prepared and combined in a chain for return.
"""
chain.return_value = 'When not mocked, this would be a celery chain'
# None of these need correspond to real things, they're just to check the inner calls.
run_uid = 1
run_dir = 'test_dir'
worker = 'test_worker'
expected_task_settings = {
'interval': 1, 'max_retries': 10, 'queue': worker, 'routing_key': worker, 'priority': 70}
# This should return a chain of tasks ending in the finalize_run_task, plus a task sig for just the
# finalize_run_task.
finalize_chain, errback = create_finalize_run_task_collection(run_uid=run_uid, run_dir=run_dir, worker=worker)
example_finalize_run_hook_task.si.assert_called_once_with([], run_uid=run_uid)
example_finalize_run_hook_task.si.return_value.set.assert_called_once_with(**expected_task_settings)
prepare_for_export_zip_task.s.assert_called_once_with(run_uid=run_uid)
prepare_for_export_zip_task.s.return_value.set.assert_called_once_with(**expected_task_settings)
zip_file_task.s.assert_called_once_with(run_uid=run_uid)
zip_file_task.s.return_value.set.assert_called_once_with(**expected_task_settings)
finalize_run_task.si.assert_called_once_with(run_uid=run_uid, stage_dir=run_dir)
finalize_run_task.si.return_value.set.assert_called_once_with(**expected_task_settings)
self.assertEqual(finalize_chain, 'When not mocked, this would be a celery chain')
self.assertEqual(errback, finalize_run_task_as_errback.si())
self.assertEqual(chain.call_count, 1)
# Grab the args for the first (only) call
chain_inputs = chain.call_args[0]
# The result of setting the args & settings for each task,
# which unmocked would be a task signature, should be passed to celery.chain
expected_chain_inputs = (
example_finalize_run_hook_task.si.return_value.set.return_value,
prepare_for_export_zip_task.s.return_value.set.return_value,
zip_file_task.s.return_value.set.return_value,
finalize_run_task.si.return_value.set.return_value,
)
self.assertEqual(chain_inputs, expected_chain_inputs)
| [
"[email protected]"
] | |
526512060ec60f64cab763dcdc20a58c882fa21b | e3040a2e23a856e319e02037dc6baf3882c796b9 | /samples/openapi3/client/petstore/python/petstore_api/paths/pet_find_by_status/get.py | bca423ad68f208522270ab2159908c0f06ae7b00 | [
"Apache-2.0"
] | permissive | mishin/openapi-generator | 2ed2e0739c0cc2a627c25191d5898071d9294036 | 3ed650307513d552404f3d76487f3b4844acae41 | refs/heads/master | 2023-06-10T03:01:09.612130 | 2022-10-14T08:29:15 | 2022-10-14T08:29:15 | 271,080,285 | 0 | 0 | Apache-2.0 | 2023-05-30T02:01:25 | 2020-06-09T18:29:41 | Java | UTF-8 | Python | false | false | 12,472 | py | # coding: utf-8
"""
Generated by: https://openapi-generator.tech
"""
from dataclasses import dataclass
import typing_extensions
import urllib3
from urllib3._collections import HTTPHeaderDict
from petstore_api import api_client, exceptions
from datetime import date, datetime # noqa: F401
import decimal # noqa: F401
import functools # noqa: F401
import io # noqa: F401
import re # noqa: F401
import typing # noqa: F401
import typing_extensions # noqa: F401
import uuid # noqa: F401
import frozendict # noqa: F401
from petstore_api import schemas # noqa: F401
from petstore_api.model.pet import Pet
from . import path
# Query params
class StatusSchema(
schemas.ListSchema
):
class MetaOapg:
class items(
schemas.EnumBase,
schemas.StrSchema
):
class MetaOapg:
enum_value_to_name = {
"available": "AVAILABLE",
"pending": "PENDING",
"sold": "SOLD",
}
@schemas.classproperty
def AVAILABLE(cls):
return cls("available")
@schemas.classproperty
def PENDING(cls):
return cls("pending")
@schemas.classproperty
def SOLD(cls):
return cls("sold")
def __new__(
cls,
arg: typing.Union[typing.Tuple[typing.Union[MetaOapg.items, str, ]], typing.List[typing.Union[MetaOapg.items, str, ]]],
_configuration: typing.Optional[schemas.Configuration] = None,
) -> 'StatusSchema':
return super().__new__(
cls,
arg,
_configuration=_configuration,
)
def __getitem__(self, i: int) -> MetaOapg.items:
return super().__getitem__(i)
RequestRequiredQueryParams = typing_extensions.TypedDict(
'RequestRequiredQueryParams',
{
'status': typing.Union[StatusSchema, list, tuple, ],
}
)
RequestOptionalQueryParams = typing_extensions.TypedDict(
'RequestOptionalQueryParams',
{
},
total=False
)
class RequestQueryParams(RequestRequiredQueryParams, RequestOptionalQueryParams):
pass
request_query_status = api_client.QueryParameter(
name="status",
style=api_client.ParameterStyle.FORM,
schema=StatusSchema,
required=True,
)
_auth = [
'http_signature_test',
'petstore_auth',
]
class SchemaFor200ResponseBodyApplicationXml(
schemas.ListSchema
):
class MetaOapg:
@staticmethod
def items() -> typing.Type['Pet']:
return Pet
def __new__(
cls,
arg: typing.Union[typing.Tuple['Pet'], typing.List['Pet']],
_configuration: typing.Optional[schemas.Configuration] = None,
) -> 'SchemaFor200ResponseBodyApplicationXml':
return super().__new__(
cls,
arg,
_configuration=_configuration,
)
def __getitem__(self, i: int) -> 'Pet':
return super().__getitem__(i)
class SchemaFor200ResponseBodyApplicationJson(
schemas.ListSchema
):
class MetaOapg:
@staticmethod
def items() -> typing.Type['Pet']:
return Pet
def __new__(
cls,
arg: typing.Union[typing.Tuple['Pet'], typing.List['Pet']],
_configuration: typing.Optional[schemas.Configuration] = None,
) -> 'SchemaFor200ResponseBodyApplicationJson':
return super().__new__(
cls,
arg,
_configuration=_configuration,
)
def __getitem__(self, i: int) -> 'Pet':
return super().__getitem__(i)
@dataclass
class ApiResponseFor200(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: typing.Union[
SchemaFor200ResponseBodyApplicationXml,
SchemaFor200ResponseBodyApplicationJson,
]
headers: schemas.Unset = schemas.unset
_response_for_200 = api_client.OpenApiResponse(
response_cls=ApiResponseFor200,
content={
'application/xml': api_client.MediaType(
schema=SchemaFor200ResponseBodyApplicationXml),
'application/json': api_client.MediaType(
schema=SchemaFor200ResponseBodyApplicationJson),
},
)
@dataclass
class ApiResponseFor400(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: schemas.Unset = schemas.unset
headers: schemas.Unset = schemas.unset
_response_for_400 = api_client.OpenApiResponse(
response_cls=ApiResponseFor400,
)
_status_code_to_response = {
'200': _response_for_200,
'400': _response_for_400,
}
_all_accept_content_types = (
'application/xml',
'application/json',
)
class BaseApi(api_client.Api):
@typing.overload
def _find_pets_by_status_oapg(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: typing_extensions.Literal[False] = ...,
) -> typing.Union[
ApiResponseFor200,
]: ...
@typing.overload
def _find_pets_by_status_oapg(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
) -> api_client.ApiResponseWithoutDeserialization: ...
@typing.overload
def _find_pets_by_status_oapg(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = ...,
) -> typing.Union[
ApiResponseFor200,
api_client.ApiResponseWithoutDeserialization,
]: ...
def _find_pets_by_status_oapg(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
):
"""
Finds Pets by status
:param skip_deserialization: If true then api_response.response will be set but
api_response.body and api_response.headers will not be deserialized into schema
class instances
"""
self._verify_typed_dict_inputs_oapg(RequestQueryParams, query_params)
used_path = path.value
prefix_separator_iterator = None
for parameter in (
request_query_status,
):
parameter_data = query_params.get(parameter.name, schemas.unset)
if parameter_data is schemas.unset:
continue
if prefix_separator_iterator is None:
prefix_separator_iterator = parameter.get_prefix_separator_iterator()
serialized_data = parameter.serialize(parameter_data, prefix_separator_iterator)
for serialized_value in serialized_data.values():
used_path += serialized_value
_headers = HTTPHeaderDict()
# TODO add cookie handling
if accept_content_types:
for accept_content_type in accept_content_types:
_headers.add('Accept', accept_content_type)
response = self.api_client.call_api(
resource_path=used_path,
method='get'.upper(),
headers=_headers,
auth_settings=_auth,
stream=stream,
timeout=timeout,
)
if skip_deserialization:
api_response = api_client.ApiResponseWithoutDeserialization(response=response)
else:
response_for_status = _status_code_to_response.get(str(response.status))
if response_for_status:
api_response = response_for_status.deserialize(response, self.api_client.configuration)
else:
api_response = api_client.ApiResponseWithoutDeserialization(response=response)
if not 200 <= response.status <= 299:
raise exceptions.ApiException(api_response=api_response)
return api_response
class FindPetsByStatus(BaseApi):
# this class is used by api classes that refer to endpoints with operationId fn names
@typing.overload
def find_pets_by_status(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: typing_extensions.Literal[False] = ...,
) -> typing.Union[
ApiResponseFor200,
]: ...
@typing.overload
def find_pets_by_status(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
) -> api_client.ApiResponseWithoutDeserialization: ...
@typing.overload
def find_pets_by_status(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = ...,
) -> typing.Union[
ApiResponseFor200,
api_client.ApiResponseWithoutDeserialization,
]: ...
def find_pets_by_status(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
):
return self._find_pets_by_status_oapg(
query_params=query_params,
accept_content_types=accept_content_types,
stream=stream,
timeout=timeout,
skip_deserialization=skip_deserialization
)
class ApiForget(BaseApi):
# this class is used by api classes that refer to endpoints by path and http method names
@typing.overload
def get(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: typing_extensions.Literal[False] = ...,
) -> typing.Union[
ApiResponseFor200,
]: ...
@typing.overload
def get(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
) -> api_client.ApiResponseWithoutDeserialization: ...
@typing.overload
def get(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = ...,
) -> typing.Union[
ApiResponseFor200,
api_client.ApiResponseWithoutDeserialization,
]: ...
def get(
self,
query_params: RequestQueryParams = frozendict.frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
):
return self._find_pets_by_status_oapg(
query_params=query_params,
accept_content_types=accept_content_types,
stream=stream,
timeout=timeout,
skip_deserialization=skip_deserialization
)
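# A minimal usage sketch (illustrative only; the client wiring below is
# assumed from this generated package, not shown in this file):
# from petstore_api import api_client
# client = api_client.ApiClient()
# api = FindPetsByStatus(api_client=client)
# response = api.find_pets_by_status(query_params={'status': ['available']})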
| [
"[email protected]"
] | |
bfe394598000549c8aa731dc5185e43ee6e450f1 | 15581a76b36eab6062e71d4e5641cdfaf768b697 | /Leetcode Contests/Biweekly Contest 24/Minimum Value to Get Positive Step by Step Sum.py | ed393ceda76cec842051a7cd8dd259618306c947 | [] | no_license | MarianDanaila/Competitive-Programming | dd61298cc02ca3556ebc3394e8d635b57f58b4d2 | 3c5a662e931a5aa1934fba74b249bce65a5d75e2 | refs/heads/master | 2023-05-25T20:03:18.468713 | 2023-05-16T21:45:08 | 2023-05-16T21:45:08 | 254,296,597 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 318 | py | from typing import List
class Solution:
def minStartValue(self, nums: List[int]) -> int:
        # running prefix sum and its minimum so far; names avoid
        # shadowing the built-ins sum() and min()
        total = 0
        min_sum = nums[0]
        for x in nums:
            total += x
            if total < min_sum:
                min_sum = total
        if min_sum >= 0:
            return 1
        else:
            return abs(min_sum) + 1
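# Quick self-check of the prefix-sum reasoning (illustrative only):
# for nums = [-3, 2, -3, 4, 2] the running sums are -3, -1, -4, 0, 2,
# the minimum prefix sum is -4, so the answer is 1 - (-4) = 5.
# assert Solution().minStartValue([-3, 2, -3, 4, 2]) == 5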
| [
"[email protected]"
] | |
06ce341e0e7626e2104a0667155275b069268653 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Kivy/pycon2013/html5slides/scripts/md/render.py | b5ef0975e20eb201985c57c5b48cd150050171da | [] | no_license | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:6e3940fcf589334234bc7943dfc2c0d8e860fc139a432eae485128714022232c
size 1807
| [
"[email protected]"
] | |
e5b0887d810d27576528bafda388fdfd915d3c4f | c6320d68968de93ce9d686f5a59bb34909d089bb | /03_Polynomial_Regression/polynomial_regression_rad.py | fafb65739a4f26fa1c7981097fe77412704b96b8 | [] | no_license | rbartosinski/MachineLearningRes | 0835e6b9f94c309bf2ce8ff7ceb73912a7eeea63 | 5a1af15e77d589149aa1cb22cb96f56956fd9a0f | refs/heads/master | 2020-04-07T00:58:03.692579 | 2019-01-11T13:49:12 | 2019-01-11T13:49:12 | 157,925,825 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,313 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 20 15:04:28 2018
@author: radek
"""
# load the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# load the data
dataset = pd.read_csv('Position_Salaries.csv')
X = dataset.iloc[:, 1:2].values
y = dataset.iloc[:, 2].values
# fit Linear Regression to the dataset
from sklearn.linear_model import LinearRegression
lin_reg = LinearRegression()
lin_reg.fit(X, y)
# fit Polynomial Regression to the dataset
from sklearn.preprocessing import PolynomialFeatures
poly_reg = PolynomialFeatures(degree=4)
X_poly = poly_reg.fit_transform(X)
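# fit_transform expands each position level x into the feature row
# [1, x, x**2, x**3, x**4] used by the degree-4 fit below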
lin_reg2 = LinearRegression()
lin_reg2.fit(X_poly, y)
# visualize the Linear Regression results
plt.scatter(X, y, color='red')
plt.plot(X, lin_reg.predict(X), color='blue')
plt.title('Position level vs. Salary (Linear Regression)')
plt.xlabel('Position level')
plt.ylabel('Salary')
plt.show()
# visualize the Polynomial Regression results
X_grid = np.arange(min(X), max(X), 0.1)
X_grid = X_grid.reshape((len(X_grid), 1))
plt.scatter(X, y, color='red')
plt.plot(X_grid, lin_reg2.predict(poly_reg.fit_transform(X_grid)), color='blue')
plt.title('Position level vs. Salary (Polynomial Regression)')
plt.xlabel('Position level')
plt.ylabel('Salary')
plt.show()
# prediction from Linear Regression
lin_reg.predict([[6.5]])
# prediction from Polynomial Regression
lin_reg2.predict(poly_reg.fit_transform([[6.5]]))
| [
"[email protected]"
] | |
e4ae96c0131406c2419a148c0186b3269acfa42f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03964/s755365360.py | 9f2a66cabd6d3f24f2aafce6d59b731dbfbc227f | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 619 | py | import bisect
import collections
import copy
import functools
import heapq
import math
import sys
from collections import deque
from collections import defaultdict
input = sys.stdin.readline
MOD = 10**9+7
N = int(input())
T = [0]*N
A = [0]*N
for i in range(N):
T[i],A[i] = map(int,(input().split()))
t,a = T[0],A[0]
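# Each iteration binary-searches the smallest multiplier `now` such that
# T[i]*now >= t and A[i]*now >= a, so the new totals keep the exact
# ratio T[i]:A[i] while neither running total ever decreases.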
for i in range(1,N):
s = T[i] + A[i]
now = 1
l = 1
r = 10**18//s + 1
mae = -1
while now != mae:
mae = now
if T[i]*now < t or A[i]*now < a:
l = now
else:
r = now
now = (l+r+1)//2
t,a = T[i]*now,A[i]*now
print(t+a)
| [
"[email protected]"
] | |
5a1e071972d89f69b241aff120e8fcd705ae1ca1 | cc0d06e2aad3d30152c4a3f3356befdc58748313 | /2.til8.oktober/plot_wavepacket.py | a4583b0987077f652a46aaf25eff8dbe8cd4c6bb | [] | no_license | lasse-steinnes/IN1900 | db0bb4da33fa024d4fe9207337c0f1d956197c50 | c8d97c2903078471f8e419f88cc8488d9b8fc7da | refs/heads/master | 2020-12-14T15:34:36.429764 | 2020-01-18T19:59:46 | 2020-01-18T19:59:46 | 234,789,653 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 473 | py | ###
## Define the function
from numpy import exp, sin, pi, linspace
bølge = lambda x,t=0: exp(-(x-3*t)**2)*sin(3*pi*(x-t))
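# Gaussian envelope exp(-(x-3t)**2) travelling at speed 3, modulating
# a sinusoidal carrier sin(3*pi*(x-t)) travelling at speed 1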
## Create the interval for x
x_matrise = linspace(-4,4,1500)
# Evaluate the wave packet at t = 0
bølge_t0 = bølge(x_matrise)
### Plot the function
import matplotlib.pyplot as plt
plt.plot(x_matrise, bølge_t0, label = 'wave packet for t=0')
plt.legend()
plt.xlabel("x")
plt.ylabel("Amplitude")
plt.show()
## Example run
"""
>> python plot_wavepacket.py
(plot)
"""
| [
"[email protected]"
] | |
3701dcb0526d0abec2a1850baf3176ed362ec0d1 | d0eb582894eff3c44e3de4bd50f571f9d9ab3a02 | /venv/lib/python3.7/site-packages/flake8/plugins/pyflakes.py | 018d1c98a1f847fa743d847fa6d66a99ac4dbc0c | [
"MIT"
] | permissive | tdle94/app-store-scrapper | 159187ef3825213d40425215dd9c9806b415769e | ed75880bac0c9ef685b2c1bf57a6997901abface | refs/heads/master | 2022-12-20T21:10:59.621305 | 2020-10-28T00:32:21 | 2020-10-28T00:32:21 | 247,291,364 | 1 | 2 | MIT | 2022-12-08T03:53:08 | 2020-03-14T14:25:44 | Python | UTF-8 | Python | false | false | 6,021 | py | """Plugin built-in to Flake8 to treat pyflakes as a plugin."""
# -*- coding: utf-8 -*-
from __future__ import absolute_import
try:
# The 'demandimport' breaks pyflakes and flake8.plugins.pyflakes
from mercurial import demandimport
except ImportError:
pass
else:
demandimport.disable()
import os
from typing import List
import pyflakes
import pyflakes.checker
from flake8 import utils
FLAKE8_PYFLAKES_CODES = {
"UnusedImport": "F401",
"ImportShadowedByLoopVar": "F402",
"ImportStarUsed": "F403",
"LateFutureImport": "F404",
"ImportStarUsage": "F405",
"ImportStarNotPermitted": "F406",
"FutureFeatureNotDefined": "F407",
"MultiValueRepeatedKeyLiteral": "F601",
"MultiValueRepeatedKeyVariable": "F602",
"TooManyExpressionsInStarredAssignment": "F621",
"TwoStarredExpressions": "F622",
"AssertTuple": "F631",
"IsLiteral": "F632",
"InvalidPrintSyntax": "F633",
"BreakOutsideLoop": "F701",
"ContinueOutsideLoop": "F702",
"ContinueInFinally": "F703",
"YieldOutsideFunction": "F704",
"ReturnWithArgsInsideGenerator": "F705",
"ReturnOutsideFunction": "F706",
"DefaultExceptNotLast": "F707",
"DoctestSyntaxError": "F721",
"ForwardAnnotationSyntaxError": "F722",
"CommentAnnotationSyntaxError": "F723",
"RedefinedWhileUnused": "F811",
"RedefinedInListComp": "F812",
"UndefinedName": "F821",
"UndefinedExport": "F822",
"UndefinedLocal": "F823",
"DuplicateArgument": "F831",
"UnusedVariable": "F841",
"RaiseNotImplemented": "F901",
}
class FlakesChecker(pyflakes.checker.Checker):
"""Subclass the Pyflakes checker to conform with the flake8 API."""
name = "pyflakes"
version = pyflakes.__version__
with_doctest = False
include_in_doctest = [] # type: List[str]
exclude_from_doctest = [] # type: List[str]
def __init__(self, tree, file_tokens, filename):
"""Initialize the PyFlakes plugin with an AST tree and filename."""
filename = utils.normalize_path(filename)
with_doctest = self.with_doctest
included_by = [
include
for include in self.include_in_doctest
if include != "" and filename.startswith(include)
]
if included_by:
with_doctest = True
for exclude in self.exclude_from_doctest:
if exclude != "" and filename.startswith(exclude):
with_doctest = False
overlaped_by = [
include
for include in included_by
if include.startswith(exclude)
]
if overlaped_by:
with_doctest = True
super(FlakesChecker, self).__init__(
tree,
filename=filename,
withDoctest=with_doctest,
file_tokens=file_tokens,
)
@classmethod
def add_options(cls, parser):
"""Register options for PyFlakes on the Flake8 OptionManager."""
parser.add_option(
"--builtins",
parse_from_config=True,
comma_separated_list=True,
help="define more built-ins, comma separated",
)
parser.add_option(
"--doctests",
default=False,
action="store_true",
parse_from_config=True,
help="check syntax of the doctests",
)
parser.add_option(
"--include-in-doctest",
default="",
dest="include_in_doctest",
parse_from_config=True,
comma_separated_list=True,
normalize_paths=True,
help="Run doctests only on these files",
type="string",
)
parser.add_option(
"--exclude-from-doctest",
default="",
dest="exclude_from_doctest",
parse_from_config=True,
comma_separated_list=True,
normalize_paths=True,
help="Skip these files when running doctests",
type="string",
)
@classmethod
def parse_options(cls, options):
"""Parse option values from Flake8's OptionManager."""
if options.builtins:
cls.builtIns = cls.builtIns.union(options.builtins)
cls.with_doctest = options.doctests
included_files = []
for included_file in options.include_in_doctest:
if included_file == "":
continue
if not included_file.startswith((os.sep, "./", "~/")):
included_files.append("./" + included_file)
else:
included_files.append(included_file)
cls.include_in_doctest = utils.normalize_paths(included_files)
excluded_files = []
for excluded_file in options.exclude_from_doctest:
if excluded_file == "":
continue
if not excluded_file.startswith((os.sep, "./", "~/")):
excluded_files.append("./" + excluded_file)
else:
excluded_files.append(excluded_file)
cls.exclude_from_doctest = utils.normalize_paths(excluded_files)
inc_exc = set(cls.include_in_doctest).intersection(
cls.exclude_from_doctest
)
if inc_exc:
raise ValueError(
'"%s" was specified in both the '
"include-in-doctest and exclude-from-doctest "
"options. You are not allowed to specify it in "
"both for doctesting." % inc_exc
)
def run(self):
"""Run the plugin."""
for message in self.messages:
col = getattr(message, "col", 0)
yield (
message.lineno,
col,
"{} {}".format(
FLAKE8_PYFLAKES_CODES.get(type(message).__name__, "F999"),
message.message % message.message_args,
),
message.__class__,
)
| [
"[email protected]"
] | |
1d0479b10748363c8598f680dd8ac691974f0c9e | 11060ca244940baef96a51d794d73aab44fc31c6 | /src/brainstorming/tornado/modbus/pymodbus/__init__.py | 0bb3d9b53e2360b44fb5246e72a6c065e1fdb427 | [] | no_license | D3f0/txscada | eb54072b7311068a181c05a03076a0b835bb0fe1 | f8e1fd067a1d001006163e8c3316029f37af139c | refs/heads/master | 2020-12-24T06:27:17.042056 | 2016-07-27T17:17:56 | 2016-07-27T17:17:56 | 3,565,335 | 9 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,280 | py | """
Pymodbus: Modbus Protocol Implementation
-----------------------------------------
This package can supply modbus clients and servers:
client:
- Can perform single get/set on discretes and registers
- Can perform multiple get/set on discretes and registers
    - Working on diagnostic/file/pipe/setting/info requests
- Can fully scrape a host to be cloned
server:
- Can function as a fully implemented TCP modbus server
- Working on creating server control context
- Working on serial communication
    - Working on functioning as an RTU/ASCII
- Can mimic a server based on the supplied input data
TwistedModbus is built on top of the Pymodbus developed from code by:
Copyright (c) 2001-2005 S.W.A.C. GmbH, Germany.
Copyright (c) 2001-2005 S.W.A.C. Bohemia s.r.o., Czech Republic.
Hynek Petrak <[email protected]>
Released under the GPLv2
"""
from pymodbus.version import _version
__version__ = _version.short().split('+')[0]
#---------------------------------------------------------------------------#
# Block unhandled logging
#---------------------------------------------------------------------------#
import logging
class NullHandler(logging.Handler):
def emit(self, record):
pass
h = NullHandler()
logging.getLogger("pymodbus").addHandler(h)
| [
"devnull@localhost"
] | devnull@localhost |
ac8fcee7be310f87e1cf6a7479d7dec05c585cc6 | 6413fe58b04ac2a7efe1e56050ad42d0e688adc6 | /tempenv/lib/python3.7/site-packages/dash_bootstrap_components/_components/CardText.py | c0146873ce75910bc6733eabc85670d925f82320 | [
"MIT"
] | permissive | tytechortz/Denver_temperature | 7f91e0ac649f9584147d59193568f6ec7efe3a77 | 9d9ea31cd7ec003e8431dcbb10a3320be272996d | refs/heads/master | 2022-12-09T06:22:14.963463 | 2019-10-09T16:30:52 | 2019-10-09T16:30:52 | 170,581,559 | 1 | 0 | MIT | 2022-06-21T23:04:21 | 2019-02-13T21:22:53 | Python | UTF-8 | Python | false | false | 3,332 | py | # AUTO GENERATED FILE - DO NOT EDIT
from dash.development.base_component import Component, _explicitize_args
class CardText(Component):
"""A CardText component.
Keyword arguments:
- children (a list of or a singular dash component, string or number; optional): The children of this component
- id (string; optional): The ID of this component, used to identify dash components
in callbacks. The ID needs to be unique across all of the
components in an app.
- style (dict; optional): Defines CSS styles which will override styles previously set.
- className (string; optional): Often used with CSS to style elements with common properties.
- key (string; optional): A unique identifier for the component, used to improve
performance by React.js while rendering components
See https://reactjs.org/docs/lists-and-keys.html for more info
- tag (string; optional): HTML tag to use for the card text, default: p
- color (string; optional): Text color, options: primary, secondary, success, warning, danger, info,
muted, light, dark, body, white, black-50, white-50."""
@_explicitize_args
def __init__(self, children=None, id=Component.UNDEFINED, style=Component.UNDEFINED, className=Component.UNDEFINED, key=Component.UNDEFINED, tag=Component.UNDEFINED, color=Component.UNDEFINED, **kwargs):
self._prop_names = ['children', 'id', 'style', 'className', 'key', 'tag', 'color']
self._type = 'CardText'
self._namespace = 'dash_bootstrap_components/_components'
self._valid_wildcard_attributes = []
self.available_properties = ['children', 'id', 'style', 'className', 'key', 'tag', 'color']
self.available_wildcard_properties = []
_explicit_args = kwargs.pop('_explicit_args')
_locals = locals()
_locals.update(kwargs) # For wildcard attrs
args = {k: _locals[k] for k in _explicit_args if k != 'children'}
for k in []:
if k not in args:
raise TypeError(
'Required argument `' + k + '` was not specified.')
super(CardText, self).__init__(children=children, **args)
def __repr__(self):
if(any(getattr(self, c, None) is not None
for c in self._prop_names
if c is not self._prop_names[0])
or any(getattr(self, c, None) is not None
for c in self.__dict__.keys()
if any(c.startswith(wc_attr)
for wc_attr in self._valid_wildcard_attributes))):
props_string = ', '.join([c+'='+repr(getattr(self, c, None))
for c in self._prop_names
if getattr(self, c, None) is not None])
wilds_string = ', '.join([c+'='+repr(getattr(self, c, None))
for c in self.__dict__.keys()
if any([c.startswith(wc_attr)
for wc_attr in
self._valid_wildcard_attributes])])
return ('CardText(' + props_string +
(', ' + wilds_string if wilds_string != '' else '') + ')')
else:
return (
'CardText(' +
repr(getattr(self, self._prop_names[0], None)) + ')')
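# A minimal usage sketch (illustrative only; assumes the package is
# imported as dbc inside a Dash app):
# import dash_bootstrap_components as dbc
# text = dbc.CardText('Some quick example text', color='muted')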
| [
"[email protected]"
] | |
f2acacf75129142364d47c4372031342a19566a9 | 1554150a9720ebf35cd11c746f69169b595dca10 | /tk_practise/shape_display_view.py | 908a4219294e3677bf29d3a5afa33665d56b7ca5 | [] | no_license | andrewili/shape-grammar-engine | 37a809f8cf78b133f8f1c3f9cf13a7fbbb564713 | 2859d8021442542561bdd1387deebc85e26f2d03 | refs/heads/master | 2021-01-18T22:46:51.221257 | 2016-05-31T21:15:28 | 2016-05-31T21:15:28 | 14,129,359 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,640 | py | # shape_display_view.py
import Tkinter as tk
import tkFileDialog
import tkFont
import ttk
class Observable(object):
def __init__(self):
self.observers = []
def broadcast(self, widget):
for observer in self.observers:
observer.respond(widget)
def add_observer(self, observer):
self.observers.append(observer)
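# Observers registered with add_observer must expose a respond(widget)
# method; broadcast calls it with the widget that fired the event.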
class View(tk.Toplevel, Observable):
def __init__(self, master):
tk.Toplevel.__init__(self, master)
self.protocol('WM_DELETE_WINDOW', self.master.destroy)
Observable.__init__(self)
self.title('Shape display 2014-04-03')
self.text_var_a = tk.StringVar()
self.text_var_b = tk.StringVar()
self.text_var_c = tk.StringVar()
self.label_width = 28
self.label_height = 15
self.label_font = ('Andale Mono', '11')
self.background_color = '#EEEEEE'
self._make_main_frame()
self._make_label_frame_a( 0, 0)
self._make_spacer( 1, 0)
self._make_label_frame_b( 2, 0)
self._make_spacer( 3, 0)
self._make_label_frame_buttons( 4, 0)
self._make_spacer( 5, 0)
self._make_label_frame_c( 6, 0)
def _make_main_frame(self):
self.mainframe = ttk.Frame(
self,
padding='10 10 10 10')
self.mainframe.grid(
column=0,
row=0,
sticky='NSEW')
self.mainframe.rowconfigure(
0,
weight=1)
self.mainframe.columnconfigure(
0,
weight=1)
def _make_label_frame_a(self, column_in, row_in):
self.label_frame_a = ttk.LabelFrame(
self.mainframe)
self.label_frame_a.grid(
column=column_in,
row=row_in,
sticky='EW')
self.canvas_a = self.make_canvas(
self.label_frame_a,
0, 0)
self.get_lshape_a_button = ttk.Button(
self.label_frame_a,
width=15,
text='Get A',
command=(self.get_lshape_a))
self.get_lshape_a_button.grid(
column=0,
row=2)
self.label_a = tk.Label(
self.label_frame_a,
width=self.label_width,
height=self.label_height,
textvariable=self.text_var_a,
anchor=tk.NW,
justify=tk.LEFT,
font=self.label_font)
self.label_a.grid(
column=0,
row=3)
def _make_label_frame_b(self, column_in, row_in):
self.label_frame_b = ttk.LabelFrame(
self.mainframe)
self.label_frame_b.grid(
column=column_in,
row=row_in,
sticky='EW')
self.canvas_b = self.make_canvas(
self.label_frame_b,
0, 0)
self.get_lshape_b_button = ttk.Button(
self.label_frame_b,
width=15,
text='Get B',
command=self.get_lshape_b)
self.get_lshape_b_button.grid(
column=0,
row=2)
self.label_b = tk.Label(
self.label_frame_b,
width=self.label_width,
height=self.label_height,
textvariable=self.text_var_b,
anchor=tk.NW,
justify=tk.LEFT,
font=self.label_font)
self.label_b.grid(
column=0,
row=3)
def _make_label_frame_buttons(self, column_in, row_in):
self.label_frame_buttons = ttk.LabelFrame(
self.mainframe)
self.label_frame_buttons.grid(
column=column_in,
row=row_in,
sticky='NEW')
self.result_button_frame_spacer_upper = tk.Label(
self.label_frame_buttons,
height=5,
background=self.background_color)
self.result_button_frame_spacer_upper.grid(
column=0,
row=0)
self.get_lshape_a_plus_b_button = ttk.Button(
self.label_frame_buttons,
width=15,
text='A + B',
command=self.get_lshape_a_plus_b)
self.get_lshape_a_plus_b_button.grid(
column=0,
row=1)
self.get_lshape_a_minus_b_button = ttk.Button(
self.label_frame_buttons,
width=15,
text='A - B',
command=self.get_lshape_a_minus_b)
self.get_lshape_a_minus_b_button.grid(
column=0,
row=2)
self.get_lshape_a_sub_lshape_b_button = ttk.Button(
self.label_frame_buttons,
width=15,
text='A <= B',
command=self.get_lshape_a_sub_lshape_b)
self.get_lshape_a_sub_lshape_b_button.grid(
column=0,
row=3)
self.result_button_frame_spacer_lower = tk.Label(
self.label_frame_buttons,
height=17,
background=self.background_color)
self.result_button_frame_spacer_lower.grid(
column=0,
row=4)
def _make_label_frame_c(self, column_in, row_in):
self.label_frame_c = ttk.LabelFrame(
self.mainframe)
self.label_frame_c.grid(
column=column_in,
row=row_in,
sticky='NEW')
self.canvas_c = self.make_canvas(
self.label_frame_c,
0, 0)
self.spacer_c = tk.Label(
self.label_frame_c,
width=2,
background=self.background_color,
text=' ')
self.spacer_c.grid(
column=0,
row=1)
self.label_c = tk.Label(
self.label_frame_c,
width=self.label_width,
height=self.label_height,
textvariable=self.text_var_c,
anchor=tk.NW,
justify=tk.LEFT,
font=self.label_font)
self.label_c.grid(
column=0,
row=2)
def make_canvas(self, parent, column_in, row_in):
canvas = tk.Canvas(
parent,
width=200,
height=200,
background='#DDDDDD') # use constant
canvas.xview_moveto(0) # move origin to visible area
canvas.yview_moveto(0)
canvas.grid(
column=column_in,
row=row_in,
sticky='EW')
return canvas
def _make_spacer(self, column_in, row_in):
self.spacer = tk.Label(
self.mainframe,
width=2,
background=self.background_color,
text=' ')
self.spacer.grid(
column=column_in,
row=row_in)
## def make_spacer_above_buttons(self, column_in, row_in):
## spacer = tk.Label(
## self.mainframe,
## width=2,
## height=5,
## text=' ')
## spacer.grid(
## column=column_in,
## row=row_in)
def get_lshape_a(self):
self.file_a = tkFileDialog.askopenfile()
self.broadcast(self.get_lshape_a_button)
def get_lshape_b(self):
self.file_b = tkFileDialog.askopenfile()
self.broadcast(self.get_lshape_b_button)
def get_lshape_a_plus_b(self):
self.broadcast(self.get_lshape_a_plus_b_button)
def get_lshape_a_minus_b(self):
self.broadcast(self.get_lshape_a_minus_b_button)
def get_lshape_a_sub_lshape_b(self):
self.broadcast(self.get_lshape_a_sub_lshape_b_button)
if __name__ == '__main__':
import doctest
doctest.testfile('tests/shape_display_view_test.txt')
| [
"[email protected]"
] | |
3609cbd86fe366108bed83305f57d5ac02c3ce24 | a2dc75a80398dee58c49fa00759ac99cfefeea36 | /bluebottle/bb_projects/migrations/0018_auto_20210302_1417.py | 69d49b4e0234875309c1a920a6cf0af3e76ba9e8 | [
"BSD-2-Clause"
] | permissive | onepercentclub/bluebottle | e38b0df2218772adf9febb8c6e25a2937889acc0 | 2b5f3562584137c8c9f5392265db1ab8ee8acf75 | refs/heads/master | 2023-08-29T14:01:50.565314 | 2023-08-24T11:18:58 | 2023-08-24T11:18:58 | 13,149,527 | 15 | 9 | BSD-3-Clause | 2023-09-13T10:46:20 | 2013-09-27T12:09:13 | Python | UTF-8 | Python | false | false | 956 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2021-03-02 13:17
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0017_auto_20210302_1417'),
('projects', '0095_auto_20210302_1417'),
('suggestions', '0005_auto_20210302_1417'),
('initiatives', '0030_auto_20210302_1405'),
('members', '0041_auto_20210302_1416'),
]
state_operations = [
migrations.DeleteModel(
name='ProjectTheme',
),
migrations.DeleteModel(
name='ProjectThemeTranslation',
),
]
operations = [
migrations.SeparateDatabaseAndState(
state_operations=state_operations
),
migrations.DeleteModel(
name='ProjectPhase',
),
migrations.DeleteModel(
name='ProjectPhaseTranslation',
),
]
| [
"[email protected]"
] | |
0fbc3c0d1ea493c7b8c03b62c9104b1f4803931c | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/2/ef3.py | 7ea48f160f18a4c7e1914e1c32d84b3d8df9be75 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'eF3':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
    main(sys.argv[1])
| [
"[email protected]"
] | |
9b73114f7ea4cb451dfbd939500b3c97b30e2d8a | 673440c09033912157d1c3767d5308f95755e76a | /ManachersAlgo.py | 34e2ae34f01f3af98fb2e6b72aa5e397af5e4c02 | [] | no_license | jagadeshwarrao/programming | 414193b1c538e37684378233d0532bd786d63b32 | 1b343251a8ad6a81e307d31b2025b11e0b28a707 | refs/heads/master | 2023-02-02T19:26:21.187561 | 2020-12-21T18:21:00 | 2020-12-21T18:21:00 | 274,644,612 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,482 | py |
def findLongestPalindromicString(text):
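    # Manacher's algorithm over an implicit transformed string of length
    # 2*N+1 in which even positions are boundaries between characters;
    # position i maps back to text[i/2] (Python 2 integer division), so
    # odd- and even-length palindromes are handled uniformly.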
N = len(text)
if N == 0:
return
N = 2*N+1
L = [0] * N
L[0] = 0
L[1] = 1
C = 1
R = 2
i = 0
iMirror = 0
maxLPSLength = 0
maxLPSCenterPosition = 0
start = -1
end = -1
diff = -1
for i in xrange(2,N):
iMirror = 2*C-i
L[i] = 0
diff = R - i
if diff > 0:
L[i] = min(L[iMirror], diff)
try:
while ((i+L[i]) < N and (i-L[i]) > 0) and \
(((i+L[i]+1) % 2 == 0) or \
(text[(i+L[i]+1)/2] == text[(i-L[i]-1)/2])):
L[i]+=1
except Exception as e:
pass
if L[i] > maxLPSLength:
maxLPSLength = L[i]
maxLPSCenterPosition = i
if i + L[i] > R:
C = i
R = i + L[i]
start = (maxLPSCenterPosition - maxLPSLength) / 2
end = start + maxLPSLength - 1
print "LPS of string is " + text + " : ",
print text[start:end+1],
print "\n",
text1 = "babcbabcbaccba"
findLongestPalindromicString(text1)
text2 = "abaaba"
findLongestPalindromicString(text2)
text3 = "abababa"
findLongestPalindromicString(text3)
text4 = "abcbabcbabcba"
findLongestPalindromicString(text4)
text5 = "forgeeksskeegfor"
findLongestPalindromicString(text5)
text6 = "caba"
findLongestPalindromicString(text6)
text7 = "abacdfgdcaba"
findLongestPalindromicString(text7)
text8 = "abacdfgdcabba"
findLongestPalindromicString(text8)
text9 = "abacdedcaba"
findLongestPalindromicString(text9)
| [
"[email protected]"
] | |
a40784738ed092668081456e1b724bb29a5780e8 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2790/60589/243105.py | 230152d093784ddcfff077a0a0b37bbdb892f405 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 350 | py | nm=input().split(' ')
n=int(nm[0])
m=int(nm[1])
a=list(map(int,input().split(' ')))
b=list(map(int,input().split(' ')))
ans=[]
a.sort()
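# The scan below finds the first index with a[i] > e, i.e. the value of
# bisect.bisect_right(a, e) on the sorted list; the stdlib bisect module
# would answer each query in O(log n) instead of O(n).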
for e in b:
has=False
for i in range(n):
if a[i]>e:
has=True
ans.append(i)
break
if not has:
ans.append(n)
ans=list(map(str,ans))
print(' '.join(ans))
| [
"[email protected]"
] | |
6677355c1c7383d94b434226fae40b8cf76ba2d0 | bdf86d69efc1c5b21950c316ddd078ad8a2f2ec0 | /venv/Lib/site-packages/twisted/plugins/twisted_core.py | a66ad7f0104dc02e960fa9fecfcfe59830bb8d40 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | DuaNoDo/PythonProject | 543e153553c58e7174031b910fd6451399afcc81 | 2c5c8aa89dda4dec2ff4ca7171189788bf8b5f2c | refs/heads/master | 2020-05-07T22:22:29.878944 | 2019-06-14T07:44:35 | 2019-06-14T07:44:35 | 180,941,166 | 1 | 1 | null | 2019-06-04T06:27:29 | 2019-04-12T06:05:42 | Python | UTF-8 | Python | false | false | 588 | py | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from __future__ import absolute_import, division
from twisted.internet.endpoints import (
_SystemdParser, _TCP6ServerParser, _StandardIOParser,
_TLSClientEndpointParser)
from twisted.protocols.haproxy._parser import (
HAProxyServerParser as _HAProxyServerParser
)
systemdEndpointParser = _SystemdParser()
tcp6ServerEndpointParser = _TCP6ServerParser()
stdioEndpointParser = _StandardIOParser()
tlsClientEndpointParser = _TLSClientEndpointParser()
_haProxyServerEndpointParser = _HAProxyServerParser()
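# Twisted's plugin loader imports the modules under twisted/plugins and
# collects these module-level parser instances, which register the
# systemd:, tcp6:, stdio:, tls: and haproxy: endpoint string prefixes.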
| [
"[email protected]"
] | |
9b4d79f7d378c8eb47d4f656f32305c8efc4ff83 | a2d36e471988e0fae32e9a9d559204ebb065ab7f | /huaweicloud-sdk-ocr/huaweicloudsdkocr/v1/model/passport_result.py | 7d18f30f08e2ae7da04b81723bcf468ba27a7646 | [
"Apache-2.0"
] | permissive | zhouxy666/huaweicloud-sdk-python-v3 | 4d878a90b8e003875fc803a61414788e5e4c2c34 | cc6f10a53205be4cb111d3ecfef8135ea804fa15 | refs/heads/master | 2023-09-02T07:41:12.605394 | 2021-11-12T03:20:11 | 2021-11-12T03:20:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,941 | py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class PassportResult:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'passport_type': 'str',
'country_code': 'str',
'passport_number': 'str',
'nationality': 'str',
'surname': 'str',
'given_name': 'str',
'sex': 'str',
'date_of_birth': 'str',
'date_of_expiry': 'str',
'date_of_issue': 'str',
'place_of_birth': 'str',
'place_of_issue': 'str',
'issuing_authority': 'str',
'confidence': 'object',
'extra_info': 'object'
}
attribute_map = {
'passport_type': 'passport_type',
'country_code': 'country_code',
'passport_number': 'passport_number',
'nationality': 'nationality',
'surname': 'surname',
'given_name': 'given_name',
'sex': 'sex',
'date_of_birth': 'date_of_birth',
'date_of_expiry': 'date_of_expiry',
'date_of_issue': 'date_of_issue',
'place_of_birth': 'place_of_birth',
'place_of_issue': 'place_of_issue',
'issuing_authority': 'issuing_authority',
'confidence': 'confidence',
'extra_info': 'extra_info'
}
def __init__(self, passport_type=None, country_code=None, passport_number=None, nationality=None, surname=None, given_name=None, sex=None, date_of_birth=None, date_of_expiry=None, date_of_issue=None, place_of_birth=None, place_of_issue=None, issuing_authority=None, confidence=None, extra_info=None):
"""PassportResult - a model defined in huaweicloud sdk"""
self._passport_type = None
self._country_code = None
self._passport_number = None
self._nationality = None
self._surname = None
self._given_name = None
self._sex = None
self._date_of_birth = None
self._date_of_expiry = None
self._date_of_issue = None
self._place_of_birth = None
self._place_of_issue = None
self._issuing_authority = None
self._confidence = None
self._extra_info = None
self.discriminator = None
if passport_type is not None:
self.passport_type = passport_type
if country_code is not None:
self.country_code = country_code
if passport_number is not None:
self.passport_number = passport_number
if nationality is not None:
self.nationality = nationality
if surname is not None:
self.surname = surname
if given_name is not None:
self.given_name = given_name
if sex is not None:
self.sex = sex
if date_of_birth is not None:
self.date_of_birth = date_of_birth
if date_of_expiry is not None:
self.date_of_expiry = date_of_expiry
if date_of_issue is not None:
self.date_of_issue = date_of_issue
if place_of_birth is not None:
self.place_of_birth = place_of_birth
if place_of_issue is not None:
self.place_of_issue = place_of_issue
if issuing_authority is not None:
self.issuing_authority = issuing_authority
if confidence is not None:
self.confidence = confidence
if extra_info is not None:
self.extra_info = extra_info
@property
def passport_type(self):
"""Gets the passport_type of this PassportResult.
        Passport type (P: ordinary private passport, W: diplomatic passport, G: official passport), in English.
:return: The passport_type of this PassportResult.
:rtype: str
"""
return self._passport_type
@passport_type.setter
def passport_type(self, passport_type):
"""Sets the passport_type of this PassportResult.
        Passport type (P: ordinary private passport, W: diplomatic passport, G: official passport), in English.
:param passport_type: The passport_type of this PassportResult.
:type: str
"""
self._passport_type = passport_type
@property
def country_code(self):
"""Gets the country_code of this PassportResult.
        Country code of the country that issued the passport, in English.
:return: The country_code of this PassportResult.
:rtype: str
"""
return self._country_code
@country_code.setter
def country_code(self, country_code):
"""Sets the country_code of this PassportResult.
        Country code of the country that issued the passport, in English.
:param country_code: The country_code of this PassportResult.
:type: str
"""
self._country_code = country_code
@property
def passport_number(self):
"""Gets the passport_number of this PassportResult.
        Passport number, in English.
:return: The passport_number of this PassportResult.
:rtype: str
"""
return self._passport_number
@passport_number.setter
def passport_number(self, passport_number):
"""Sets the passport_number of this PassportResult.
        Passport number, in English.
:param passport_number: The passport_number of this PassportResult.
:type: str
"""
self._passport_number = passport_number
@property
def nationality(self):
"""Gets the nationality of this PassportResult.
        Nationality of the passport holder, in English.
:return: The nationality of this PassportResult.
:rtype: str
"""
return self._nationality
@nationality.setter
def nationality(self, nationality):
"""Sets the nationality of this PassportResult.
        Nationality of the passport holder, in English.
:param nationality: The nationality of this PassportResult.
:type: str
"""
self._nationality = nationality
@property
def surname(self):
"""Gets the surname of this PassportResult.
        Surname, in English.
:return: The surname of this PassportResult.
:rtype: str
"""
return self._surname
@surname.setter
def surname(self, surname):
"""Sets the surname of this PassportResult.
        Surname, in English.
:param surname: The surname of this PassportResult.
:type: str
"""
self._surname = surname
@property
def given_name(self):
"""Gets the given_name of this PassportResult.
        Given name, in English.
:return: The given_name of this PassportResult.
:rtype: str
"""
return self._given_name
@given_name.setter
def given_name(self, given_name):
"""Sets the given_name of this PassportResult.
        Given name, in English.
:param given_name: The given_name of this PassportResult.
:type: str
"""
self._given_name = given_name
@property
def sex(self):
"""Gets the sex of this PassportResult.
        Sex, in English.
:return: The sex of this PassportResult.
:rtype: str
"""
return self._sex
@sex.setter
def sex(self, sex):
"""Sets the sex of this PassportResult.
        Sex, in English.
:param sex: The sex of this PassportResult.
:type: str
"""
self._sex = sex
@property
def date_of_birth(self):
"""Gets the date_of_birth of this PassportResult.
        Date of birth, in English.
:return: The date_of_birth of this PassportResult.
:rtype: str
"""
return self._date_of_birth
@date_of_birth.setter
def date_of_birth(self, date_of_birth):
"""Sets the date_of_birth of this PassportResult.
        Date of birth, in English.
:param date_of_birth: The date_of_birth of this PassportResult.
:type: str
"""
self._date_of_birth = date_of_birth
@property
def date_of_expiry(self):
"""Gets the date_of_expiry of this PassportResult.
        Date of expiry of the passport, in English.
:return: The date_of_expiry of this PassportResult.
:rtype: str
"""
return self._date_of_expiry
@date_of_expiry.setter
def date_of_expiry(self, date_of_expiry):
"""Sets the date_of_expiry of this PassportResult.
        Date of expiry of the passport, in English.
:param date_of_expiry: The date_of_expiry of this PassportResult.
:type: str
"""
self._date_of_expiry = date_of_expiry
@property
def date_of_issue(self):
"""Gets the date_of_issue of this PassportResult.
        Date of issue of the passport, in English.
:return: The date_of_issue of this PassportResult.
:rtype: str
"""
return self._date_of_issue
@date_of_issue.setter
def date_of_issue(self, date_of_issue):
"""Sets the date_of_issue of this PassportResult.
        Date of issue of the passport, in English.
:param date_of_issue: The date_of_issue of this PassportResult.
:type: str
"""
self._date_of_issue = date_of_issue
@property
def place_of_birth(self):
"""Gets the place_of_birth of this PassportResult.
        Place of birth, in English.
:return: The place_of_birth of this PassportResult.
:rtype: str
"""
return self._place_of_birth
@place_of_birth.setter
def place_of_birth(self, place_of_birth):
"""Sets the place_of_birth of this PassportResult.
        Place of birth, in English.
:param place_of_birth: The place_of_birth of this PassportResult.
:type: str
"""
self._place_of_birth = place_of_birth
@property
def place_of_issue(self):
"""Gets the place_of_issue of this PassportResult.
        Place of issue, in English.
:return: The place_of_issue of this PassportResult.
:rtype: str
"""
return self._place_of_issue
@place_of_issue.setter
def place_of_issue(self, place_of_issue):
"""Sets the place_of_issue of this PassportResult.
        Place of issue, in English.
:param place_of_issue: The place_of_issue of this PassportResult.
:type: str
"""
self._place_of_issue = place_of_issue
@property
def issuing_authority(self):
"""Gets the issuing_authority of this PassportResult.
        Issuing authority, in English; for China the English abbreviation is uniformly output as P.R.China.
:return: The issuing_authority of this PassportResult.
:rtype: str
"""
return self._issuing_authority
@issuing_authority.setter
def issuing_authority(self, issuing_authority):
"""Sets the issuing_authority of this PassportResult.
        Issuing authority, in English; for China the English abbreviation is uniformly output as P.R.China.
:param issuing_authority: The issuing_authority of this PassportResult.
:type: str
"""
self._issuing_authority = issuing_authority
@property
def confidence(self):
"""Gets the confidence of this PassportResult.
        Confidence information for the related fields. Higher confidence means the recognized value of the corresponding field is more reliable; in the statistical sense, higher confidence correlates with higher accuracy. The confidence is produced by the algorithm and is not directly equivalent to the accuracy of the field.
:return: The confidence of this PassportResult.
:rtype: object
"""
return self._confidence
@confidence.setter
def confidence(self, confidence):
"""Sets the confidence of this PassportResult.
        Confidence information for the related fields. Higher confidence means the recognized value of the corresponding field is more reliable; in the statistical sense, higher confidence correlates with higher accuracy. The confidence is produced by the algorithm and is not directly equivalent to the accuracy of the field.
:param confidence: The confidence of this PassportResult.
:type: object
"""
self._confidence = confidence
@property
def extra_info(self):
"""Gets the extra_info of this PassportResult.
        Empty by default. For passports from some common countries, extra_info contains the passport fields described in the local official language, plus other information. A Chinese passport, for example, includes the name, place of birth, etc. written in Chinese characters.
:return: The extra_info of this PassportResult.
:rtype: object
"""
return self._extra_info
@extra_info.setter
def extra_info(self, extra_info):
"""Sets the extra_info of this PassportResult.
        Empty by default. For passports from some common countries, extra_info contains the passport fields described in the local official language, plus other information. A Chinese passport, for example, includes the name, place of birth, etc. written in Chinese characters.
:param extra_info: The extra_info of this PassportResult.
:type: object
"""
self._extra_info = extra_info
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
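            # Py2-only workaround: re-enable setdefaultencoding so that
            # non-ASCII field values serialise cleanly through simplejson.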
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PassportResult):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
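# Usage sketch (hypothetical field values; not part of the generated module):
#   result = PassportResult(passport_number="E12345678", nationality="CHN")
#   result.to_dict()["passport_number"]   # -> "E12345678"
#   str(result)                           # JSON rendering via to_str()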
| [
"[email protected]"
] | |
9962922584c412b05fbb00dc271d5fd91f46fe79 | 23611933f0faba84fc82a1bc0a85d97cf45aba99 | /google-cloud-sdk/.install/.backup/lib/third_party/ruamel/yaml/resolver.py | 84227072e066b8f2528baaf4a25c43995cd4061a | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] | permissive | KaranToor/MA450 | 1f112d1caccebdc04702a77d5a6cee867c15f75c | c98b58aeb0994e011df960163541e9379ae7ea06 | refs/heads/master | 2021-06-21T06:17:42.585908 | 2020-12-24T00:36:28 | 2020-12-24T00:36:28 | 79,285,433 | 1 | 1 | Apache-2.0 | 2020-12-24T00:38:09 | 2017-01-18T00:05:44 | Python | UTF-8 | Python | false | false | 14,599 | py | # coding: utf-8
from __future__ import absolute_import
import re
try:
from .error import * # NOQA
from .nodes import * # NOQA
from .compat import string_types
except (ImportError, ValueError): # for Jython
from ruamel.yaml.error import * # NOQA
from ruamel.yaml.nodes import * # NOQA
from ruamel.yaml.compat import string_types
__all__ = ['BaseResolver', 'Resolver', 'VersionedResolver']
_DEFAULT_VERSION = (1, 2)
class ResolverError(YAMLError):
pass
class BaseResolver(object):
DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq'
DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map'
yaml_implicit_resolvers = {}
yaml_path_resolvers = {}
def __init__(self):
self._loader_version = None
self.resolver_exact_paths = []
self.resolver_prefix_paths = []
@classmethod
def add_implicit_resolver(cls, tag, regexp, first):
if 'yaml_implicit_resolvers' not in cls.__dict__:
cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
if first is None:
first = [None]
for ch in first:
cls.yaml_implicit_resolvers.setdefault(ch, []).append(
(tag, regexp))
@classmethod
def add_path_resolver(cls, tag, path, kind=None):
# Note: `add_path_resolver` is experimental. The API could be changed.
# `new_path` is a pattern that is matched against the path from the
# root to the node that is being considered. `node_path` elements are
# tuples `(node_check, index_check)`. `node_check` is a node class:
# `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None`
# matches any kind of a node. `index_check` could be `None`, a boolean
# value, a string value, or a number. `None` and `False` match against
# any _value_ of sequence and mapping nodes. `True` matches against
# any _key_ of a mapping node. A string `index_check` matches against
# a mapping value that corresponds to a scalar key which content is
# equal to the `index_check` value. An integer `index_check` matches
# against a sequence value with the index equal to `index_check`.
if 'yaml_path_resolvers' not in cls.__dict__:
cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
new_path = []
for element in path:
if isinstance(element, (list, tuple)):
if len(element) == 2:
node_check, index_check = element
elif len(element) == 1:
node_check = element[0]
index_check = True
else:
raise ResolverError("Invalid path element: %s" % element)
else:
node_check = None
index_check = element
if node_check is str:
node_check = ScalarNode
elif node_check is list:
node_check = SequenceNode
elif node_check is dict:
node_check = MappingNode
elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
and not isinstance(node_check, string_types) \
and node_check is not None:
raise ResolverError("Invalid node checker: %s" % node_check)
if not isinstance(index_check, (string_types, int)) \
and index_check is not None:
raise ResolverError("Invalid index checker: %s" % index_check)
new_path.append((node_check, index_check))
if kind is str:
kind = ScalarNode
elif kind is list:
kind = SequenceNode
elif kind is dict:
kind = MappingNode
elif kind not in [ScalarNode, SequenceNode, MappingNode] \
and kind is not None:
raise ResolverError("Invalid node kind: %s" % kind)
cls.yaml_path_resolvers[tuple(new_path), kind] = tag
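        # Usage sketch (PyYAML-style path resolver; experimental API as noted
        # above): force the scalar value of a top-level "version" key to load
        # as a string whatever it looks like:
        #   Resolver.add_path_resolver(u'tag:yaml.org,2002:str',
        #                              [(dict, u'version')], kind=str)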
def descend_resolver(self, current_node, current_index):
if not self.yaml_path_resolvers:
return
exact_paths = {}
prefix_paths = []
if current_node:
depth = len(self.resolver_prefix_paths)
for path, kind in self.resolver_prefix_paths[-1]:
if self.check_resolver_prefix(depth, path, kind,
current_node, current_index):
if len(path) > depth:
prefix_paths.append((path, kind))
else:
exact_paths[kind] = self.yaml_path_resolvers[path,
kind]
else:
for path, kind in self.yaml_path_resolvers:
if not path:
exact_paths[kind] = self.yaml_path_resolvers[path, kind]
else:
prefix_paths.append((path, kind))
self.resolver_exact_paths.append(exact_paths)
self.resolver_prefix_paths.append(prefix_paths)
def ascend_resolver(self):
if not self.yaml_path_resolvers:
return
self.resolver_exact_paths.pop()
self.resolver_prefix_paths.pop()
def check_resolver_prefix(self, depth, path, kind,
current_node, current_index):
node_check, index_check = path[depth-1]
if isinstance(node_check, string_types):
if current_node.tag != node_check:
return
elif node_check is not None:
if not isinstance(current_node, node_check):
return
if index_check is True and current_index is not None:
return
if (index_check is False or index_check is None) \
and current_index is None:
return
if isinstance(index_check, string_types):
if not (isinstance(current_index, ScalarNode) and
index_check == current_index.value):
return
elif isinstance(index_check, int) and not isinstance(index_check,
bool):
if index_check != current_index:
return
return True
def resolve(self, kind, value, implicit):
if kind is ScalarNode and implicit[0]:
if value == u'':
resolvers = self.yaml_implicit_resolvers.get(u'', [])
else:
resolvers = self.yaml_implicit_resolvers.get(value[0], [])
resolvers += self.yaml_implicit_resolvers.get(None, [])
for tag, regexp in resolvers:
if regexp.match(value):
return tag
implicit = implicit[1]
if self.yaml_path_resolvers:
exact_paths = self.resolver_exact_paths[-1]
if kind in exact_paths:
return exact_paths[kind]
if None in exact_paths:
return exact_paths[None]
if kind is ScalarNode:
return self.DEFAULT_SCALAR_TAG
elif kind is SequenceNode:
return self.DEFAULT_SEQUENCE_TAG
elif kind is MappingNode:
return self.DEFAULT_MAPPING_TAG
@property
def processing_version(self):
return None
class Resolver(BaseResolver):
pass
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:bool',
re.compile(u'''^(?:yes|Yes|YES|no|No|NO
|true|True|TRUE|false|False|FALSE
|on|On|ON|off|Off|OFF)$''', re.X),
list(u'yYnNtTfFoO'))
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:float',
re.compile(u'''^(?:
[-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
|[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
|\\.[0-9_]+(?:[eE][-+][0-9]+)?
|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*
|[-+]?\\.(?:inf|Inf|INF)
|\\.(?:nan|NaN|NAN))$''', re.X),
list(u'-+0123456789.'))
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:int',
re.compile(u'''^(?:[-+]?0b[0-1_]+
|[-+]?0o?[0-7_]+
|[-+]?(?:0|[1-9][0-9_]*)
|[-+]?0x[0-9a-fA-F_]+
|[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
list(u'-+0123456789'))
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:merge',
re.compile(u'^(?:<<)$'),
[u'<'])
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:null',
re.compile(u'''^(?: ~
|null|Null|NULL
| )$''', re.X),
[u'~', u'n', u'N', u''])
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:timestamp',
re.compile(u'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
|[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
(?:[Tt]|[ \\t]+)[0-9][0-9]?
:[0-9][0-9] :[0-9][0-9] (?:\\.[0-9]*)?
(?:[ \\t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
list(u'0123456789'))
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:value',
re.compile(u'^(?:=)$'),
[u'='])
# The following resolver is only for documentation purposes. It cannot work
# because plain scalars cannot start with '!', '&', or '*'.
Resolver.add_implicit_resolver(
u'tag:yaml.org,2002:yaml',
re.compile(u'^(?:!|&|\\*)$'),
list(u'!&*'))
# resolvers consist of
# - a list of applicable version
# - a tag
# - a regexp
# - a list of first characters to match
implicit_resolvers = [
([(1, 2)],
u'tag:yaml.org,2002:bool',
re.compile(u'''^(?:true|True|TRUE|false|False|FALSE)$''', re.X),
list(u'tTfF')),
([(1, 1)],
u'tag:yaml.org,2002:bool',
re.compile(u'''^(?:yes|Yes|YES|no|No|NO
|true|True|TRUE|false|False|FALSE
|on|On|ON|off|Off|OFF)$''', re.X),
list(u'yYnNtTfFoO')),
([(1, 2), (1, 1)],
u'tag:yaml.org,2002:float',
re.compile(u'''^(?:
[-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
|[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
|\\.[0-9_]+(?:[eE][-+][0-9]+)?
|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*
|[-+]?\\.(?:inf|Inf|INF)
|\\.(?:nan|NaN|NAN))$''', re.X),
list(u'-+0123456789.')),
([(1, 2)],
u'tag:yaml.org,2002:int',
re.compile(u'''^(?:[-+]?0b[0-1_]+
|[-+]?0o?[0-7_]+
|[-+]?(?:0|[1-9][0-9_]*)
|[-+]?0x[0-9a-fA-F_]+)$''', re.X),
list(u'-+0123456789')),
([(1, 1)],
u'tag:yaml.org,2002:int',
re.compile(u'''^(?:[-+]?0b[0-1_]+
|[-+]?0o?[0-7_]+
|[-+]?(?:0|[1-9][0-9_]*)
|[-+]?0x[0-9a-fA-F_]+
|[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
list(u'-+0123456789')),
([(1, 2), (1, 1)],
u'tag:yaml.org,2002:merge',
re.compile(u'^(?:<<)$'),
[u'<']),
([(1, 2), (1, 1)],
u'tag:yaml.org,2002:null',
re.compile(u'''^(?: ~
|null|Null|NULL
| )$''', re.X),
[u'~', u'n', u'N', u'']),
([(1, 2), (1, 1)],
u'tag:yaml.org,2002:timestamp',
re.compile(u'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
|[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
(?:[Tt]|[ \\t]+)[0-9][0-9]?
:[0-9][0-9] :[0-9][0-9] (?:\\.[0-9]*)?
(?:[ \\t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
list(u'0123456789')),
([(1, 2), (1, 1)],
u'tag:yaml.org,2002:value',
re.compile(u'^(?:=)$'),
[u'=']),
# The following resolver is only for documentation purposes. It cannot work
# because plain scalars cannot start with '!', '&', or '*'.
([(1, 2), (1, 1)],
u'tag:yaml.org,2002:yaml',
re.compile(u'^(?:!|&|\\*)$'),
list(u'!&*')),
]
class VersionedResolver(BaseResolver):
"""
contrary to the "normal" resolver, the smart resolver delays loading
the pattern matching rules. That way it can decide to load 1.1 rules
or the (default) 1.2 that no longer support octal without 0o, sexagesimals
and Yes/No/On/Off booleans.
"""
def __init__(self, version=None):
BaseResolver.__init__(self)
self._loader_version = self.get_loader_version(version)
self._version_implicit_resolver = {}
def add_version_implicit_resolver(self, version, tag, regexp, first):
if first is None:
first = [None]
impl_resolver = self._version_implicit_resolver.setdefault(version, {})
for ch in first:
impl_resolver.setdefault(ch, []).append((tag, regexp))
def get_loader_version(self, version):
if version is None or isinstance(version, tuple):
return version
if isinstance(version, list):
return tuple(version)
# assume string
return tuple(map(int, version.split(u'.')))
@property
def resolver(self):
"""
select the resolver based on the version we are parsing
"""
version = self.processing_version
if version not in self._version_implicit_resolver:
for x in implicit_resolvers:
if version in x[0]:
self.add_version_implicit_resolver(version, x[1], x[2], x[3])
return self._version_implicit_resolver[version]
def resolve(self, kind, value, implicit):
if kind is ScalarNode and implicit[0]:
if value == u'':
resolvers = self.resolver.get(u'', [])
else:
resolvers = self.resolver.get(value[0], [])
resolvers += self.resolver.get(None, [])
for tag, regexp in resolvers:
if regexp.match(value):
return tag
implicit = implicit[1]
if self.yaml_path_resolvers:
exact_paths = self.resolver_exact_paths[-1]
if kind in exact_paths:
return exact_paths[kind]
if None in exact_paths:
return exact_paths[None]
if kind is ScalarNode:
return self.DEFAULT_SCALAR_TAG
elif kind is SequenceNode:
return self.DEFAULT_SEQUENCE_TAG
elif kind is MappingNode:
return self.DEFAULT_MAPPING_TAG
@property
def processing_version(self):
try:
version = self.yaml_version
except AttributeError:
# dumping
version = self.use_version
if version is None:
version = self._loader_version
if version is None:
version = _DEFAULT_VERSION
return version
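# Behavioural sketch (assumes ruamel.yaml is installed; not part of this
# module): the versioned resolver is why the plain scalar "yes" becomes a
# bool under YAML 1.1 rules but stays a plain string under the default 1.2:
#   from ruamel.yaml import YAML
#   YAML().load("a: yes")["a"]                  # -> 'yes' (str, YAML 1.2)
#   YAML().load("%YAML 1.1\n---\na: yes")["a"]  # -> True (bool, YAML 1.1)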
| [
"[email protected]"
] | |
d2f61390d6b2c4b81f9dcb27acbe7b81d9e4cc13 | 16734d189c2bafa9c66fdc989126b7d9aa95c478 | /Python/flask/counter/server.py | 1c6c4ef1a9e0e7f137e799fcf25543dac002609e | [] | no_license | Ericksmith/CD-projects | 3dddd3a3819341be7202f11603cf793a2067c140 | 3b06b6e289d241c2f1115178c693d304280c2502 | refs/heads/master | 2021-08-15T17:41:32.329647 | 2017-11-18T01:18:04 | 2017-11-18T01:18:04 | 104,279,162 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 534 | py | from flask import Flask, session, render_template, request, redirect
app = Flask(__name__)
app.secret_key = "Dojo"
@app.route('/')
def index():
if session.get('counter') == None:
session['counter'] = 0
session['counter'] += 1
return render_template('index.html', counter = session['counter'])
@app.route('/doubleCount')
def doubleCount():
session['counter'] += 2
return redirect('/')
@app.route('/countReset')
def countReset():
session['counter'] = 0
return redirect('/')
app.run(debug=True)
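# Usage sketch (assumes the Flask dev server on the default port 5000; the
# counter lives in the signed session cookie, so re-save the jar each call):
#   curl -c jar.txt http://127.0.0.1:5000/                       # counter = 1
#   curl -b jar.txt -c jar.txt http://127.0.0.1:5000/            # counter = 2
#   curl -b jar.txt -c jar.txt http://127.0.0.1:5000/doubleCount # 302; adds 2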
| [
"[email protected]"
] | |
c702a1355b9688ac31eb5f513f2d151be4f47134 | f242b489b9d3db618cf04415d4a7d490bac36db0 | /Archives_Homework/src/archivesziped.py | 2b15451a84885e66b830d42976855d566e4d935e | [] | no_license | LABETE/Python2_Homework | e33d92d4f8a1867a850430600ccc7baf7ebc6dad | b24207b74c7883c220efc28d315e386dedead41d | refs/heads/master | 2016-08-12T19:04:05.304348 | 2015-05-27T04:05:18 | 2015-05-27T04:05:18 | 36,182,485 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 710 | py | import zipfile
import os
import glob
def zippedfiles(zipfilename):
path = os.getcwd()
zip_file = os.path.join(path, os.path.basename(zipfilename)+".zip")
files_to_zip = [os.path.basename(fn) for fn in glob.glob(zipfilename+"\*") if os.path.isfile(fn)]
zf = zipfile.ZipFile(zip_file, "w", zipfile.ZIP_DEFLATED)
file_to_zip = os.path.split(zipfilename)
file_to_zip = file_to_zip[-1]
for file in files_to_zip:
zf.write(os.path.join(path,file),os.path.join(file_to_zip,file))
list_ziped_files = zf.namelist()
zf.close()
sorted_ziped_files = []
for file in list_ziped_files:
sorted_ziped_files.append(file.replace("/","\\"))
return sorted_ziped_files | [
"[email protected]"
] | |
0a79edc64c01026d73147c2ba199040dde418acb | 0d75e69be45600c5ef5f700e409e8522b9678a02 | /IWDjangoAssignment1/settings.py | fbefbb96d64fcd6e7f9d12d0300504134dbaecd7 | [] | no_license | sdrsnadkry/IWDjangoAssignment1 | 28d4d6c264aac250e66a7be568fee29f1700464b | 6eb533d8bbdae68a6952113511626405e718cac6 | refs/heads/master | 2022-11-29T07:53:37.374821 | 2020-07-18T03:27:52 | 2020-07-18T03:27:52 | 280,572,491 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,138 | py | """
Django settings for IWDjangoAssignment1 project.
Generated by 'django-admin startproject' using Django 3.0.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '%g^opnnuo)*09sbtnne1)v9%b%r&k$166ox+no@$%eeshu42ho'
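# Hardening sketch (hypothetical environment variable name): in a deployed
# project the key is usually read from the environment instead, e.g.
#   SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', SECRET_KEY)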
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'IWDjangoAssignment1.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'IWDjangoAssignment1.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
5bc8073cfa36a998bb67cbfb0078c319d984d68b | f561a219c57bd75790d3155acac6f54299a88b08 | /city/migrations/0010_auto_20170406_1957.py | c4a0d52767af5f7c0852ea55762bea83e23cf8ea | [] | no_license | ujjwalagrawal17/OfferCartServer | 1e81cf2dc17f19fa896062c2a084e6b232a8929e | b3cd1c5f8eecc167b6f4baebed3c4471140d905f | refs/heads/master | 2020-12-30T15:31:04.380084 | 2017-05-24T18:26:20 | 2017-05-24T18:26:20 | 91,155,405 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-06 19:57
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('city', '0009_auto_20170406_1951'),
]
operations = [
migrations.AlterField(
model_name='citydata',
name='name',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
| [
"[email protected]"
] | |
00525edd9f91bf0763fa8d35db247a55724a0f90 | ad13583673551857615498b9605d9dcab63bb2c3 | /output/models/nist_data/atomic/duration/schema_instance/nistschema_sv_iv_atomic_duration_enumeration_2_xsd/__init__.py | aa2a5364c28c8263b4cba85ac2516304e22deade | [
"MIT"
] | permissive | tefra/xsdata-w3c-tests | 397180205a735b06170aa188f1f39451d2089815 | 081d0908382a0e0b29c8ee9caca6f1c0e36dd6db | refs/heads/main | 2023-08-03T04:25:37.841917 | 2023-07-29T17:10:13 | 2023-07-30T12:11:13 | 239,622,251 | 2 | 0 | MIT | 2023-07-25T14:19:04 | 2020-02-10T21:59:47 | Python | UTF-8 | Python | false | false | 381 | py | from output.models.nist_data.atomic.duration.schema_instance.nistschema_sv_iv_atomic_duration_enumeration_2_xsd.nistschema_sv_iv_atomic_duration_enumeration_2 import (
NistschemaSvIvAtomicDurationEnumeration2,
NistschemaSvIvAtomicDurationEnumeration2Type,
)
__all__ = [
"NistschemaSvIvAtomicDurationEnumeration2",
"NistschemaSvIvAtomicDurationEnumeration2Type",
]
| [
"[email protected]"
] | |
71f9088eb2850508d7b32b8291db9c48eaf63ed4 | 649bd422025e421d86025743eac324c9b882a2e8 | /exam/1_three-dimensional_atomic_system/dump/phasetrans/temp50_8000.py | bb136f85f5c8e67ef30d0a736db52e8c424e4cff | [] | no_license | scheuclu/atom_class | 36ddee1f6a5995872e858add151c5942c109847c | 0c9a8c63d9b38898c1869fe8983126cef17662cd | refs/heads/master | 2021-01-21T10:52:28.448221 | 2017-03-07T23:04:41 | 2017-03-07T23:04:41 | 83,489,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,893 | py | ITEM: TIMESTEP
8000
ITEM: NUMBER OF ATOMS
2048
ITEM: BOX BOUNDS pp pp pp
6.8399774235701472e-01 4.6516002257638277e+01
6.8399774235701472e-01 4.6516002257638277e+01
6.8399774235701472e-01 4.6516002257638277e+01
ITEM: ATOMS id type xs ys zs
8 1 0.125275 0.0524332 0.0700987
35 1 0.055693 0.119442 0.0648769
130 1 0.0616181 0.0547876 0.124558
165 1 0.126576 0.11896 0.126558
117 1 0.629185 0.381969 0.00479994
147 1 0.562191 0.00221148 0.189248
1039 1 0.436585 0.503297 0.061793
12 1 0.252674 0.0544219 0.058728
39 1 0.182635 0.120352 0.0618921
43 1 0.314606 0.119882 0.0651928
134 1 0.187246 0.0604005 0.122769
138 1 0.317592 0.0604458 0.123622
169 1 0.252452 0.122122 0.124648
82 1 0.558777 0.315202 0.00646483
85 1 0.618607 0.255592 0.00203588
397 1 0.37019 -0.00249033 0.372018
271 1 0.440397 -0.000869142 0.309943
259 1 0.060496 -0.00193926 0.308511
16 1 0.375499 0.0603909 0.0657616
47 1 0.436956 0.12426 0.0635057
142 1 0.436794 0.0616845 0.131913
173 1 0.373818 0.124945 0.126361
20 1 0.500724 0.0683344 0.0697854
518 1 0.188316 0.0599628 0.502126
1043 1 0.559323 0.496893 0.0600433
121 1 0.744977 0.378285 0.00809734
177 1 0.496375 0.128972 0.129971
24 1 0.619914 0.0621852 0.0615539
51 1 0.560747 0.123861 0.0670863
146 1 0.557121 0.0657963 0.127899
181 1 0.623399 0.125702 0.1258
512 1 0.87847 0.440042 0.440048
135 1 0.18672 0.00215882 0.190384
28 1 0.747809 0.0567115 0.0638979
55 1 0.686831 0.117041 0.0587394
59 1 0.821391 0.126433 0.0628188
150 1 0.685996 0.0634803 0.122317
154 1 0.808534 0.0601883 0.126617
185 1 0.749955 0.123422 0.121806
122 1 0.813307 0.441177 0.00224049
511 1 0.933441 0.37016 0.43958
86 1 0.688381 0.315152 0.00723756
267 1 0.305731 0.000642363 0.309925
403 1 0.564603 7.83976e-05 0.438374
161 1 0.98911 0.121528 0.125743
4 1 1.00032 0.0577103 0.0613343
32 1 0.875994 0.0551447 0.0584332
63 1 0.933112 0.117627 0.0604438
158 1 0.940662 0.0556838 0.122455
189 1 0.876372 0.128137 0.123092
510 1 0.938072 0.439574 0.370386
405 1 0.625534 -0.000763179 0.374656
137 1 0.24609 -0.0027281 0.127834
40 1 0.12264 0.187154 0.064434
67 1 0.0621523 0.246944 0.0598532
72 1 0.125549 0.306594 0.0622905
162 1 0.0583119 0.181749 0.122893
194 1 0.058968 0.312171 0.127075
197 1 0.119212 0.246858 0.123427
68 1 0.00112882 0.310166 0.0644128
193 1 1.00337 0.250352 0.128425
36 1 0.994991 0.188341 0.0624711
53 1 0.621827 0.118643 -0.00124165
570 1 0.811756 0.183528 0.504084
44 1 0.244967 0.184196 0.0642356
71 1 0.188147 0.248519 0.0633854
75 1 0.303371 0.245034 0.0600301
76 1 0.246176 0.318303 0.0653256
166 1 0.18216 0.182365 0.126476
170 1 0.30957 0.182367 0.124042
198 1 0.185137 0.31214 0.124392
201 1 0.249644 0.248747 0.12439
202 1 0.310898 0.313749 0.129209
1411 1 0.0649441 0.496923 0.440246
1419 1 0.308126 0.495667 0.440822
1031 1 0.181989 0.496211 0.0597779
48 1 0.364943 0.186716 0.0619609
79 1 0.434004 0.247066 0.0590481
80 1 0.36766 0.310752 0.0656114
174 1 0.43521 0.190688 0.125649
205 1 0.366921 0.245658 0.125254
206 1 0.433675 0.309355 0.124266
52 1 0.498767 0.183013 0.0615933
209 1 0.497324 0.254251 0.127659
1427 1 0.568519 0.49429 0.436818
263 1 0.189523 0.000990593 0.310561
84 1 0.498894 0.307384 0.065758
56 1 0.622955 0.185115 0.0640302
83 1 0.562237 0.249481 0.0663199
88 1 0.624964 0.318461 0.0650322
178 1 0.566754 0.186921 0.126492
210 1 0.56921 0.308622 0.127212
213 1 0.628612 0.251258 0.130193
395 1 0.31312 -0.0022712 0.433694
60 1 0.747822 0.184389 0.0631689
87 1 0.683456 0.248918 0.064574
91 1 0.812381 0.250316 0.0612225
92 1 0.753786 0.315455 0.0633242
182 1 0.688258 0.186508 0.123109
186 1 0.810621 0.188832 0.12471
214 1 0.688962 0.31556 0.126657
217 1 0.749284 0.251193 0.120148
218 1 0.817371 0.311749 0.128465
49 1 0.495497 0.128481 -0.00224216
265 1 0.248562 0.0063132 0.247857
393 1 0.244096 0.000473523 0.37808
606 1 0.934114 0.30689 0.500771
64 1 0.872013 0.191676 0.0537877
95 1 0.934179 0.250345 0.0603664
96 1 0.870875 0.315334 0.0639238
190 1 0.937174 0.187316 0.125603
221 1 0.87279 0.245532 0.123346
222 1 0.934524 0.305976 0.127856
385 1 -0.0038265 0.00827323 0.378861
99 1 0.05997 0.372612 0.0624401
104 1 0.120514 0.435591 0.0593198
226 1 0.0673868 0.437571 0.12167
229 1 0.125455 0.375979 0.116444
550 1 0.188235 0.184894 0.499874
1161 1 0.247494 0.500671 0.125602
509 1 0.87392 0.37827 0.373062
590 1 0.43866 0.313227 0.497585
1153 1 0.00142726 0.500154 0.127384
151 1 0.681864 0.00248686 0.185075
391 1 0.184335 0.000527043 0.441033
1165 1 0.371129 0.497274 0.125852
257 1 -0.000722187 -0.0021065 0.247572
103 1 0.184689 0.374186 0.0593363
107 1 0.31269 0.375578 0.0581362
108 1 0.246532 0.438378 0.0621988
230 1 0.184564 0.44092 0.120139
233 1 0.25054 0.379865 0.126184
234 1 0.315735 0.433948 0.115661
111 1 0.431998 0.372218 0.0607526
112 1 0.370097 0.434042 0.0569016
237 1 0.371124 0.369449 0.126185
238 1 0.429018 0.436489 0.124979
241 1 0.497142 0.372591 0.126605
116 1 0.492474 0.435492 0.0661674
153 1 0.746881 -0.00351767 0.124165
508 1 0.751862 0.438625 0.432939
507 1 0.810479 0.373301 0.435856
593 1 0.49481 0.252887 0.496526
115 1 0.557046 0.376685 0.0680944
120 1 0.622822 0.43363 0.0680285
242 1 0.556513 0.440184 0.133747
245 1 0.622741 0.372414 0.127897
119 1 0.683351 0.380104 0.069721
123 1 0.819333 0.373428 0.0694191
124 1 0.755192 0.44184 0.0674971
246 1 0.685507 0.444619 0.128739
249 1 0.754461 0.378796 0.124713
250 1 0.81934 0.437737 0.126296
506 1 0.81597 0.443169 0.372735
573 1 0.876193 0.12455 0.498012
1159 1 0.189929 0.499101 0.190003
1155 1 0.0661853 0.494945 0.183006
505 1 0.7567 0.376026 0.373672
269 1 0.38028 -0.000995045 0.248845
225 1 0.00140618 0.378519 0.122551
100 1 0.99722 0.438281 0.0627536
127 1 0.940094 0.373136 0.0656695
128 1 0.880842 0.435283 0.0671569
253 1 0.876499 0.375233 0.12917
254 1 0.935618 0.442535 0.130081
503 1 0.693837 0.375691 0.436161
136 1 0.130032 0.0577017 0.185799
163 1 0.0622222 0.123473 0.191151
258 1 0.0618529 0.0603591 0.245699
264 1 0.119866 0.0609495 0.316587
291 1 0.0616754 0.128749 0.313389
293 1 0.124726 0.121152 0.253429
132 1 1.00366 0.0666796 0.186091
502 1 0.691358 0.43324 0.375588
140 1 0.253535 0.063444 0.185774
167 1 0.186052 0.119076 0.187346
171 1 0.308814 0.126064 0.189942
262 1 0.183941 0.0647644 0.257193
266 1 0.309982 0.0633069 0.245493
268 1 0.255611 0.0623238 0.311484
295 1 0.188015 0.123768 0.311465
297 1 0.251724 0.123683 0.249312
299 1 0.314813 0.127527 0.314328
1289 1 0.250522 0.500426 0.252412
144 1 0.376605 0.0623253 0.192652
175 1 0.433705 0.125159 0.19272
270 1 0.438075 0.063988 0.257555
272 1 0.376435 0.0652942 0.316689
301 1 0.373117 0.118548 0.253367
303 1 0.439181 0.126787 0.318896
19 1 0.560558 0.00104114 0.0690607
500 1 0.504023 0.431466 0.434951
1413 1 0.124008 0.497642 0.375293
276 1 0.500628 0.0630437 0.317454
305 1 0.496777 0.123844 0.254675
148 1 0.495672 0.065702 0.189262
152 1 0.624977 0.064086 0.185297
179 1 0.563084 0.12722 0.189447
274 1 0.558808 0.0673548 0.247966
280 1 0.628174 0.0628327 0.302833
307 1 0.564124 0.123972 0.310662
309 1 0.630232 0.121424 0.242136
1299 1 0.566599 0.499947 0.316063
498 1 0.562864 0.432113 0.374401
526 1 0.439744 0.0655042 0.499322
275 1 0.562759 0.00257009 0.312551
156 1 0.749968 0.0622159 0.186322
183 1 0.686991 0.125795 0.182222
187 1 0.812156 0.12108 0.183956
278 1 0.689955 0.0594975 0.249463
282 1 0.810719 0.0635833 0.249408
284 1 0.755152 0.0520319 0.313863
311 1 0.692497 0.120989 0.315162
313 1 0.751691 0.120688 0.24523
315 1 0.807939 0.12176 0.309793
499 1 0.565468 0.369109 0.434116
289 1 -0.00201895 0.123361 0.254482
501 1 0.63035 0.373314 0.366725
260 1 -0.000380063 0.0581724 0.316554
160 1 0.874388 0.0635622 0.186119
191 1 0.935696 0.122384 0.191213
286 1 0.931742 0.0579076 0.249366
288 1 0.874633 0.0621917 0.310625
317 1 0.871151 0.123195 0.251103
319 1 0.935115 0.122973 0.313743
168 1 0.124861 0.181999 0.18609
195 1 0.0693674 0.252688 0.188914
200 1 0.13126 0.313201 0.186735
290 1 0.0601946 0.184571 0.250954
296 1 0.126871 0.185965 0.313751
322 1 0.0616025 0.311995 0.253695
323 1 0.0572014 0.251001 0.31574
325 1 0.122313 0.250848 0.254287
328 1 0.125773 0.319958 0.315956
164 1 1.00063 0.185065 0.189062
172 1 0.248334 0.189026 0.184437
199 1 0.186781 0.24843 0.187984
203 1 0.313618 0.25066 0.192746
204 1 0.244636 0.310183 0.187102
294 1 0.18889 0.187028 0.25177
298 1 0.317703 0.187292 0.255181
300 1 0.248049 0.186243 0.318227
326 1 0.190467 0.310967 0.253694
327 1 0.18858 0.25014 0.317268
329 1 0.249769 0.245222 0.248566
330 1 0.313377 0.308267 0.252065
331 1 0.313374 0.248078 0.31365
332 1 0.252545 0.314851 0.311563
176 1 0.370371 0.185437 0.191674
207 1 0.439425 0.246783 0.196445
208 1 0.371415 0.31286 0.185663
302 1 0.433668 0.185326 0.251421
304 1 0.375948 0.190314 0.313465
333 1 0.379412 0.251983 0.254961
334 1 0.439711 0.312241 0.253663
335 1 0.438017 0.251382 0.315074
336 1 0.375193 0.314662 0.310975
340 1 0.505232 0.315173 0.312396
180 1 0.500039 0.186295 0.191424
337 1 0.501246 0.251567 0.255027
212 1 0.496234 0.313262 0.187513
308 1 0.501838 0.188552 0.310634
184 1 0.630361 0.18977 0.184386
211 1 0.559389 0.247819 0.192429
216 1 0.631821 0.314822 0.188217
306 1 0.564115 0.185223 0.252794
312 1 0.629103 0.185063 0.30847
338 1 0.569682 0.308022 0.253439
339 1 0.570209 0.248414 0.312682
341 1 0.631747 0.246689 0.250928
344 1 0.630342 0.314927 0.309583
188 1 0.753959 0.187356 0.185688
215 1 0.694445 0.25086 0.188963
219 1 0.812279 0.247236 0.183254
220 1 0.752743 0.320611 0.187919
310 1 0.689261 0.185336 0.243944
314 1 0.810073 0.186657 0.249921
316 1 0.748746 0.183796 0.309732
342 1 0.694492 0.320974 0.25128
343 1 0.694612 0.25713 0.307742
345 1 0.754485 0.252744 0.251958
346 1 0.8128 0.31294 0.244411
347 1 0.811905 0.252534 0.314156
348 1 0.752137 0.315317 0.309686
196 1 0.996599 0.314622 0.180825
292 1 1.00046 0.190175 0.31441
324 1 -0.00416669 0.317721 0.314576
321 1 0.000911338 0.251834 0.247826
192 1 0.872715 0.182099 0.187069
223 1 0.935426 0.243171 0.18871
224 1 0.880947 0.311094 0.1893
318 1 0.938047 0.184405 0.248423
320 1 0.871554 0.182515 0.311965
349 1 0.877234 0.250605 0.253622
350 1 0.939175 0.311894 0.25019
351 1 0.937758 0.247174 0.308245
352 1 0.877321 0.313384 0.306403
411 1 0.811961 0.00100094 0.440472
46 1 0.433533 0.186141 -0.000616561
227 1 0.0658652 0.374047 0.184487
232 1 0.128632 0.434663 0.187634
354 1 0.0678034 0.439872 0.25225
355 1 0.0617209 0.37569 0.311168
357 1 0.130175 0.375455 0.249329
360 1 0.128666 0.434545 0.311615
356 1 1.00012 0.436321 0.310084
504 1 0.625252 0.434702 0.443334
113 1 0.49685 0.378797 0.00225695
231 1 0.189851 0.375179 0.183973
235 1 0.31036 0.370212 0.189248
236 1 0.248064 0.439971 0.180328
358 1 0.189958 0.43286 0.245724
359 1 0.195322 0.379217 0.309987
361 1 0.251186 0.371979 0.247694
362 1 0.307869 0.440392 0.24672
363 1 0.311123 0.374146 0.312531
364 1 0.252787 0.439078 0.305073
493 1 0.369872 0.378909 0.375474
114 1 0.560023 0.437184 -0.00146678
495 1 0.434231 0.36992 0.435994
1297 1 0.499846 0.50412 0.251622
239 1 0.434901 0.371877 0.195412
240 1 0.374815 0.429233 0.188789
365 1 0.369274 0.373995 0.247718
366 1 0.427429 0.436427 0.255609
367 1 0.434864 0.373794 0.313992
368 1 0.368342 0.439149 0.312526
369 1 0.506779 0.372495 0.256039
244 1 0.494947 0.430412 0.190969
372 1 0.497215 0.434517 0.310592
277 1 0.622488 0.00310053 0.248512
1421 1 0.370078 0.495775 0.378936
494 1 0.438776 0.441141 0.377444
287 1 0.93451 -0.000581836 0.3172
496 1 0.377097 0.434583 0.441454
243 1 0.562879 0.370764 0.188731
248 1 0.622365 0.439639 0.182946
370 1 0.555512 0.438536 0.245991
371 1 0.570988 0.37934 0.309405
373 1 0.629533 0.376554 0.246913
376 1 0.626845 0.436742 0.307212
247 1 0.693106 0.380936 0.184976
251 1 0.816769 0.378511 0.187922
252 1 0.760006 0.439337 0.182568
374 1 0.683277 0.441651 0.245188
375 1 0.691997 0.381449 0.308978
377 1 0.749134 0.384162 0.245002
378 1 0.813362 0.440505 0.249992
379 1 0.815174 0.374103 0.310167
380 1 0.752788 0.440853 0.312655
497 1 0.496321 0.373674 0.374651
38 1 0.18858 0.194051 0.00152719
228 1 -0.00061946 0.437908 0.185375
353 1 0.00221678 0.378326 0.243058
255 1 0.939928 0.380092 0.183881
256 1 0.875097 0.442181 0.185089
381 1 0.878542 0.380172 0.244063
382 1 0.937914 0.439782 0.245908
383 1 0.9344 0.378243 0.307808
384 1 0.876709 0.442965 0.309751
50 1 0.560246 0.186099 0.000872627
1167 1 0.426487 0.498473 0.195197
1423 1 0.442568 0.495815 0.439192
386 1 0.0605286 0.0741717 0.382352
392 1 0.125108 0.0625699 0.436912
419 1 0.0690514 0.135868 0.443591
421 1 0.12458 0.130016 0.373592
388 1 0.997825 0.0640415 0.446233
417 1 1.0034 0.134298 0.377381
1415 1 0.192298 0.503811 0.435246
1295 1 0.434311 0.50107 0.31388
390 1 0.182978 0.0648384 0.372241
394 1 0.313614 0.0606214 0.376549
396 1 0.246154 0.0673198 0.441908
423 1 0.182464 0.124011 0.442589
425 1 0.24554 0.122171 0.377801
427 1 0.310732 0.124461 0.434754
78 1 0.439612 0.310033 0.002018
389 1 0.126422 -0.000391148 0.378946
585 1 0.245788 0.245831 0.499483
398 1 0.434782 0.0583538 0.379076
400 1 0.373881 0.0580505 0.439282
429 1 0.371447 0.12134 0.381348
431 1 0.443496 0.125576 0.440868
81 1 0.496731 0.247286 -0.00151338
1429 1 0.629374 0.491005 0.379029
404 1 0.499904 0.0591644 0.437739
433 1 0.506756 0.124516 0.373246
402 1 0.56267 0.0579228 0.375341
408 1 0.626523 0.0633241 0.439132
435 1 0.560292 0.127019 0.444884
437 1 0.622554 0.11925 0.375672
487 1 0.182877 0.377795 0.439403
406 1 0.685368 0.0572457 0.373184
410 1 0.814564 0.0610824 0.369814
412 1 0.756194 0.0665419 0.440036
439 1 0.693601 0.119483 0.438917
441 1 0.751889 0.121168 0.373412
443 1 0.814097 0.127512 0.437423
486 1 0.18789 0.438475 0.37521
492 1 0.249807 0.43693 0.447123
414 1 0.932316 0.0644499 0.382764
416 1 0.874438 0.0659964 0.447086
445 1 0.871253 0.11953 0.372377
447 1 0.936808 0.132414 0.436863
491 1 0.313525 0.375715 0.435679
489 1 0.242458 0.37707 0.378779
1163 1 0.306723 0.500018 0.18609
418 1 0.0649552 0.192759 0.379161
424 1 0.133522 0.193597 0.440728
450 1 0.0574928 0.316002 0.375403
451 1 0.0656448 0.253648 0.441216
453 1 0.121717 0.256546 0.372897
456 1 0.125206 0.313591 0.436359
449 1 1.00106 0.248526 0.377819
77 1 0.369152 0.244393 0.00519506
490 1 0.304265 0.439911 0.376255
422 1 0.186855 0.193832 0.377395
426 1 0.3107 0.185156 0.376822
428 1 0.24507 0.185965 0.432234
454 1 0.189094 0.317555 0.374734
455 1 0.190236 0.250987 0.434792
457 1 0.255451 0.256071 0.373133
458 1 0.315247 0.314521 0.373333
459 1 0.309062 0.248267 0.438736
460 1 0.248639 0.310952 0.440933
629 1 0.630594 0.371211 0.496101
625 1 0.502912 0.368335 0.499646
430 1 0.436622 0.186099 0.376119
432 1 0.371374 0.190029 0.438597
461 1 0.369845 0.251917 0.372258
462 1 0.433113 0.311783 0.378494
463 1 0.431551 0.2487 0.43656
464 1 0.374082 0.313924 0.435888
468 1 0.505789 0.308597 0.431623
118 1 0.690406 0.445135 0.00711041
1433 1 0.751987 0.500545 0.373653
436 1 0.507268 0.191503 0.431551
465 1 0.507928 0.242982 0.372468
466 1 0.566708 0.313723 0.369081
440 1 0.629399 0.18093 0.434339
434 1 0.563279 0.186624 0.365837
467 1 0.572898 0.248158 0.43907
469 1 0.631549 0.247373 0.36795
472 1 0.634693 0.311807 0.435404
488 1 0.124849 0.441146 0.436271
474 1 0.813675 0.313773 0.375124
442 1 0.811116 0.188621 0.374541
444 1 0.755456 0.187115 0.441106
473 1 0.749352 0.247172 0.371041
475 1 0.812491 0.249281 0.443674
470 1 0.690011 0.31918 0.368648
471 1 0.686217 0.249916 0.436673
476 1 0.747473 0.308543 0.431833
438 1 0.689085 0.185946 0.372502
482 1 0.0591115 0.443409 0.371504
483 1 0.0666077 0.380195 0.439242
452 1 0.992585 0.309521 0.43498
477 1 0.872036 0.249661 0.380288
420 1 0.000553346 0.187583 0.436936
448 1 0.8758 0.190131 0.442231
480 1 0.8726 0.31081 0.436664
446 1 0.935056 0.188087 0.373036
479 1 0.933283 0.246491 0.438527
478 1 0.937181 0.30887 0.373998
485 1 0.12172 0.38201 0.371918
481 1 0.996763 0.378385 0.37694
484 1 0.992663 0.438829 0.434315
1553 1 0.502103 0.496841 0.498846
1055 1 0.937843 0.499356 0.0624657
273 1 0.500204 0.00387479 0.25242
21 1 0.619268 0.000905519 0.00227541
155 1 0.81501 0.00014099 0.190314
90 1 0.811367 0.311814 0.00137175
1285 1 0.128182 0.499018 0.246301
1431 1 0.686755 0.497907 0.442855
1417 1 0.248388 0.501897 0.372361
577 1 0.00128273 0.246963 0.497633
582 1 0.188902 0.309943 0.498519
578 1 0.0557411 0.313581 0.498177
125 1 0.878502 0.374255 -0.000702241
557 1 0.37073 0.124266 0.500574
633 1 0.751862 0.372625 0.498465
18 1 0.552554 0.0651696 0.00435307
69 1 0.123021 0.248902 -0.00370827
637 1 0.870534 0.374045 0.494995
41 1 0.25227 0.123435 0.00193999
73 1 0.246631 0.254953 0.00320193
101 1 0.124262 0.373423 0.00214216
34 1 0.0589355 0.182273 0.00673058
558 1 0.439946 0.190376 0.491329
621 1 0.376034 0.369274 0.50379
97 1 -0.000415093 0.37177 0.00336964
58 1 0.805271 0.179441 -0.00142972
54 1 0.684661 0.180691 0.00276046
6 1 0.192467 0.0590213 0.000853212
102 1 0.184805 0.438561 0.00107606
26 1 0.811599 0.061485 0.00352554
89 1 0.744864 0.244662 0.00308647
74 1 0.312452 0.315216 0.00115518
65 1 0.99676 0.247599 0.00376918
609 1 0.990732 0.37149 0.496292
37 1 0.124931 0.132362 0.00306417
98 1 0.0657646 0.442464 -0.00280968
520 1 0.125524 0.0638617 0.56188
547 1 0.0659808 0.123097 0.56819
642 1 0.0658016 0.061886 0.624484
677 1 0.126159 0.124929 0.625086
673 1 0.999189 0.125459 0.626666
93 1 0.875777 0.255133 1.00199
921 1 0.74815 0.000460717 0.875456
524 1 0.252091 0.0572195 0.564048
551 1 0.186413 0.122502 0.560904
555 1 0.319572 0.131882 0.564111
646 1 0.189207 0.063987 0.624761
650 1 0.315345 0.0619518 0.622212
681 1 0.251996 0.127907 0.621018
1695 1 0.933893 0.500271 0.684607
1941 1 0.623806 0.498919 0.874514
542 1 0.936413 0.0594249 0.504971
528 1 0.37705 0.0615907 0.560516
559 1 0.439015 0.127812 0.556624
654 1 0.443992 0.0549569 0.62632
685 1 0.379661 0.127214 0.618276
532 1 0.500496 0.0638964 0.560844
689 1 0.50462 0.128176 0.627047
1689 1 0.751282 0.499692 0.632069
586 1 0.310669 0.312791 0.494839
514 1 0.0628666 0.0618707 0.496416
781 1 0.379513 0.00499161 0.750569
536 1 0.622621 0.0571451 0.564948
563 1 0.559061 0.126422 0.562662
658 1 0.56224 0.0688272 0.625393
693 1 0.627517 0.123776 0.629261
546 1 0.0653231 0.193418 0.500946
1803 1 0.315142 0.497802 0.816773
14 1 0.441759 0.0672902 0.997374
601 1 0.749888 0.254476 0.500229
540 1 0.758661 0.0678208 0.567378
567 1 0.685567 0.124198 0.569349
571 1 0.819727 0.120528 0.562834
662 1 0.685344 0.0614185 0.624517
666 1 0.821275 0.0651819 0.63014
697 1 0.753268 0.126563 0.626426
661 1 0.624196 0.00368177 0.631185
1809 1 0.508631 0.505684 0.745013
62 1 0.936575 0.190192 1.00072
29 1 0.871462 0.00213121 0.99784
110 1 0.435657 0.437321 1.00435
516 1 0.99786 0.0716583 0.56199
996 1 0.997944 0.438531 0.935663
544 1 0.877367 0.062453 0.55995
575 1 0.934454 0.126884 0.56327
670 1 0.931905 0.0627556 0.627768
701 1 0.878187 0.129257 0.62782
515 1 0.0648278 -0.00281249 0.55872
552 1 0.123232 0.187969 0.560814
579 1 0.0608053 0.255615 0.561765
584 1 0.123948 0.31466 0.558474
674 1 0.064458 0.186942 0.631155
706 1 0.0643131 0.309058 0.62446
709 1 0.123503 0.248347 0.621941
580 1 -0.00372522 0.314828 0.560734
705 1 0.999326 0.250114 0.623209
777 1 0.25607 -0.00257122 0.745076
663 1 0.685112 -0.000900688 0.689102
30 1 0.939087 0.0570406 1.00206
1679 1 0.446635 0.50295 0.684274
556 1 0.25177 0.184586 0.565992
583 1 0.185487 0.24569 0.560674
587 1 0.311419 0.250325 0.560791
588 1 0.246433 0.306751 0.559187
678 1 0.184211 0.186363 0.628395
682 1 0.314686 0.186134 0.624454
710 1 0.186902 0.309659 0.620353
713 1 0.244517 0.24789 0.624694
714 1 0.309722 0.310512 0.620808
560 1 0.381851 0.189021 0.559495
591 1 0.437465 0.252388 0.562125
592 1 0.372224 0.310518 0.560495
686 1 0.444101 0.187084 0.626455
717 1 0.383068 0.24784 0.624619
718 1 0.437454 0.313894 0.622922
596 1 0.502794 0.310829 0.561982
564 1 0.498636 0.189033 0.562648
523 1 0.314728 0.00210603 0.560625
773 1 0.125341 0.00564625 0.749099
721 1 0.502036 0.248482 0.623416
568 1 0.621898 0.187727 0.569837
595 1 0.568819 0.256275 0.564755
600 1 0.631123 0.315527 0.562168
690 1 0.56318 0.193018 0.625238
722 1 0.567904 0.314803 0.629501
725 1 0.62473 0.250678 0.626229
779 1 0.314402 -0.00151518 0.810305
913 1 0.498265 -0.00321982 0.873319
572 1 0.751892 0.193032 0.564018
599 1 0.68936 0.252011 0.571567
603 1 0.809745 0.252912 0.567233
604 1 0.748857 0.312562 0.562591
694 1 0.690173 0.185172 0.627023
698 1 0.817888 0.186332 0.626528
726 1 0.68953 0.314483 0.629646
729 1 0.754108 0.254278 0.628016
730 1 0.813563 0.316284 0.626046
634 1 0.807781 0.437293 0.498871
1567 1 0.940818 0.501329 0.563482
10 1 0.315197 0.0546459 0.998801
33 1 0.995233 0.120877 0.997509
548 1 0.00876758 0.186169 0.565901
576 1 0.873495 0.185508 0.561963
607 1 0.941229 0.247449 0.560208
608 1 0.878436 0.313204 0.560935
702 1 0.939696 0.185789 0.623704
733 1 0.875437 0.247375 0.623246
734 1 0.935181 0.306222 0.624226
602 1 0.81065 0.311771 0.505378
126 1 0.94064 0.432282 0.997026
611 1 0.0616222 0.376244 0.569655
616 1 0.126389 0.436856 0.566742
738 1 0.0654033 0.436975 0.62473
741 1 0.122709 0.374611 0.627321
769 1 0.00234852 0.0040723 0.751022
737 1 1.00039 0.37437 0.62526
1024 1 0.870541 0.432505 0.937213
1805 1 0.376572 0.495761 0.752527
614 1 0.186281 0.436364 0.500145
1929 1 0.251071 0.503398 0.873288
615 1 0.186932 0.373501 0.562401
619 1 0.310988 0.369963 0.557302
620 1 0.247907 0.43756 0.567286
742 1 0.187171 0.440196 0.626988
745 1 0.25092 0.373038 0.622809
746 1 0.314123 0.437807 0.627237
1023 1 0.933678 0.374415 0.935882
57 1 0.751061 0.118486 1.00252
566 1 0.694625 0.178944 0.503505
623 1 0.443964 0.381497 0.560754
624 1 0.375935 0.437914 0.567978
749 1 0.373072 0.371117 0.621401
750 1 0.441755 0.436793 0.622937
628 1 0.508439 0.438387 0.567438
753 1 0.50249 0.369798 0.623516
1795 1 0.0680139 0.496921 0.817023
923 1 0.81505 -0.00299176 0.93023
1797 1 0.124797 0.494565 0.7545
1022 1 0.937339 0.437521 0.868823
627 1 0.563492 0.372319 0.565606
632 1 0.627209 0.435765 0.558897
754 1 0.567104 0.435507 0.625451
757 1 0.628692 0.376433 0.629195
1937 1 0.504501 0.500762 0.87685
1563 1 0.809727 0.501278 0.563689
631 1 0.688578 0.373975 0.564067
635 1 0.808935 0.373558 0.561259
636 1 0.752666 0.441817 0.567418
758 1 0.686823 0.438498 0.621677
761 1 0.748459 0.374813 0.624475
762 1 0.808268 0.438443 0.62981
1021 1 0.873599 0.377862 0.871218
61 1 0.877154 0.117904 0.998128
42 1 0.304219 0.18717 1.00052
1799 1 0.184603 0.497245 0.809724
2 1 0.0592654 0.0604957 1.00122
612 1 -0.00291966 0.438938 0.567328
639 1 0.939057 0.379554 0.56461
640 1 0.86552 0.435267 0.561297
765 1 0.872841 0.376993 0.627532
766 1 0.93928 0.438334 0.62905
993 1 0.997611 0.378485 0.87607
1793 1 0.995578 0.503242 0.744058
771 1 0.0611653 0.00478981 0.811443
1925 1 0.129507 0.497173 0.877507
1923 1 0.0638308 0.504705 0.933462
648 1 0.125312 0.0645273 0.682663
675 1 0.0622054 0.124661 0.684607
770 1 0.0599226 0.0616294 0.746958
776 1 0.129899 0.0604782 0.814284
803 1 0.0660322 0.122811 0.816235
805 1 0.122404 0.127686 0.75363
772 1 1.00416 0.0687619 0.819225
915 1 0.559955 0.00296863 0.938722
519 1 0.184443 -0.00910927 0.559073
652 1 0.253348 0.0640687 0.684763
679 1 0.187708 0.127434 0.687972
683 1 0.310623 0.12224 0.691784
774 1 0.191477 0.0669668 0.748921
778 1 0.314552 0.0636012 0.749903
780 1 0.251788 0.0606666 0.812188
807 1 0.190384 0.121301 0.814232
809 1 0.249848 0.124207 0.755063
811 1 0.313599 0.131145 0.81265
1053 1 0.880147 0.490422 1.0069
1933 1 0.379073 0.501022 0.874506
1667 1 0.0648859 0.500747 0.68688
656 1 0.382271 0.0627836 0.676605
687 1 0.43787 0.124528 0.683629
782 1 0.440445 0.0614457 0.74378
784 1 0.378527 0.0638554 0.813454
813 1 0.377069 0.129557 0.750245
815 1 0.43392 0.126668 0.811834
817 1 0.496021 0.130501 0.751876
788 1 0.495958 0.067682 0.821056
660 1 0.500691 0.0635231 0.691581
664 1 0.625148 0.0623164 0.690394
691 1 0.560616 0.12605 0.688891
786 1 0.561187 0.0631957 0.759012
792 1 0.625195 0.0619915 0.818201
819 1 0.563644 0.126651 0.814319
821 1 0.623088 0.126598 0.752071
535 1 0.691286 0.00134141 0.560226
990 1 0.946748 0.311087 0.879251
668 1 0.748541 0.0651572 0.691122
695 1 0.688494 0.12412 0.690562
699 1 0.812999 0.126318 0.690873
790 1 0.687037 0.0637855 0.752341
794 1 0.815486 0.0641048 0.754268
796 1 0.750975 0.0652244 0.814181
823 1 0.692757 0.121106 0.816474
825 1 0.753194 0.125212 0.755096
827 1 0.815816 0.128007 0.816598
958 1 0.936756 0.189067 0.87861
964 1 1.00518 0.307707 0.939031
994 1 0.064142 0.436486 0.873443
1691 1 0.808896 0.500912 0.694092
801 1 1.00115 0.122023 0.746438
644 1 0.996425 0.0659065 0.681497
672 1 0.87403 0.0674217 0.690164
703 1 0.93844 0.124577 0.689339
798 1 0.939566 0.0636997 0.746606
800 1 0.874424 0.0654601 0.818716
829 1 0.873557 0.128163 0.753664
831 1 0.933537 0.123992 0.815098
680 1 0.125712 0.183856 0.688334
707 1 0.0606057 0.247923 0.682792
712 1 0.124531 0.307174 0.686383
802 1 0.0583723 0.185823 0.745808
808 1 0.126394 0.188489 0.810823
834 1 0.064089 0.308091 0.748341
835 1 0.0554834 0.244574 0.806071
837 1 0.117743 0.244787 0.746616
840 1 0.120456 0.308784 0.807761
804 1 -0.00649249 0.184962 0.811227
836 1 0.000971131 0.316833 0.812033
833 1 0.999188 0.248977 0.750027
676 1 0.995967 0.181489 0.688514
684 1 0.24745 0.187238 0.685951
711 1 0.185722 0.24804 0.690427
715 1 0.312925 0.249333 0.688948
716 1 0.246378 0.313475 0.683468
806 1 0.186668 0.182163 0.750254
810 1 0.307533 0.192239 0.746639
812 1 0.249342 0.189313 0.807176
838 1 0.183117 0.30728 0.752601
839 1 0.189342 0.244879 0.812941
841 1 0.246804 0.250025 0.746706
842 1 0.32037 0.312974 0.747729
843 1 0.31631 0.254785 0.815557
844 1 0.254387 0.311896 0.810043
688 1 0.376831 0.187691 0.687436
719 1 0.442717 0.253548 0.689523
720 1 0.380616 0.314007 0.682196
814 1 0.436519 0.190436 0.752801
816 1 0.374504 0.188289 0.818978
845 1 0.376884 0.25271 0.745083
846 1 0.437692 0.313277 0.752375
847 1 0.435952 0.252671 0.821449
848 1 0.376395 0.315892 0.814558
724 1 0.498807 0.313279 0.688757
852 1 0.502062 0.314804 0.818081
692 1 0.505519 0.18841 0.687943
849 1 0.501978 0.251614 0.748763
820 1 0.497977 0.192851 0.810848
696 1 0.623583 0.186498 0.683228
723 1 0.564634 0.250542 0.691842
728 1 0.627713 0.310404 0.688208
818 1 0.563563 0.18663 0.75102
824 1 0.627747 0.18218 0.821927
850 1 0.561651 0.315859 0.747374
851 1 0.562338 0.249984 0.809234
853 1 0.629576 0.250992 0.753565
856 1 0.631019 0.313328 0.816407
700 1 0.744943 0.183953 0.695917
727 1 0.687955 0.251329 0.693922
731 1 0.807551 0.245063 0.690058
732 1 0.750494 0.318386 0.684729
822 1 0.684198 0.181005 0.753404
826 1 0.809777 0.194577 0.755911
828 1 0.748606 0.186788 0.810937
854 1 0.688762 0.316043 0.748139
855 1 0.692718 0.250314 0.81436
857 1 0.749115 0.254642 0.754153
858 1 0.811203 0.309358 0.749324
859 1 0.809979 0.247911 0.818871
860 1 0.746212 0.31264 0.812161
708 1 1.00125 0.312259 0.68958
704 1 0.87956 0.192165 0.690935
735 1 0.939809 0.250499 0.686428
736 1 0.874176 0.308407 0.68
830 1 0.936228 0.184718 0.753388
832 1 0.872394 0.189605 0.818815
861 1 0.872665 0.254661 0.746892
862 1 0.938158 0.309703 0.749341
863 1 0.938332 0.24954 0.813898
864 1 0.87309 0.304879 0.814245
1005 1 0.372345 0.376523 0.875702
739 1 0.066964 0.369716 0.686358
744 1 0.125845 0.437711 0.685311
866 1 0.0584013 0.440385 0.747779
867 1 0.0585787 0.377739 0.806291
869 1 0.125693 0.374197 0.743293
872 1 0.125656 0.434444 0.814931
865 1 0.997626 0.376724 0.747026
740 1 0.998862 0.436218 0.687237
743 1 0.186784 0.376839 0.686499
747 1 0.312185 0.36811 0.67958
748 1 0.251346 0.436798 0.68547
870 1 0.18667 0.435758 0.748506
871 1 0.185648 0.376315 0.804861
873 1 0.251924 0.372342 0.741554
874 1 0.316103 0.435145 0.746005
875 1 0.314533 0.376668 0.812755
876 1 0.253255 0.43484 0.807277
1020 1 0.757524 0.439004 0.93935
751 1 0.442692 0.376365 0.682044
752 1 0.377857 0.433119 0.681883
877 1 0.375791 0.375459 0.747118
878 1 0.440166 0.435229 0.748558
879 1 0.441228 0.376954 0.815383
880 1 0.377109 0.433026 0.818929
881 1 0.497237 0.377221 0.744318
884 1 0.502473 0.440294 0.814457
756 1 0.501644 0.437889 0.683695
755 1 0.562233 0.378585 0.686682
760 1 0.622389 0.443601 0.693733
882 1 0.561518 0.434401 0.751978
883 1 0.568747 0.373091 0.817475
885 1 0.623237 0.370562 0.749724
888 1 0.626202 0.441925 0.810593
1019 1 0.814265 0.375477 0.934915
759 1 0.683395 0.380932 0.687335
763 1 0.811626 0.377612 0.694129
764 1 0.741246 0.436623 0.68632
886 1 0.689556 0.438787 0.747395
887 1 0.685338 0.378282 0.812015
889 1 0.750683 0.374587 0.749568
890 1 0.809148 0.440894 0.75171
891 1 0.810737 0.373108 0.81345
892 1 0.751886 0.439707 0.81405
1018 1 0.8089 0.43857 0.87185
932 1 -0.00139067 0.188227 0.934265
960 1 0.873608 0.189582 0.939844
1008 1 0.378646 0.434393 0.942404
1007 1 0.438422 0.376476 0.935622
868 1 -0.00322461 0.429838 0.811978
1017 1 0.749348 0.375384 0.877974
767 1 0.931064 0.36753 0.687908
768 1 0.873003 0.432752 0.691938
893 1 0.872208 0.375758 0.757565
894 1 0.934229 0.438475 0.747136
895 1 0.937075 0.370027 0.810846
896 1 0.870936 0.441832 0.813234
1014 1 0.687353 0.441906 0.875553
1015 1 0.690772 0.378207 0.942622
1012 1 0.500155 0.440277 0.940036
898 1 0.0623248 0.0642602 0.879142
904 1 0.118934 0.0606588 0.942175
931 1 0.0649516 0.122943 0.937379
933 1 0.127568 0.128076 0.874174
929 1 0.0020116 0.127227 0.879112
900 1 0.000759682 0.0580404 0.937114
1006 1 0.442363 0.439766 0.876755
655 1 0.440283 -0.00440078 0.690931
902 1 0.186514 0.0599845 0.879195
906 1 0.312865 0.0577353 0.876842
908 1 0.250388 0.0619363 0.937411
935 1 0.187488 0.120401 0.936534
937 1 0.250578 0.128261 0.873995
939 1 0.306174 0.124009 0.940178
1013 1 0.631858 0.379579 0.877206
1004 1 0.248726 0.438826 0.941135
991 1 0.936328 0.24801 0.938436
998 1 0.192004 0.441633 0.873182
910 1 0.435063 0.0664207 0.877788
912 1 0.371204 0.0613356 0.938753
941 1 0.375281 0.124237 0.877438
943 1 0.435176 0.128495 0.940504
997 1 0.128259 0.373247 0.873797
999 1 0.183635 0.377579 0.93355
1000 1 0.124682 0.437692 0.937206
916 1 0.496021 0.0638796 0.93013
945 1 0.497796 0.131062 0.869381
914 1 0.562106 0.0621735 0.870541
920 1 0.621225 0.0627722 0.935179
947 1 0.555101 0.124235 0.933678
949 1 0.627007 0.123545 0.883042
1003 1 0.314062 0.373484 0.941452
1010 1 0.56777 0.436764 0.878561
918 1 0.685272 0.0617715 0.8761
922 1 0.809491 0.0668833 0.875016
924 1 0.750606 0.0587638 0.936149
951 1 0.686164 0.123329 0.943057
953 1 0.748621 0.128465 0.881093
955 1 0.80936 0.119788 0.943434
954 1 0.813262 0.182569 0.883106
1001 1 0.245843 0.374754 0.873702
992 1 0.875366 0.311137 0.935597
926 1 0.939654 0.0571989 0.874369
928 1 0.875721 0.0579646 0.933855
957 1 0.881501 0.125554 0.88184
959 1 0.940154 0.129154 0.937744
1002 1 0.312477 0.43945 0.879926
1016 1 0.631367 0.437664 0.941382
930 1 0.0631577 0.189292 0.872901
936 1 0.131946 0.18546 0.939778
962 1 0.0609175 0.316977 0.878404
963 1 0.0594037 0.246884 0.938557
965 1 0.121602 0.253437 0.874446
968 1 0.126396 0.315904 0.935129
961 1 0.00618091 0.253252 0.872069
1009 1 0.505048 0.374378 0.881498
659 1 0.564112 0.000313111 0.697867
934 1 0.187702 0.185483 0.877061
938 1 0.310193 0.18682 0.874728
940 1 0.248697 0.192445 0.940741
966 1 0.188006 0.315751 0.871711
967 1 0.185932 0.254188 0.937601
969 1 0.254067 0.250618 0.879714
970 1 0.313372 0.315896 0.880142
971 1 0.308971 0.253453 0.940975
972 1 0.248096 0.318922 0.93818
1665 1 1.00504 0.502219 0.627696
973 1 0.371827 0.246622 0.879983
944 1 0.367218 0.186894 0.940248
942 1 0.434586 0.185436 0.877877
976 1 0.372303 0.309884 0.937771
975 1 0.437151 0.251545 0.936483
974 1 0.436656 0.316681 0.878
980 1 0.499514 0.312311 0.940522
1011 1 0.569355 0.378616 0.94259
977 1 0.499196 0.252373 0.87663
948 1 0.501463 0.188216 0.941911
979 1 0.563463 0.256291 0.939447
978 1 0.568817 0.314161 0.87457
952 1 0.62445 0.190336 0.941478
946 1 0.560133 0.193137 0.877198
984 1 0.632297 0.312627 0.937261
981 1 0.622814 0.248405 0.874814
989 1 0.874215 0.245897 0.878404
995 1 0.0611582 0.379388 0.93988
985 1 0.743129 0.248928 0.880185
950 1 0.690116 0.18858 0.87746
983 1 0.68358 0.244777 0.946425
987 1 0.811481 0.250363 0.93999
956 1 0.750726 0.186112 0.940921
988 1 0.741936 0.312444 0.942305
982 1 0.687216 0.311794 0.87762
986 1 0.814728 0.308126 0.87866
1685 1 0.6295 0.493627 0.619179
1543 1 0.187167 0.501426 0.563635
643 1 0.0602964 0.00475864 0.684338
641 1 0.997968 0.00494186 0.623149
909 1 0.377731 -0.00170585 0.873262
1921 1 1.0057 0.496602 0.868729
1551 1 0.44039 0.495161 0.561508
1693 1 0.873414 0.494796 0.625838
667 1 0.812774 0.00453597 0.688845
1673 1 0.251885 0.497124 0.620386
797 1 0.874212 0.00243609 0.754186
1823 1 0.934529 0.499321 0.807704
1539 1 0.0615874 0.500951 0.55893
539 1 0.816023 0.0048805 0.562021
795 1 0.812928 0.00434707 0.818067
789 1 0.627697 0.00840436 0.753961
651 1 0.319954 0.0025621 0.687941
610 1 0.0589303 0.435337 0.506906
105 1 0.24089 0.374379 1.00066
569 1 0.751551 0.122976 0.503359
622 1 0.443689 0.432758 0.494647
1545 1 0.246696 0.501417 0.500115
45 1 0.371948 0.125041 1.00151
630 1 0.693003 0.438045 0.500933
106 1 0.308707 0.439522 1.00032
1557 1 0.624405 0.499578 0.50257
66 1 0.0629527 0.306414 1.00113
574 1 0.938259 0.188895 0.50279
617 1 0.251624 0.372984 0.498373
1549 1 0.382075 0.496172 0.497808
549 1 0.12002 0.124954 0.504357
109 1 0.377409 0.370399 0.999678
22 1 0.689833 0.0552659 0.996218
538 1 0.809879 0.0612325 0.501937
530 1 0.563769 0.0647628 0.496823
565 1 0.633057 0.120381 0.501727
534 1 0.684511 0.0590615 0.499053
94 1 0.940736 0.316053 0.996672
554 1 0.311841 0.18481 0.493428
605 1 0.869494 0.248031 0.503454
545 1 0.000836314 0.12669 0.50278
561 1 0.500815 0.130687 0.499463
562 1 0.562096 0.192935 0.501964
594 1 0.570494 0.310475 0.499598
70 1 0.187414 0.314215 0.995436
638 1 0.933817 0.441414 0.507004
613 1 0.122757 0.37728 0.507067
553 1 0.252168 0.122383 0.508927
529 1 0.498064 0.00196686 0.499332
598 1 0.688452 0.31158 0.505736
626 1 0.56153 0.432213 0.497876
589 1 0.377595 0.251464 0.500018
581 1 0.126411 0.253789 0.501567
597 1 0.632957 0.249846 0.506381
522 1 0.309035 0.0629851 0.498933
618 1 0.316785 0.43753 0.50408
25 1 0.749943 -0.00168218 0.997284
17 1 0.49784 0.000533077 0.998945
1032 1 0.121715 0.567841 0.0581706
1059 1 0.0616712 0.632312 0.0590649
1154 1 0.0668507 0.564922 0.128135
1189 1 0.12706 0.624978 0.125792
1303 1 0.686887 0.498546 0.314272
1036 1 0.250307 0.561842 0.0534581
1063 1 0.180995 0.626577 0.0572035
1067 1 0.310624 0.623542 0.0587446
1158 1 0.18258 0.560827 0.121246
1162 1 0.308894 0.561083 0.121626
1193 1 0.246429 0.621927 0.122397
1173 1 0.621947 0.505216 0.12766
1629 1 0.875252 0.754415 0.498359
1102 1 0.431371 0.811515 -0.00203503
1040 1 0.376473 0.561546 0.0713141
1071 1 0.436994 0.631784 0.0608481
1166 1 0.435664 0.574914 0.127145
1197 1 0.371056 0.634962 0.125361
1201 1 0.495674 0.624502 0.123397
1044 1 0.492512 0.562788 0.0683873
1047 1 0.68566 0.507362 0.0642785
517 1 0.121843 0.999391 0.500467
1546 1 0.316595 0.558534 0.50038
1048 1 0.618157 0.563253 0.0658057
1075 1 0.560056 0.619988 0.0637044
1170 1 0.553979 0.563105 0.130893
1205 1 0.620652 0.62695 0.125706
133 1 0.129687 0.996889 0.130186
1052 1 0.759816 0.570334 0.0602691
1079 1 0.686675 0.625583 0.0662786
1083 1 0.824588 0.627803 0.0653205
1174 1 0.690572 0.562662 0.132245
1178 1 0.813236 0.561508 0.119607
1209 1 0.751394 0.625113 0.12683
1281 1 1.00264 0.498659 0.24712
415 1 0.940064 1.00001 0.440506
1185 1 0.00563966 0.632158 0.125392
1028 1 0.00406876 0.566774 0.066571
1056 1 0.878185 0.564284 0.0647284
1087 1 0.937085 0.628354 0.0647682
1182 1 0.944624 0.559645 0.122583
1213 1 0.87754 0.629943 0.12905
1045 1 0.626538 0.502703 0.00472309
1042 1 0.557895 0.565848 -4.2701e-05
1085 1 0.879041 0.628087 -0.00349504
1064 1 0.120961 0.693895 0.0592766
1091 1 0.06427 0.753337 0.0686125
1096 1 0.127912 0.811978 0.0604893
1186 1 0.0636492 0.694901 0.124809
1218 1 0.0650339 0.816282 0.124318
1221 1 0.123373 0.753015 0.122607
1092 1 1.00294 0.817077 0.0630367
1217 1 -0.000805195 0.75495 0.126156
1060 1 0.00126228 0.690008 0.0632966
1586 1 0.563567 0.690822 0.496718
261 1 0.122902 1.00619 0.251651
1605 1 0.128644 0.749996 0.499853
1068 1 0.245016 0.686358 0.0632761
1095 1 0.188588 0.753489 0.0615379
1099 1 0.312404 0.750153 0.0625773
1100 1 0.257501 0.815589 0.0634385
1190 1 0.187525 0.684084 0.124199
1194 1 0.307564 0.686689 0.122865
1222 1 0.188318 0.817391 0.126886
1225 1 0.252275 0.748299 0.121161
1226 1 0.314937 0.808751 0.132515
1183 1 0.943527 0.503742 0.188071
159 1 0.934929 0.997384 0.19163
1577 1 0.249622 0.623964 0.497687
1072 1 0.374436 0.688799 0.0597274
1103 1 0.429683 0.748748 0.060813
1104 1 0.373338 0.815769 0.062906
1198 1 0.437623 0.686373 0.126556
1229 1 0.377494 0.745402 0.126998
1230 1 0.433103 0.815047 0.129973
1076 1 0.498948 0.690233 0.0619703
1283 1 0.060422 0.498188 0.311456
1233 1 0.498495 0.744624 0.129327
1108 1 0.488201 0.807763 0.0638636
1080 1 0.622932 0.686512 0.0619411
1107 1 0.559558 0.755266 0.0617867
1112 1 0.624624 0.812892 0.062648
1202 1 0.561138 0.683853 0.126457
1234 1 0.551942 0.809339 0.125371
1237 1 0.620598 0.751361 0.124696
1121 1 0.00222625 0.87474 -0.00380614
1653 1 0.621082 0.876555 0.502236
1084 1 0.751101 0.688458 0.0621534
1111 1 0.690671 0.746166 0.0587766
1115 1 0.808717 0.755329 0.0591742
1116 1 0.745896 0.811557 0.0679755
1206 1 0.689732 0.69046 0.126955
1210 1 0.812617 0.690122 0.122513
1238 1 0.685509 0.813125 0.123392
1241 1 0.746 0.747968 0.125197
1242 1 0.813708 0.814098 0.120523
1293 1 0.366437 0.501874 0.250438
1425 1 0.501839 0.498269 0.373598
1088 1 0.872866 0.690598 0.0618325
1119 1 0.94286 0.749331 0.0666618
1120 1 0.874339 0.812162 0.0577234
1214 1 0.939711 0.688254 0.124692
1245 1 0.875113 0.753353 0.127667
1246 1 0.934144 0.817553 0.124331
1307 1 0.817828 0.501377 0.311828
131 1 0.0615363 0.997756 0.186416
1077 1 0.628349 0.629333 0.0031508
1566 1 0.936575 0.564584 0.503887
1123 1 0.0644805 0.875521 0.0628198
1128 1 0.126806 0.937585 0.0666707
1250 1 0.0679056 0.935841 0.127669
1253 1 0.131115 0.875295 0.122201
1249 1 1.00515 0.879547 0.122952
1533 1 0.881335 0.873469 0.374849
1534 1 0.940824 0.939057 0.376195
1175 1 0.687935 0.504504 0.191872
1535 1 0.943041 0.875542 0.43515
1437 1 0.876973 0.500721 0.380491
413 1 0.879143 1.00133 0.379559
1127 1 0.186066 0.875013 0.0612147
1131 1 0.312193 0.878664 0.0588556
1132 1 0.248783 0.935129 0.0608904
1254 1 0.190455 0.934945 0.129257
1257 1 0.254604 0.872662 0.126508
1258 1 0.312866 0.938911 0.125445
1051 1 0.816155 0.49904 0.0629992
1536 1 0.87725 0.936287 0.440134
1135 1 0.438942 0.8733 0.0627346
1136 1 0.374587 0.937643 0.0655574
1261 1 0.370496 0.875887 0.127913
1262 1 0.439128 0.93388 0.125012
1140 1 0.507943 0.934898 0.0635214
1508 1 1.00509 0.935257 0.439047
1265 1 0.498689 0.875527 0.127143
1139 1 0.561208 0.868516 0.0677095
1144 1 0.618581 0.936126 0.0606493
1266 1 0.565271 0.937027 0.132392
1269 1 0.621961 0.872361 0.127206
3 1 0.0639591 0.991686 0.0643275
1309 1 0.882284 0.498878 0.251772
1574 1 0.192255 0.689824 0.499608
1291 1 0.313018 0.500621 0.318242
1143 1 0.681852 0.874426 0.0654438
1147 1 0.804714 0.877641 0.0584946
1148 1 0.746199 0.94011 0.0589346
1270 1 0.682049 0.935619 0.121647
1273 1 0.74752 0.878423 0.122561
1274 1 0.808199 0.942216 0.121547
279 1 0.68551 1.00126 0.312557
129 1 0.996903 0.993736 0.124855
1124 1 0.999152 0.937574 0.0561965
1151 1 0.936 0.872348 0.0578794
1152 1 0.870815 0.940113 0.0551809
1277 1 0.869429 0.872687 0.12394
1278 1 0.929386 0.939536 0.124844
1609 1 0.255309 0.747883 0.502076
1637 1 0.130453 0.873371 0.50203
1638 1 0.179995 0.93641 0.49511
1570 1 0.0573561 0.684387 0.498821
1526 1 0.687828 0.937728 0.371948
1160 1 0.126535 0.558021 0.185438
1187 1 0.0625849 0.627249 0.185935
1282 1 0.0668974 0.562065 0.248558
1288 1 0.12167 0.560324 0.312224
1315 1 0.0706615 0.624181 0.313929
1317 1 0.130958 0.62377 0.24505
1284 1 0.00583151 0.56208 0.311712
1313 1 0.007804 0.626392 0.246614
1156 1 0.00325178 0.560362 0.190504
1164 1 0.248346 0.562304 0.18593
1191 1 0.185545 0.615842 0.186141
1195 1 0.309533 0.629962 0.188631
1286 1 0.190109 0.559101 0.2482
1290 1 0.307518 0.566744 0.247822
1292 1 0.24783 0.559053 0.31428
1319 1 0.188528 0.626011 0.306953
1321 1 0.251783 0.628622 0.250871
1323 1 0.308972 0.626553 0.314718
1301 1 0.620955 0.500173 0.24822
1168 1 0.372323 0.568245 0.185107
1199 1 0.441574 0.629817 0.192618
1294 1 0.434733 0.563222 0.251903
1296 1 0.36959 0.566993 0.310323
1325 1 0.372018 0.626694 0.254867
1327 1 0.439468 0.624887 0.313214
1172 1 0.496074 0.560856 0.189455
1300 1 0.502 0.563502 0.312809
1329 1 0.504016 0.626479 0.254757
1176 1 0.619255 0.567381 0.185008
1203 1 0.559547 0.625154 0.190205
1298 1 0.56322 0.561931 0.247879
1304 1 0.627577 0.560903 0.313331
1331 1 0.561998 0.62292 0.317784
1333 1 0.621152 0.625519 0.254165
1527 1 0.688257 0.879298 0.439768
1529 1 0.756309 0.875139 0.382016
1180 1 0.752097 0.566632 0.189051
1207 1 0.685926 0.625369 0.191161
1211 1 0.81425 0.631948 0.191688
1302 1 0.682449 0.56371 0.250367
1306 1 0.816242 0.556228 0.250204
1308 1 0.750756 0.558764 0.315113
1335 1 0.688926 0.622243 0.320634
1337 1 0.744045 0.624396 0.255784
1339 1 0.811534 0.623805 0.312182
1562 1 0.816823 0.567625 0.502108
1530 1 0.813764 0.934873 0.378408
1589 1 0.623979 0.627296 0.499207
1184 1 0.8822 0.567041 0.183521
1215 1 0.944192 0.627492 0.188116
1310 1 0.942895 0.561307 0.2488
1312 1 0.871949 0.564178 0.317006
1341 1 0.879107 0.625579 0.25412
1343 1 0.947007 0.621961 0.307385
1192 1 0.131115 0.683966 0.183892
1219 1 0.0674389 0.754704 0.186592
1224 1 0.126161 0.813969 0.186523
1314 1 0.0680224 0.692986 0.243581
1320 1 0.126281 0.68697 0.30416
1346 1 0.0571668 0.810959 0.24972
1347 1 0.0632005 0.746613 0.308281
1349 1 0.129488 0.747931 0.249182
1352 1 0.127164 0.809826 0.314396
1188 1 0.00507888 0.690892 0.186988
1345 1 0.00119322 0.751227 0.244817
1196 1 0.247488 0.688214 0.19084
1223 1 0.18768 0.750946 0.182794
1227 1 0.309612 0.744724 0.186045
1228 1 0.251547 0.815544 0.184866
1318 1 0.186781 0.6868 0.246305
1322 1 0.309913 0.693617 0.253738
1324 1 0.248787 0.688404 0.313608
1350 1 0.185313 0.818744 0.245398
1351 1 0.194534 0.75138 0.306693
1353 1 0.247522 0.752039 0.24131
1354 1 0.311966 0.808804 0.250631
1355 1 0.318769 0.749812 0.319295
1356 1 0.255141 0.811673 0.309447
1200 1 0.373618 0.689974 0.191056
1231 1 0.438985 0.748685 0.189063
1232 1 0.374066 0.809357 0.188052
1326 1 0.442114 0.68661 0.252265
1328 1 0.368889 0.68516 0.313499
1357 1 0.37505 0.750256 0.2486
1358 1 0.435675 0.809763 0.249417
1359 1 0.436239 0.754246 0.309111
1360 1 0.373417 0.811788 0.316779
1332 1 0.500049 0.685031 0.309796
1364 1 0.493384 0.811419 0.314839
1236 1 0.493758 0.812526 0.18729
1361 1 0.50298 0.750074 0.254949
1204 1 0.500542 0.686042 0.193227
1208 1 0.626463 0.687101 0.1895
1235 1 0.561809 0.743136 0.190024
1240 1 0.624091 0.813931 0.190819
1330 1 0.558909 0.682731 0.254824
1336 1 0.621751 0.687577 0.317314
1362 1 0.561151 0.809282 0.24729
1363 1 0.560923 0.751605 0.313449
1365 1 0.620337 0.746647 0.25373
1368 1 0.619167 0.811617 0.312153
1212 1 0.749953 0.689576 0.190487
1239 1 0.684874 0.750061 0.19079
1243 1 0.810815 0.750069 0.180552
1244 1 0.740926 0.812283 0.185744
1334 1 0.681465 0.684111 0.258028
1338 1 0.813945 0.686986 0.255509
1340 1 0.750426 0.682372 0.313888
1366 1 0.68451 0.809603 0.250364
1367 1 0.68515 0.75078 0.317402
1369 1 0.749066 0.748293 0.249999
1370 1 0.809101 0.811979 0.24609
1371 1 0.808129 0.753293 0.311989
1372 1 0.745064 0.815517 0.312779
1220 1 0.997129 0.81781 0.184856
1348 1 -0.00222909 0.812497 0.310994
1316 1 1.00169 0.688033 0.313816
1216 1 0.884783 0.686724 0.188919
1247 1 0.938346 0.754136 0.184036
1248 1 0.870493 0.810986 0.185505
1342 1 0.940497 0.697332 0.24778
1344 1 0.87516 0.690957 0.313535
1373 1 0.876843 0.752874 0.246913
1374 1 0.937188 0.815144 0.245444
1375 1 0.934454 0.75563 0.311306
1376 1 0.876809 0.816196 0.314339
1531 1 0.815934 0.880372 0.438833
1251 1 0.063532 0.8739 0.186984
1256 1 0.121762 0.937233 0.196024
1378 1 0.0574043 0.936701 0.250249
1379 1 0.0597172 0.873829 0.316658
1381 1 0.116849 0.875623 0.252252
1384 1 0.125787 0.938801 0.310536
1380 1 1.00136 0.943533 0.315823
1252 1 -0.00719188 0.933372 0.188372
1377 1 0.000626505 0.874252 0.252552
1532 1 0.749315 0.937218 0.439146
521 1 0.251074 0.998802 0.499269
1171 1 0.559622 0.503346 0.185738
283 1 0.814911 0.999185 0.309557
1255 1 0.18963 0.877231 0.189236
1259 1 0.313979 0.873929 0.189178
1260 1 0.247089 0.940716 0.18969
1382 1 0.188948 0.939913 0.25003
1383 1 0.188957 0.88221 0.308799
1385 1 0.25237 0.873644 0.250073
1386 1 0.311225 0.936164 0.248267
1387 1 0.316593 0.877136 0.308029
1388 1 0.247721 0.938262 0.307538
1601 1 1.00287 0.745976 0.499002
1523 1 0.562289 0.877394 0.434541
1528 1 0.62368 0.936603 0.436911
1263 1 0.437507 0.871587 0.19179
1264 1 0.373388 0.935549 0.188737
1389 1 0.371978 0.870534 0.249594
1390 1 0.43236 0.932902 0.249249
1391 1 0.431744 0.87349 0.314904
1392 1 0.367343 0.943442 0.310988
1268 1 0.49613 0.942611 0.192611
1396 1 0.498309 0.940553 0.312688
1393 1 0.495566 0.873869 0.254638
1267 1 0.55879 0.875119 0.189005
1272 1 0.626573 0.938225 0.193183
1394 1 0.561531 0.936594 0.256259
1395 1 0.560652 0.877109 0.312856
1397 1 0.613699 0.872482 0.25069
1400 1 0.621341 0.936871 0.314367
1525 1 0.627109 0.876284 0.375568
1271 1 0.683787 0.874683 0.19115
1275 1 0.805979 0.87157 0.183801
1276 1 0.748843 0.937192 0.184868
1398 1 0.683795 0.93504 0.25202
1399 1 0.682418 0.868532 0.310272
1401 1 0.747778 0.8721 0.248649
1402 1 0.806683 0.933345 0.243771
1403 1 0.809781 0.870982 0.31444
1404 1 0.750575 0.942409 0.311777
1522 1 0.568476 0.938155 0.37469
1279 1 0.936667 0.879047 0.183324
1280 1 0.869915 0.934088 0.183769
1405 1 0.869442 0.876882 0.247866
1406 1 0.935316 0.934466 0.25651
1407 1 0.937645 0.878023 0.314223
1408 1 0.869478 0.930122 0.312236
1618 1 0.558346 0.818018 0.495463
409 1 0.751877 0.997656 0.376386
1305 1 0.751083 0.503607 0.25313
31 1 0.938918 0.993971 0.0595803
1521 1 0.499042 0.872796 0.378732
1410 1 0.0611685 0.558666 0.37236
1416 1 0.124895 0.55738 0.4352
1443 1 0.0585368 0.619816 0.436514
1445 1 0.128295 0.623593 0.376054
1412 1 -0.00511925 0.559094 0.434686
1518 1 0.429338 0.934582 0.374578
1520 1 0.368746 0.93849 0.434462
1420 1 0.249614 0.559223 0.440058
1451 1 0.310651 0.622121 0.437585
1449 1 0.254881 0.627728 0.377366
1447 1 0.190128 0.623913 0.431953
1414 1 0.182718 0.559378 0.375554
1418 1 0.309217 0.562676 0.377359
1287 1 0.185557 0.496251 0.315317
1513 1 0.24923 0.871408 0.377254
1519 1 0.438981 0.875852 0.437862
1453 1 0.371147 0.623254 0.373739
1424 1 0.376015 0.560756 0.435194
1455 1 0.440931 0.627496 0.434591
1422 1 0.434185 0.564586 0.375465
1457 1 0.499021 0.621627 0.375451
399 1 0.435781 0.998262 0.435986
285 1 0.873823 0.991237 0.249447
1515 1 0.309767 0.871154 0.438312
1428 1 0.499274 0.559046 0.436223
1461 1 0.627532 0.62128 0.377358
1426 1 0.561017 0.563377 0.380348
1432 1 0.624385 0.558574 0.439741
1459 1 0.564936 0.629283 0.433041
1516 1 0.243371 0.941488 0.436029
1439 1 0.936035 0.501341 0.442321
1465 1 0.749842 0.6271 0.376215
1467 1 0.818775 0.625488 0.43319
1430 1 0.688356 0.560756 0.377042
1434 1 0.812255 0.56429 0.371967
1436 1 0.752582 0.560079 0.43764
1463 1 0.6852 0.625221 0.434697
1441 1 0.00271455 0.622554 0.37461
1471 1 0.938708 0.63051 0.43707
1438 1 0.938892 0.566292 0.376081
1440 1 0.875538 0.565926 0.440249
1469 1 0.878442 0.629384 0.377523
1517 1 0.369907 0.873471 0.376734
1476 1 -0.00205133 0.813843 0.438686
1444 1 1.00295 0.69014 0.432799
1442 1 0.0594544 0.686268 0.376092
1480 1 0.125826 0.807929 0.433515
1448 1 0.122875 0.687167 0.439435
1474 1 0.0628083 0.803157 0.368584
1475 1 0.0626622 0.747058 0.435888
1477 1 0.127658 0.74637 0.374414
27 1 0.812492 0.996393 0.0573933
1514 1 0.304735 0.936493 0.374044
1483 1 0.313317 0.753826 0.439837
1484 1 0.251557 0.810212 0.433527
1478 1 0.188966 0.809655 0.373595
1481 1 0.253348 0.754507 0.37018
1482 1 0.314378 0.813887 0.374181
1479 1 0.186687 0.747945 0.43732
1450 1 0.312347 0.687888 0.375176
1452 1 0.246967 0.692478 0.433
1446 1 0.184662 0.685646 0.372736
387 1 0.059819 0.996559 0.444224
1510 1 0.189233 0.939003 0.371997
1485 1 0.373222 0.751334 0.381681
1487 1 0.439064 0.747618 0.438346
1488 1 0.374672 0.818554 0.43901
1454 1 0.433267 0.689598 0.374128
1486 1 0.432846 0.811075 0.375356
1456 1 0.370856 0.682 0.436742
1492 1 0.495949 0.809054 0.435028
157 1 0.86951 1.00007 0.122763
1511 1 0.186527 0.872679 0.433213
1554 1 0.562263 0.562083 0.498531
1460 1 0.50128 0.687409 0.433488
1490 1 0.564563 0.816574 0.377383
1464 1 0.627441 0.693265 0.433895
1496 1 0.626276 0.816663 0.44033
1493 1 0.621618 0.756969 0.376451
1491 1 0.560487 0.750884 0.436007
1489 1 0.495182 0.747261 0.374968
1458 1 0.56136 0.687997 0.373122
15 1 0.43886 0.99837 0.0661381
145 1 0.502061 0.998971 0.126287
1524 1 0.498596 0.942905 0.435549
1311 1 0.939826 0.505939 0.311161
1177 1 0.755036 0.503349 0.122794
1500 1 0.752654 0.816531 0.439862
1462 1 0.683735 0.689609 0.372878
1466 1 0.814883 0.695254 0.371277
1495 1 0.689798 0.751189 0.440071
1494 1 0.689714 0.817538 0.375928
1497 1 0.752119 0.75071 0.371818
1499 1 0.81757 0.753319 0.442051
1498 1 0.816359 0.816792 0.376829
1468 1 0.758943 0.689901 0.435008
141 1 0.37926 0.996956 0.125584
1501 1 0.875586 0.757677 0.374581
1502 1 0.937234 0.813755 0.377404
1472 1 0.879784 0.689667 0.435922
1503 1 0.940379 0.752745 0.437804
1504 1 0.874789 0.813964 0.44151
1473 1 0.998996 0.748867 0.377621
1470 1 0.935574 0.694226 0.375132
1027 1 0.0630402 0.505923 0.0647134
1505 1 0.00163839 0.865616 0.37522
1512 1 0.122327 0.93801 0.434384
1507 1 0.0651597 0.874576 0.433315
1506 1 0.0623006 0.935816 0.371439
1509 1 0.12684 0.872495 0.370074
23 1 0.681281 1.00332 0.0594739
1581 1 0.376718 0.620121 0.496259
1138 1 0.55985 0.939291 -0.00527113
1179 1 0.817582 0.505416 0.181763
1409 1 0.998398 0.498384 0.375285
143 1 0.434076 0.996829 0.186154
1169 1 0.491819 0.499414 0.128149
1617 1 0.507275 0.751725 0.5037
1181 1 0.877898 0.505283 0.122545
1035 1 0.303984 0.50144 0.0598503
139 1 0.311676 1.00027 0.191663
1435 1 0.814263 0.499071 0.435391
1558 1 0.688104 0.564027 0.494503
281 1 0.746327 0.998349 0.245585
7 1 0.187342 0.997578 0.0612022
401 1 0.500298 0.998467 0.375144
1041 1 0.497624 0.500507 0.0027068
149 1 0.622862 1.00579 0.125069
11 1 0.312364 0.994636 0.0589504
407 1 0.689776 0.998976 0.43672
1157 1 0.124453 0.499193 0.123292
1625 1 0.754245 0.751947 0.499718
1133 1 0.379333 0.87645 0.00433359
1630 1 0.939589 0.818684 0.497831
1602 1 0.0624248 0.80829 0.496981
1118 1 0.945462 0.813824 0.0028551
1606 1 0.191759 0.809705 0.495776
1578 1 0.314001 0.68385 0.496994
1654 1 0.68549 0.936014 0.501924
1590 1 0.690937 0.689704 0.496772
1597 1 0.874596 0.625219 0.502794
1649 1 0.501961 0.87894 0.497578
1086 1 0.943626 0.692214 -0.00210998
1050 1 0.821328 0.561013 0.00402726
1593 1 0.750899 0.625729 0.49321
1641 1 0.250538 0.871392 0.494655
1101 1 0.374015 0.751625 -0.00503186
1569 1 0.000439008 0.616241 0.495876
1117 1 0.874925 0.752791 0.00135664
1585 1 0.498932 0.624482 0.494888
1582 1 0.439247 0.691173 0.495928
1122 1 0.0613826 0.934935 -0.00411627
1594 1 0.813257 0.685659 0.499193
537 1 0.750446 1.003 0.502718
1062 1 0.185078 0.690943 -0.000245368
1090 1 0.0648525 0.816219 0.00915597
1134 1 0.439022 0.938354 0.00250492
1565 1 0.874667 0.507588 0.500929
1073 1 0.49627 0.630764 0.00113541
1034 1 0.316371 0.562343 0.00231866
1074 1 0.561711 0.69066 -0.00303642
1029 1 0.124776 0.501807 -0.0011992
1125 1 0.124445 0.875274 0.000452497
1097 1 0.249183 0.74711 0.00546758
1646 1 0.440972 0.934649 0.498205
1094 1 0.188996 0.811667 -0.00309176
1046 1 0.688175 0.5659 0.00168993
533 1 0.629569 0.999487 0.500569
1642 1 0.308214 0.936802 0.497924
1561 1 0.753588 0.508058 0.50157
1037 1 0.375002 0.497151 0.00203905
1544 1 0.133447 0.564449 0.562189
1571 1 0.0646566 0.620318 0.557352
1666 1 0.060153 0.562098 0.619907
1701 1 0.124849 0.618741 0.624188
1540 1 0.999379 0.564449 0.564047
1089 1 0.0092556 0.751752 0.999647
543 1 0.937682 1.00057 0.565929
1626 1 0.810945 0.819892 0.501923
1548 1 0.254086 0.561466 0.564433
1575 1 0.192699 0.622116 0.563968
1579 1 0.319346 0.620002 0.560847
1670 1 0.194754 0.56137 0.621976
1674 1 0.316945 0.558397 0.621505
1705 1 0.249582 0.62051 0.625002
665 1 0.750057 1.00272 0.627789
787 1 0.556509 0.999525 0.812686
1945 1 0.748936 0.508214 0.872051
1552 1 0.379749 0.561068 0.561698
1583 1 0.444072 0.628255 0.56081
1678 1 0.440255 0.557738 0.624733
1709 1 0.378092 0.623257 0.621349
1713 1 0.504357 0.627914 0.622961
1556 1 0.50313 0.565516 0.556891
5 1 0.121442 0.997591 1.0034
649 1 0.258422 1.00202 0.627106
1542 1 0.189079 0.563436 0.499143
1560 1 0.628424 0.563608 0.561023
1587 1 0.562177 0.627014 0.556307
1682 1 0.560294 0.562965 0.622431
1717 1 0.624671 0.623783 0.615322
2020 1 0.000863728 0.934337 0.938129
2045 1 0.875019 0.874272 0.876578
1126 1 0.185089 0.936479 0.998429
1110 1 0.689663 0.813515 1.00335
785 1 0.500901 0.997086 0.748589
1137 1 0.503442 0.872045 1.00092
2046 1 0.942066 0.930309 0.877995
1564 1 0.75395 0.570179 0.565234
1591 1 0.694451 0.628956 0.5575
1595 1 0.813989 0.628531 0.561198
1686 1 0.689409 0.561277 0.619258
1690 1 0.814444 0.563848 0.625658
1721 1 0.750695 0.625326 0.625561
1538 1 0.0584311 0.557696 0.497014
1559 1 0.69123 0.506397 0.552585
2047 1 0.932282 0.873175 0.935241
531 1 0.562015 0.996847 0.560289
1697 1 0.996887 0.627351 0.624838
1568 1 0.880656 0.559646 0.56448
1599 1 0.936122 0.63359 0.561708
1694 1 0.937673 0.564325 0.630537
1725 1 0.875527 0.631788 0.626377
1671 1 0.184229 0.500444 0.69033
1681 1 0.504575 0.501505 0.623992
917 1 0.623832 0.997916 0.878155
2048 1 0.874445 0.938379 0.932399
1576 1 0.125209 0.681267 0.560457
1603 1 0.0683837 0.751514 0.56336
1608 1 0.128158 0.815593 0.561894
1698 1 0.0656035 0.681206 0.621456
1730 1 0.064919 0.811165 0.625158
1733 1 0.127084 0.741582 0.621914
1572 1 0.000794183 0.685282 0.558283
1604 1 0.00350272 0.810224 0.561377
1811 1 0.562375 0.503329 0.812415
671 1 0.937286 0.99984 0.686299
1662 1 0.9407 0.934756 0.501228
1580 1 0.249634 0.688392 0.563032
1607 1 0.191583 0.751072 0.559473
1611 1 0.313616 0.748409 0.562293
1612 1 0.253374 0.809825 0.560585
1702 1 0.190176 0.688268 0.622708
1706 1 0.306041 0.688041 0.622683
1734 1 0.192714 0.812085 0.621464
1737 1 0.252952 0.749426 0.623536
1738 1 0.319707 0.805484 0.625552
1149 1 0.870459 0.878182 0.994714
1687 1 0.688264 0.500562 0.683708
1584 1 0.374766 0.688335 0.551848
1615 1 0.439381 0.749817 0.560173
1616 1 0.374465 0.810478 0.559825
1710 1 0.434519 0.684502 0.618618
1741 1 0.376268 0.746955 0.620297
1742 1 0.436238 0.807579 0.624285
1620 1 0.497616 0.812345 0.558024
799 1 0.943184 0.998446 0.813568
1588 1 0.499529 0.691119 0.558674
1745 1 0.500429 0.750488 0.623042
1592 1 0.626863 0.6922 0.56052
1619 1 0.563963 0.757713 0.560905
1624 1 0.630053 0.810659 0.562348
1714 1 0.562286 0.687974 0.622654
1746 1 0.565916 0.813237 0.627399
1749 1 0.624667 0.754438 0.621528
1596 1 0.751488 0.688853 0.55813
1623 1 0.694306 0.748452 0.560733
1627 1 0.80824 0.754493 0.561851
1628 1 0.753852 0.816664 0.564267
1718 1 0.684269 0.68848 0.623161
1722 1 0.806804 0.687811 0.624449
1750 1 0.691281 0.816123 0.623048
1753 1 0.743573 0.752031 0.626269
1754 1 0.812479 0.810152 0.624459
897 1 0.00395908 0.99722 0.876938
1058 1 0.0632007 0.688635 0.992157
1729 1 0.998461 0.742309 0.6236
1600 1 0.872136 0.690492 0.565375
1631 1 0.937396 0.749965 0.565498
1632 1 0.874713 0.816162 0.55887
1726 1 0.93298 0.68805 0.623701
1757 1 0.874326 0.752917 0.619809
1758 1 0.936213 0.81463 0.625085
907 1 0.314407 0.997684 0.936342
541 1 0.876224 0.996557 0.497835
1635 1 0.0633006 0.872804 0.564593
1640 1 0.124442 0.931566 0.564762
1762 1 0.0644993 0.940607 0.628306
1765 1 0.12297 0.87447 0.628351
1761 1 -0.00249914 0.871997 0.622248
783 1 0.437328 1.00319 0.80877
1807 1 0.438902 0.497491 0.812607
653 1 0.375685 0.997082 0.622285
1639 1 0.196307 0.873668 0.55734
1643 1 0.310891 0.872998 0.558296
1644 1 0.24813 0.937161 0.561104
1766 1 0.19348 0.933046 0.624211
1769 1 0.254392 0.871717 0.623237
1770 1 0.320446 0.931077 0.6243
2038 1 0.683238 0.938508 0.879905
911 1 0.433256 0.997959 0.931586
1813 1 0.626061 0.502571 0.751615
1541 1 0.130418 0.499241 0.502822
1647 1 0.440957 0.87592 0.557155
1648 1 0.375681 0.933441 0.555439
1773 1 0.379953 0.874255 0.623655
1774 1 0.438453 0.939619 0.625629
1652 1 0.49587 0.939341 0.560899
1777 1 0.498857 0.879322 0.627146
1931 1 0.312058 0.498006 0.935586
13 1 0.377136 0.996827 1.00082
1145 1 0.740595 0.878605 0.993973
1651 1 0.564603 0.874311 0.561279
1656 1 0.626993 0.940393 0.563673
1778 1 0.563274 0.93634 0.625095
1781 1 0.624357 0.876953 0.62632
1661 1 0.877753 0.881023 0.502267
1129 1 0.257532 0.873075 1.00029
901 1 0.123328 0.994839 0.876723
1655 1 0.684507 0.878361 0.566495
1659 1 0.809579 0.876813 0.563256
1660 1 0.750382 0.939681 0.564861
1782 1 0.686473 0.940953 0.625593
1785 1 0.7491 0.885203 0.629631
1786 1 0.817073 0.940061 0.623636
2039 1 0.686448 0.874831 0.933646
2041 1 0.746415 0.877557 0.870894
1636 1 0.00307385 0.938742 0.566645
1663 1 0.939682 0.872022 0.562957
1664 1 0.875413 0.93884 0.56465
1789 1 0.873105 0.876735 0.619914
1790 1 0.935873 0.939546 0.631653
2042 1 0.81398 0.94038 0.868366
1645 1 0.375277 0.877042 0.492675
1672 1 0.125742 0.567474 0.68905
1699 1 0.0650245 0.627186 0.685968
1794 1 0.0634767 0.558739 0.750008
1800 1 0.124067 0.558202 0.808437
1827 1 0.0638958 0.626834 0.816552
1829 1 0.126159 0.625509 0.751375
1796 1 0.00277203 0.557061 0.809037
1668 1 -0.000968911 0.563338 0.690794
1825 1 -0.0035692 0.623789 0.752513
2043 1 0.806807 0.875857 0.932465
1927 1 0.185365 0.496475 0.937041
1573 1 0.124059 0.62239 0.498264
1819 1 0.812783 0.500997 0.814928
1676 1 0.249844 0.559795 0.688387
1703 1 0.188081 0.626839 0.68375
1707 1 0.31741 0.618112 0.681576
1798 1 0.18593 0.558756 0.749725
1802 1 0.310471 0.558985 0.745502
1804 1 0.248855 0.563032 0.809221
1831 1 0.189847 0.623106 0.813037
1833 1 0.250761 0.623821 0.744863
1835 1 0.310986 0.623085 0.809597
1675 1 0.3068 0.500973 0.683339
1547 1 0.311837 0.502402 0.560293
2044 1 0.751878 0.939796 0.930179
1680 1 0.374414 0.556392 0.691071
1711 1 0.436455 0.626449 0.689386
1806 1 0.442528 0.557653 0.747232
1808 1 0.379384 0.55499 0.811114
1837 1 0.378694 0.625046 0.752976
1839 1 0.437135 0.622136 0.8165
1684 1 0.503678 0.567306 0.68819
1812 1 0.499077 0.562895 0.806877
645 1 0.126228 1.00006 0.624336
775 1 0.186714 1.00239 0.814147
925 1 0.883097 0.993309 0.87376
1841 1 0.498988 0.624418 0.74633
1688 1 0.627658 0.564116 0.683416
1715 1 0.568535 0.624808 0.68278
1810 1 0.562426 0.568602 0.751361
1816 1 0.626851 0.56362 0.815728
1843 1 0.560028 0.628879 0.813586
1845 1 0.62536 0.620504 0.749742
1669 1 0.120956 0.502144 0.623355
1692 1 0.750799 0.562636 0.685563
1719 1 0.685386 0.630651 0.686153
1723 1 0.810926 0.625225 0.687565
1814 1 0.68583 0.566877 0.750844
1818 1 0.809576 0.563716 0.753437
1820 1 0.745722 0.566661 0.81427
1847 1 0.684203 0.627273 0.812373
1849 1 0.746243 0.625179 0.749441
1851 1 0.812615 0.626917 0.811308
1817 1 0.745705 0.50449 0.751043
1696 1 0.873175 0.561119 0.691849
1727 1 0.934909 0.627762 0.686009
1822 1 0.931526 0.560455 0.749728
1824 1 0.873473 0.56339 0.809509
1853 1 0.875115 0.625077 0.751275
1855 1 0.935628 0.621296 0.812487
1704 1 0.128353 0.687419 0.687036
1731 1 0.0624458 0.749185 0.680977
1736 1 0.126492 0.809005 0.68228
1826 1 0.0626662 0.687756 0.748832
1832 1 0.126421 0.68845 0.810168
1858 1 0.0579624 0.806499 0.750453
1859 1 0.0606473 0.748772 0.809209
1861 1 0.13064 0.749438 0.748301
1864 1 0.124227 0.806204 0.809976
1700 1 0.995563 0.685984 0.6892
1860 1 0.995019 0.81191 0.809201
1732 1 1.00309 0.810711 0.686971
1708 1 0.250159 0.685853 0.680615
1735 1 0.190362 0.75191 0.684892
1739 1 0.315214 0.747058 0.68696
1740 1 0.25414 0.807774 0.683647
1830 1 0.190069 0.684259 0.74909
1834 1 0.311503 0.684859 0.742192
1836 1 0.249772 0.685546 0.811707
1862 1 0.191032 0.806741 0.74821
1863 1 0.192016 0.748192 0.810913
1865 1 0.252272 0.748536 0.747568
1866 1 0.314758 0.811125 0.74459
1867 1 0.315964 0.750692 0.812291
1868 1 0.250269 0.812055 0.811199
1712 1 0.375727 0.687755 0.68842
1743 1 0.439335 0.745449 0.685267
1744 1 0.379293 0.810855 0.689029
1838 1 0.436331 0.685491 0.750921
1840 1 0.375588 0.683401 0.813939
1869 1 0.37619 0.749875 0.753553
1870 1 0.437795 0.814525 0.751945
1871 1 0.439881 0.750127 0.812518
1872 1 0.372108 0.816812 0.809356
1844 1 0.496822 0.687781 0.810134
1876 1 0.501962 0.815038 0.815758
1748 1 0.497237 0.810692 0.687718
1716 1 0.503429 0.687379 0.685986
1873 1 0.495596 0.754803 0.751711
1720 1 0.623234 0.691168 0.683254
1747 1 0.561822 0.746678 0.688943
1752 1 0.62373 0.809945 0.690518
1842 1 0.564834 0.682752 0.748047
1848 1 0.624805 0.686352 0.809741
1874 1 0.561698 0.815739 0.750216
1875 1 0.558197 0.751414 0.811255
1877 1 0.620116 0.75156 0.752428
1880 1 0.624575 0.813017 0.809978
1724 1 0.749695 0.68847 0.685493
1751 1 0.692576 0.75171 0.691689
1755 1 0.812207 0.749685 0.689627
1756 1 0.749694 0.811715 0.685778
1846 1 0.682914 0.685384 0.746585
1850 1 0.808761 0.689406 0.748818
1852 1 0.750147 0.684627 0.809722
1878 1 0.687442 0.814716 0.75264
1879 1 0.688421 0.745465 0.805799
1881 1 0.750549 0.745269 0.755008
1882 1 0.814445 0.812236 0.749477
1883 1 0.814526 0.747811 0.811441
1884 1 0.750966 0.810878 0.813855
1857 1 1.00055 0.74676 0.748371
1828 1 0.999797 0.685621 0.814724
1728 1 0.874359 0.692927 0.685177
1759 1 0.935113 0.751386 0.685838
1760 1 0.878554 0.815465 0.689273
1854 1 0.937625 0.690272 0.756341
1856 1 0.874865 0.680569 0.812533
1885 1 0.876878 0.745312 0.744364
1886 1 0.936606 0.81289 0.744156
1887 1 0.935144 0.752766 0.809087
1888 1 0.874985 0.813923 0.810937
1054 1 0.933773 0.562501 1.00197
1935 1 0.436164 0.493586 0.943566
1763 1 0.0570808 0.872898 0.690979
1768 1 0.120113 0.940396 0.692057
1890 1 0.0599289 0.935941 0.752742
1891 1 0.0603491 0.874171 0.811264
1893 1 0.11836 0.869928 0.753907
1896 1 0.119479 0.936572 0.815576
1764 1 0.000587651 0.938981 0.686254
1892 1 0.997989 0.936065 0.809648
669 1 0.87525 1.00082 0.622063
1767 1 0.185409 0.876519 0.682263
1771 1 0.315855 0.868842 0.682275
1772 1 0.25333 0.93391 0.68384
1894 1 0.186512 0.940192 0.754582
1895 1 0.184396 0.874766 0.816194
1897 1 0.249595 0.875838 0.744818
1898 1 0.31729 0.936801 0.74917
1899 1 0.316197 0.874947 0.806292
1900 1 0.251744 0.939438 0.81398
2032 1 0.372291 0.935891 0.938567
793 1 0.752671 1.00292 0.75358
2031 1 0.440417 0.873806 0.938492
2036 1 0.498717 0.935052 0.936645
1775 1 0.436292 0.87185 0.682143
1776 1 0.372363 0.939642 0.682178
1901 1 0.377733 0.878103 0.742968
1902 1 0.434621 0.94068 0.748254
1903 1 0.440949 0.877744 0.808887
1904 1 0.374612 0.940379 0.810598
1780 1 0.498357 0.938675 0.685176
1908 1 0.497885 0.936358 0.81057
2034 1 0.561596 0.937942 0.875203
1905 1 0.499698 0.876385 0.748598
1779 1 0.560653 0.87373 0.684396
1784 1 0.624667 0.940247 0.68461
1906 1 0.561714 0.934762 0.743289
1907 1 0.561507 0.873982 0.810609
1909 1 0.621615 0.876934 0.74495
1912 1 0.624151 0.940617 0.807941
2022 1 0.189438 0.937087 0.873853
2035 1 0.563116 0.87774 0.940744
1783 1 0.683499 0.877728 0.687337
1787 1 0.816142 0.87639 0.685653
1788 1 0.749469 0.945155 0.689621
1910 1 0.683608 0.939472 0.748244
1911 1 0.68994 0.880196 0.809725
1913 1 0.750276 0.878997 0.745199
1914 1 0.812371 0.938209 0.750121
1915 1 0.808821 0.876404 0.811848
1916 1 0.751605 0.939809 0.811659
2040 1 0.624284 0.938812 0.936492
1889 1 0.998915 0.877016 0.749526
2037 1 0.627021 0.878236 0.869657
1791 1 0.943058 0.876104 0.683335
1792 1 0.872441 0.935864 0.692832
1917 1 0.874912 0.874786 0.750304
1918 1 0.936578 0.935364 0.747597
1919 1 0.935686 0.870484 0.812714
1920 1 0.876345 0.933042 0.814227
1922 1 0.0598053 0.559942 0.872448
1928 1 0.116202 0.562056 0.933603
1955 1 0.0585485 0.626865 0.93349
1957 1 0.121644 0.620798 0.87369
1953 1 0.998159 0.61874 0.871652
1613 1 0.372995 0.749493 0.494059
1683 1 0.562126 0.500085 0.685815
2033 1 0.504096 0.876593 0.87142
1069 1 0.374998 0.629647 0.997714
1963 1 0.311739 0.618649 0.94006
1961 1 0.259275 0.625926 0.873923
1932 1 0.252961 0.563199 0.939143
1959 1 0.187297 0.621825 0.942989
1926 1 0.193467 0.564888 0.874499
1930 1 0.317255 0.557799 0.877785
2025 1 0.253493 0.866269 0.874627
1936 1 0.377765 0.562829 0.938849
1967 1 0.433961 0.621471 0.940017
1934 1 0.435654 0.562077 0.881666
1965 1 0.375511 0.621949 0.878579
1969 1 0.503433 0.621496 0.873674
1951 1 0.935487 0.4973 0.935862
1065 1 0.244818 0.629342 1.00228
1142 1 0.679744 0.940952 1.00109
1109 1 0.624767 0.747934 1.00184
1940 1 0.497119 0.56007 0.934872
1938 1 0.56431 0.562656 0.881153
1944 1 0.627597 0.567574 0.939445
1971 1 0.561067 0.628287 0.944055
1973 1 0.620562 0.62835 0.875667
1 1 0.00329005 0.994882 0.999917
1106 1 0.559736 0.809859 0.998724
1977 1 0.745861 0.62926 0.871193
1975 1 0.688323 0.625623 0.932612
1948 1 0.759051 0.568097 0.937553
1979 1 0.811881 0.626106 0.938558
1942 1 0.683962 0.56681 0.873431
1946 1 0.813183 0.571473 0.874804
1082 1 0.81224 0.69003 1.00469
919 1 0.687864 0.994137 0.936215
1924 1 0.995052 0.559324 0.934904
1952 1 0.874799 0.562317 0.937959
1981 1 0.882694 0.621163 0.876466
1983 1 0.937337 0.627719 0.934794
1950 1 0.942886 0.557449 0.870673
1821 1 0.873224 0.49879 0.7471
1960 1 0.126522 0.687872 0.929854
1986 1 0.0632014 0.809921 0.877494
1987 1 0.0628483 0.746406 0.930625
1989 1 0.126333 0.745166 0.873321
1992 1 0.128642 0.811757 0.937595
1954 1 0.0562704 0.691498 0.870361
1956 1 0.998305 0.689523 0.933859
1555 1 0.56407 0.502544 0.559914
2023 1 0.188958 0.874097 0.938295
1993 1 0.243855 0.744864 0.876653
1996 1 0.245077 0.806083 0.932613
1964 1 0.242984 0.689504 0.93746
1990 1 0.183286 0.812189 0.874878
1991 1 0.183763 0.750015 0.932907
1962 1 0.308427 0.690318 0.877091
1958 1 0.186886 0.681878 0.871247
1995 1 0.309234 0.751481 0.939355
1994 1 0.313128 0.806002 0.871636
1999 1 0.441324 0.754029 0.936749
1997 1 0.383068 0.748423 0.873873
1998 1 0.437888 0.811204 0.873891
2000 1 0.373983 0.815435 0.933359
1966 1 0.434238 0.688331 0.878521
1968 1 0.369423 0.689442 0.933577
2004 1 0.503045 0.815979 0.934707
2001 1 0.500555 0.748847 0.874876
1972 1 0.502321 0.69202 0.932432
2028 1 0.248705 0.927938 0.933591
1537 1 -0.00274391 0.500186 0.501767
2026 1 0.313428 0.940455 0.869253
2030 1 0.442086 0.935365 0.875314
1970 1 0.561659 0.690496 0.873941
2005 1 0.623243 0.749278 0.868146
2002 1 0.566664 0.816006 0.873047
1976 1 0.624508 0.690296 0.934492
2003 1 0.564927 0.75646 0.934028
2008 1 0.627931 0.81462 0.929203
2029 1 0.375035 0.878642 0.870559
2027 1 0.311085 0.868752 0.933349
2009 1 0.749594 0.752537 0.877064
2006 1 0.689335 0.81297 0.869296
1974 1 0.683598 0.690973 0.873378
1978 1 0.807354 0.690828 0.877604
2007 1 0.684895 0.751344 0.940095
2012 1 0.742412 0.812389 0.93529
2011 1 0.810711 0.751031 0.941425
1980 1 0.751219 0.685419 0.937169
2010 1 0.815091 0.808893 0.871499
1939 1 0.559919 0.496837 0.944788
1985 1 0.99722 0.751333 0.874381
2013 1 0.871301 0.746353 0.87604
2015 1 0.934167 0.751994 0.939537
2016 1 0.870999 0.810128 0.939054
2014 1 0.935759 0.814163 0.874564
1982 1 0.933062 0.684558 0.871836
1984 1 0.870724 0.687759 0.937976
1988 1 -0.00323919 0.810109 0.937338
2024 1 0.125453 0.929988 0.936786
2019 1 0.061587 0.871006 0.939559
2017 1 0.996311 0.868225 0.880542
2021 1 0.123169 0.869693 0.878729
2018 1 0.0605042 0.928374 0.873854
1677 1 0.375135 0.498685 0.627513
1026 1 0.0583489 0.559385 1.00121
899 1 0.0672077 0.992376 0.935996
903 1 0.190662 0.995224 0.937853
1657 1 0.749682 0.878825 0.503697
1114 1 0.802834 0.818563 0.998536
905 1 0.253656 0.999949 0.877429
1949 1 0.875962 0.504327 0.873764
1801 1 0.246156 0.496629 0.750929
1815 1 0.687592 0.499882 0.811973
1081 1 0.750845 0.630232 1.00522
657 1 0.508139 1.00053 0.625324
1947 1 0.820622 0.502124 0.940751
647 1 0.187213 1.00075 0.688299
1943 1 0.687174 0.506906 0.938086
1658 1 0.809314 0.938185 0.506405
791 1 0.687918 0.998292 0.814542
1025 1 0.99635 0.501498 0.99953
1141 1 0.626297 0.866976 0.996425
927 1 0.942169 0.997789 0.93717
1057 1 0.997483 0.616952 0.998874
527 1 0.440011 1.002 0.56236
1078 1 0.688072 0.689488 0.994851
1093 1 0.12574 0.754405 0.999195
1070 1 0.441225 0.69082 0.995138
1598 1 0.936816 0.694074 0.498993
1038 1 0.437533 0.561805 1.00167
1066 1 0.309826 0.690516 0.998251
1098 1 0.314916 0.814412 1.0003
9 1 0.25033 0.995246 0.996732
1650 1 0.558985 0.933986 0.499585
1610 1 0.314395 0.812355 0.501407
1621 1 0.626644 0.755709 0.494251
1130 1 0.308346 0.936685 0.993011
1614 1 0.433756 0.810042 0.500739
1033 1 0.247501 0.49737 0.999418
1105 1 0.499805 0.748177 0.997368
1113 1 0.748941 0.75063 0.997297
1622 1 0.687039 0.814739 0.507809
1049 1 0.754615 0.506527 1.00246
1633 1 0.00583256 0.87585 0.502233
1030 1 0.183248 0.557276 0.9963
513 1 0.998055 0.996316 0.504671
1061 1 0.117877 0.629023 0.989176
1634 1 0.0697254 0.934708 0.50406
1150 1 0.936624 0.934626 0.992364
1550 1 0.43939 0.559519 0.49382
1146 1 0.81007 0.93828 0.991674
525 1 0.372889 0.995584 0.499587
| [
"[email protected]"
] | |
04c9978ad6a95cfed263e81ffc0cdeaba8a93b6c | ab460d3c0c3cbc4bd45542caea46fed8b1ee8c26 | /dprs/common/sftp/PySFTPAuthException.py | a6d9b4cf620bb2d34ac77e41957792eefe8c126a | [
"Unlicense"
] | permissive | sone777/automl-dprs | 8c7f977402f6819565c45acd1cb27d8d53c40144 | 63572d1877079d8390b0e4a3153edf470056acf0 | refs/heads/main | 2023-09-03T21:54:43.440111 | 2021-11-02T14:44:35 | 2021-11-02T14:44:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 288 | py | # -*- coding: utf-8 -*-
# Author : Jin Kim
# e-mail : [email protected]
# Powered by Seculayer © 2020 AI Service Model Team, R&D Center.
class PySFTPAuthException(Exception):
def __str__(self):
return "[ERROR-C0001] Authentication failed. check username and password!"
| [
"[email protected]"
] | |
5406a0bd1a39311a7a0f09d7800aa9d20636919f | c631e9756210bab774afda2b228853cb93ae28fe | /src/test/test_trainer_attention.py | 5e4bbec6ab8ba0a3a905e64b3e3157bbcaafa0c8 | [] | no_license | AIRob/pytorch-chat-bot | 9a9af2078ef4ee6b5ce5a10a75977fb0b5adfe6a | 1b604f9fecee70e519a930525afaa83facbfaf68 | refs/heads/master | 2020-03-27T10:00:35.117537 | 2017-12-09T01:38:40 | 2017-12-09T01:38:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,541 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
from unittest import TestCase
from data.data_loader_attention import DataLoaderAttention
from models.encoder import Encoder
from models.decoder import Decoder
from models.trainer import Trainer
class TestTrainerAttention(TestCase):
def test_train_method(self):
file_name = 'test/test_data/attention_test.txt'
fine_tune_model_name = '../models/glove_model_40.pth'
self.test_data_loader_attention = DataLoaderAttention(file_name=file_name)
        source2index, index2source, target2index, index2target, train_data = \
            self.test_data_loader_attention.load_data()
EMBEDDING_SIZE = 50
HIDDEN_SIZE = 32
encoder = Encoder(len(source2index), EMBEDDING_SIZE, HIDDEN_SIZE, 3, True)
decoder = Decoder(len(target2index), EMBEDDING_SIZE, HIDDEN_SIZE*2)
self.trainer = Trainer(
fine_tune_model=fine_tune_model_name
)
self.trainer.train_attention(train_data=train_data,
source2index=source2index,
target2index=target2index,
index2source=index2source,
index2target=index2target,
encoder_model=encoder,
decoder_model=decoder,
)
| [
"[email protected]"
] | |
8403194c971606033bb11b869b9d4c323b5903ff | 2e00546708761532e0081dc9be928b58307c5941 | /setup.py | 6f30596ede067a7daf4e98a7a4a82ac3164c7708 | [
"BSD-3-Clause"
] | permissive | gijs/bulbs | 5f16b9d748face55f514f73c849745af91a8bd97 | 650e03d1ee635d0d8f40557f4697b3a85b88cdff | refs/heads/master | 2021-01-18T06:23:04.496132 | 2011-07-15T15:00:49 | 2011-07-15T15:00:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,726 | py | """
Bulbs
-----
Bulbs is a Python persistence framework for graph databases that
connects to Rexster.
"""
from setuptools import Command, setup
class run_audit(Command):
"""Audits source code using PyFlakes for following issues:
- Names which are used but not defined or used before they are defined.
- Names which are redefined without having been used.
"""
description = "Audit source code with PyFlakes"
user_options = []
def initialize_options(self):
        self.all = None
def finalize_options(self):
pass
def run(self):
import os, sys
try:
import pyflakes.scripts.pyflakes as flakes
except ImportError:
print "Audit requires PyFlakes installed in your system."""
sys.exit(-1)
dirs = ['bulbs', 'tests']
# Add example directories
#for dir in ['blog',]:
# dirs.append(os.path.join('examples', dir))
# TODO: Add test subdirectories
warns = 0
for dir in dirs:
for filename in os.listdir(dir):
if filename.endswith('.py') and filename != '__init__.py':
warns += flakes.checkPath(os.path.join(dir, filename))
if warns > 0:
print ("Audit finished with total %d warnings." % warns)
else:
print ("No problems found in sourcecode.")
def run_tests():
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), 'tests'))
from bulbs_tests import suite
return suite()
setup (
name = 'Bulbs',
version = '0.2-dev',
url = 'http://bulbflow.com',
license = 'BSD',
author = 'James Thornton',
author_email = '[email protected]',
description = 'A Python persistence framework for graph databases that '
'connects to Rexster.',
long_description = __doc__,
keywords = "graph database DB persistence framework rexster gremlin",
packages = ['bulbs'],
zip_safe=False,
platforms='any',
install_requires=[
'httplib2>=0.7.1',
'simplejson>=2.1.6',
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Topic :: Database",
"Topic :: Database :: Front-Ends",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Distributed Computing",
],
cmdclass={'audit': run_audit},
test_suite='__main__.run_tests'
)
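
# Added note (illustrative): with the cmdclass/test_suite wiring above, the
# custom commands are run from the shell, e.g.
#   python setup.py audit   # PyFlakes source audit defined in run_audit
#   python setup.py test    # runs the bulbs test suite via run_tests()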
| [
"[email protected]"
] | |
2e6261677ddc3501e9d60c2a0868e8ae1938e26e | f33e2e9e10a7c8a5ecc9997f86548bad071ce33e | /alerta/app/exceptions.py | 6c0b0caccf525a16c4431797256c413948898f77 | [
"Apache-2.0"
] | permissive | sasha-astiadi/alerta | 01f1136adbfc26f79935c1c44e9ca3d49efd6f00 | f9a33f50af562e5d0a470e1091e9d696d76558f4 | refs/heads/master | 2023-03-16T10:35:42.300274 | 2018-01-23T14:06:42 | 2018-01-23T14:06:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 431 | py |
class AlertaException(IOError):
pass
class RejectException(AlertaException):
"""The alert was rejected because the format did not meet the required policy."""
pass
class RateLimit(AlertaException):
"""Too many alerts have been received for a resource or from an origin."""
pass
class BlackoutPeriod(AlertaException):
"""Alert was not processed becauese it was sent during a blackout period."""
pass
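
# --- Added illustrative sketch (hypothetical hook, not part of the original
# module): one way a policy check might use these exceptions. ---
def check_policy(alert, required_attrs=('environment', 'service')):
    """Reject alerts that are missing mandatory attributes."""
    for attr in required_attrs:
        if not getattr(alert, attr, None):
            raise RejectException('rejected: missing attribute %r' % attr)
    return alert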
| [
"[email protected]"
] | |
b71941a91b5406892fc0962d46ddbf6b15406fb4 | 64d923ab490341af97c4e7f6d91bf0e6ccefdf4b | /tensorforce/core/networks/auto.py | 6a445b051b06ae683e4435e6f34e5c608037ef5b | [
"Apache-2.0"
] | permissive | tensorforce/tensorforce | 38d458fedeeaa481adf083397829cea434d020cd | 1bf4c3abb471062fb66f9fe52852437756fd527b | refs/heads/master | 2023-08-17T17:35:34.578444 | 2023-08-14T20:14:08 | 2023-08-14T20:14:08 | 85,491,050 | 1,312 | 246 | Apache-2.0 | 2023-08-14T20:14:10 | 2017-03-19T16:24:22 | Python | UTF-8 | Python | false | false | 7,625 | py | # Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import logging
from tensorforce import TensorforceError
from tensorforce.core.networks import LayeredNetwork
class AutoNetwork(LayeredNetwork):
"""
Network whose architecture is automatically configured based on input types and shapes,
offering high-level customization (specification key: `auto`).
Args:
size (int > 0): Layer size, before concatenation if multiple states
(<span style="color:#00C000"><b>default</b></span>: 64).
depth (int > 0): Number of layers per state, before concatenation if multiple states
(<span style="color:#00C000"><b>default</b></span>: 2).
final_size (int > 0): Layer size after concatenation if multiple states
(<span style="color:#00C000"><b>default</b></span>: layer size).
final_depth (int > 0): Number of layers after concatenation if multiple states
(<span style="color:#00C000"><b>default</b></span>: 1).
rnn (false | [parameter](../modules/parameters.html), int >= 0): Whether to add an LSTM cell
with internal state as last layer, and if so, horizon of the LSTM for truncated
backpropagation through time
(<span style="color:#00C000"><b>default</b></span>: false).
device (string): Device name
(<span style="color:#00C000"><b>default</b></span>: inherit value of parent module).
l2_regularization (float >= 0.0): Scalar controlling L2 regularization
(<span style="color:#00C000"><b>default</b></span>: inherit value of parent module).
name (string): <span style="color:#0000C0"><b>internal use</b></span>.
inputs_spec (specification): <span style="color:#0000C0"><b>internal use</b></span>.
outputs (iter[string]): <span style="color:#0000C0"><b>internal use</b></span>.
"""
def __init__(
self, *, size=64, depth=2, final_size=None, final_depth=1, rnn=False, device=None,
l2_regularization=None, name=None, inputs_spec=None, outputs=None,
# Deprecated
internal_rnn=None
):
if internal_rnn is not None:
raise TensorforceError.deprecated(
name='AutoNetwork', argument='internal_rnn', replacement='rnn'
)
if len(inputs_spec) == 1:
if final_size is not None:
raise TensorforceError.invalid(
name='AutoNetwork', argument='final_size', condition='input size = 1'
)
if final_depth is not None and final_depth != 1:
raise TensorforceError.invalid(
name='AutoNetwork', argument='final_depth', condition='input size = 1'
)
if len(inputs_spec) > 8:
logging.warning("Large number of state components {} which may cause poor performance, "
"consider merging components where possible.".format(len(inputs_spec)))
if outputs is not None:
raise TensorforceError.invalid(
name='policy', argument='single_output', condition='AutoNetwork'
)
if final_size is None:
final_size = size
if final_depth is None:
final_depth = 0
layers = list()
for input_name, spec in inputs_spec.items():
if len(inputs_spec) == 1:
state_layers = layers
else:
state_layers = list()
layers.append(state_layers)
# Retrieve input state
if input_name is None:
prefix = ''
else:
prefix = input_name + '_'
state_layers.append(dict(
type='retrieve', name=(prefix + 'retrieve'), tensors=(input_name,)
))
# Embed bool and int states
requires_embedding = (spec.type == 'bool' or spec.type == 'int')
if spec.type == 'int' and spec.num_values is None:
if input_name is None:
raise TensorforceError.required(
name='state', argument='num_values', condition='state type is int'
)
else:
raise TensorforceError.required(
name=(input_name + ' state'), argument='num_values',
condition='state type is int'
)
if requires_embedding:
state_layers.append(dict(
type='embedding', name=(prefix + 'embedding'), size=size
))
# Shape-specific layer type
if spec.rank == 1 - requires_embedding:
layer = 'dense'
elif spec.rank == 2 - requires_embedding:
layer = 'conv1d'
elif spec.rank == 3 - requires_embedding:
layer = 'conv2d'
elif spec.rank == 0:
state_layers.append(dict(type='flatten', name=(prefix + 'flatten')))
layer = 'dense'
else:
raise TensorforceError.value(
name='AutoNetwork', argument='input rank', value=spec.rank, hint='>= 3'
)
# Repeat layer according to depth (one less if embedded)
for n in range(depth - requires_embedding):
state_layers.append(dict(
type=layer, name='{}{}{}'.format(prefix, layer, n), size=size
))
# Max pool if rank greater than one
if spec.rank > 1 - requires_embedding:
state_layers.append(dict(
type='pooling', name=(prefix + 'pooling'), reduction='max'
))
# Register state-specific embedding
if input_name is not None:
state_layers.append(dict(
type='register', name=(prefix + 'register'), tensor=(input_name + '-embedding')
))
# Final combined layers
if len(inputs_spec) == 1:
final_layers = layers
else:
final_layers = list()
layers.append(final_layers)
# Retrieve state-specific embeddings
final_layers.append(dict(
type='retrieve', name='retrieve',
tensors=tuple(input_name + '-embedding' for input_name in inputs_spec),
aggregation='concat'
))
# Repeat layer according to depth
for n in range(final_depth):
final_layers.append(dict(type='dense', name=('dense' + str(n)), size=final_size))
# Rnn
if rnn is not None and rnn is not False:
final_layers.append(dict(type='lstm', name='lstm', size=final_size, horizon=rnn))
super().__init__(
layers=layers, device=device, l2_regularization=l2_regularization, name=name,
inputs_spec=inputs_spec, outputs=outputs
)
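
# --- Added usage note (illustrative; the surrounding agent setup is assumed):
# an AutoNetwork is normally configured via a specification dict such as
#
#     network = dict(type='auto', size=64, depth=2, final_size=128,
#                    final_depth=1, rnn=10)
#
# passed as the `network` argument when creating a Tensorforce agent, which
# resolves the 'auto' key to this class.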
| [
"[email protected]"
] | |
4aadfcd20a040ed6e5cbe84affd38b0320fa6928 | e12385c85e41d98bc3104f3e4dde22025a0b6365 | /m5stack-u105/examples/test_saw.py | f4472bdcdd32643ae248bca4c8a0e8d2eb67553a | [] | no_license | mchobby/esp8266-upy | 6ee046856ec03c900ebde594967dd50c5f0a8e21 | 75184da49e8578315a26bc42d9c3816ae5d5afe8 | refs/heads/master | 2023-08-04T15:11:03.031121 | 2023-07-27T15:43:08 | 2023-07-27T15:43:08 | 72,998,023 | 47 | 30 | null | 2021-06-20T16:12:59 | 2016-11-06T15:00:57 | Python | UTF-8 | Python | false | false | 515 | py | """
Test the MicroPython driver for M5Stack U105, DDS unit (AD9833), I2C grove.
Set SAWTOOTH signal output (this has a fixed frequency)
* Author(s):
30 may 2021: Meurisse D. (shop.mchobby.be) - Initial Writing
"""
from machine import I2C
from mdds import *
from time import sleep
# Pico - I2C(0) - sda=GP8, scl=GP9
i2c = I2C(0)
# M5Stack core
# i2c = I2C( sda=Pin(21), scl=Pin(22) )
dds = DDS(i2c)
# Generates the SAW TOOTH signal at 55.9Hz (fixed frequency)
dds.quick_out( SAWTOOTH_MODE, freq=1, phase=0 )
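
# Added note: in SAWTOOTH_MODE the AD9833 output rate is fixed, so the freq
# and phase arguments above are effectively placeholders. A frequency-settable
# waveform would use another mode, e.g. (SINUS_MODE is assumed from the
# driver's naming and should be checked against mdds):
# dds.quick_out( SINUS_MODE, freq=1000, phase=0 )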
| [
"[email protected]"
] | |
d04e9de9a1c3e8805f81d233500ea425bbc2a27d | 55646e56d6bb31ae0913eb71879f49efdfaf904f | /scribbli/profiles/constants.py | 1dc3127b96a3d1fc9dcffb567188491d639a6e3d | [] | no_license | jacobbridges/scribbli-mvp | 2d8851aba018b54431af0eb8cb030d02d35f173f | c24f2f1a2a19480a6b5f69ffbcccf0269d156140 | refs/heads/master | 2023-02-22T11:37:12.239845 | 2021-06-17T04:10:30 | 2021-06-17T04:10:30 | 156,637,826 | 0 | 0 | null | 2023-02-15T20:18:03 | 2018-11-08T02:20:53 | Python | UTF-8 | Python | false | false | 328 | py | class RoleChoices(object):
Guest = 0
User = 1
Moderator = 2
Admin = 3
@staticmethod
def as_choices():
return (
(RoleChoices.Guest, "Guest"),
(RoleChoices.User, "User"),
(RoleChoices.Moderator, "Moderator"),
(RoleChoices.Admin, "Admin"),
)
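
# Added illustrative sketch (hypothetical Django model, not part of this
# module): as_choices() is shaped for an integer field's `choices` option.
#
#   from django.db import models
#
#   class Profile(models.Model):
#       role = models.PositiveSmallIntegerField(
#           choices=RoleChoices.as_choices(), default=RoleChoices.Guest)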
| [
"[email protected]"
] | |
ba426f1e79cb391d274343cd87e1ffbf76f2fa37 | 1fc6750d4553b1c7c81837ec1855377f444dacdd | /Test/pigLatin/__init__.py | 9cf86a56f5cbda3cf701a8ef4cf627a73908d109 | [] | no_license | Yasaman1997/My_Python_Training | a6234c86ef911a366e02ce0d0ed177a0a68157d5 | 11d0496ba97d97f16a3d168aacdda5a6e47abcf7 | refs/heads/master | 2023-03-19T08:33:42.057127 | 2021-03-16T07:43:57 | 2021-03-16T07:43:57 | 91,189,940 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | def Pig_Latin(s):
    s = raw_input("input a word")
    while s:  # stop on empty input
        print s[1:] + s[0] + "ay"  # e.g. "hello" -> "ellohay"
        s = raw_input("input a word")
| [
"[email protected]"
] | |
2bfe0ce34f0883cb0a19b9e1ddc4a134e88153f8 | bbea9b1f64284c9ca95d9f72f35e06aa39522c67 | /Scripts/plot_MS-FIGURE_4b_v2.py | 179017277abe54d6e9bf27d6a766bc9dfc223aaa | [
"MIT"
] | permissive | zmlabe/ModelBiasesANN | 1e70c150bd8897fa5fb822daf8ffad0ee581c5f1 | cece4a4b01ca1950f73c4d23fb379458778c221e | refs/heads/main | 2023-05-23T06:05:23.826345 | 2022-07-22T18:36:27 | 2022-07-22T18:36:27 | 339,145,668 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,625 | py | """
Script to plot figure 4b
Author : Zachary M. Labe
Date : 12 July 2021
Version : 2
"""
### Import packages
import sys
import matplotlib.pyplot as plt
import matplotlib.colors as c
import numpy as np
import palettable.cubehelix as cm
import palettable.scientific.sequential as sss
import palettable.cartocolors.qualitative as cc
import cmocean as cmocean
import cmasher as cmr
import calc_Utilities as UT
import scipy.stats as sts
### Plotting defaults
plt.rc('text',usetex=True)
plt.rc('font',**{'family':'sans-serif','sans-serif':['Avant Garde']})
### Set parameters
directorydata = '/Users/zlabe/Documents/Research/ModelComparison/Data/MSFigures_v2/'
directoryfigure = '/Users/zlabe/Desktop/ModelComparison_v1/MSFigures_v2/'
variablesall = ['T2M']
yearsall = np.arange(1950,2019+1,1)
allDataLabels = ['CanESM2','MPI','CSIRO-MK3.6','EC-EARTH','GFDL-CM3','GFDL-ESM2M','LENS','MM-Mean']
letters = ["a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p"]
### Read in frequency data
globef = np.load(directorydata + 'CountingIterations_%s.npz' % ('SMILEGlobe'))
arcticf = np.load(directorydata + 'CountingIterations_%s.npz' % ('LowerArctic'))
gmeanff = globef['mmean']
ggfdlff = globef['gfdlcm']
ameanff = arcticf['mmean']
agfdlff = arcticf['gfdlcm']
###############################################################################
###############################################################################
###############################################################################
def adjust_spines(ax, spines):
for loc, spine in ax.spines.items():
if loc in spines:
spine.set_position(('outward', 5))
else:
spine.set_color('none')
if 'left' in spines:
ax.yaxis.set_ticks_position('left')
else:
ax.yaxis.set_ticks([])
if 'bottom' in spines:
ax.xaxis.set_ticks_position('bottom')
else:
ax.xaxis.set_ticks([])
### Begin plot
fig = plt.figure(figsize=(8,6))
ax = plt.subplot(211)
adjust_spines(ax, ['left', 'bottom'])
ax.spines['top'].set_color('none')
ax.spines['right'].set_color('none')
ax.spines['left'].set_color('dimgrey')
ax.spines['bottom'].set_color('dimgrey')
ax.spines['left'].set_linewidth(2)
ax.spines['bottom'].set_linewidth(2)
ax.tick_params('both',length=4,width=2,which='major',color='dimgrey')
ax.yaxis.grid(zorder=1,color='darkgrey',alpha=0.35,clip_on=False,linewidth=0.5)
x=np.arange(1950,2019+1,1)
plt.plot(yearsall,gmeanff,linewidth=5,color='k',alpha=1,zorder=3,clip_on=False)
plt.yticks(np.arange(0,101,10),map(str,np.round(np.arange(0,101,10),2)),size=9)
plt.xticks(np.arange(1950,2030+1,10),map(str,np.arange(1950,2030+1,10)),size=9)
plt.xlim([1950,2020])
plt.ylim([0,100])
plt.text(1949,104,r'\textbf{[a]}',color='dimgrey',
fontsize=7,ha='center')
plt.text(2022,50,r'\textbf{GLOBAL}',color='dimgrey',fontsize=25,rotation=270,
ha='center',va='center')
plt.ylabel(r'\textbf{Frequency of Label}',color='k',fontsize=10)
###############################################################################
ax = plt.subplot(212)
adjust_spines(ax, ['left', 'bottom'])
ax.spines['top'].set_color('none')
ax.spines['right'].set_color('none')
ax.spines['left'].set_color('dimgrey')
ax.spines['bottom'].set_color('dimgrey')
ax.spines['left'].set_linewidth(2)
ax.spines['bottom'].set_linewidth(2)
ax.tick_params('both',length=4,width=2,which='major',color='dimgrey')
ax.yaxis.grid(zorder=1,color='darkgrey',alpha=0.35,clip_on=False,linewidth=0.5)
x=np.arange(1950,2019+1,1)
plt.plot(yearsall,ameanff,linewidth=5,color='k',alpha=1,zorder=3,clip_on=False,label=r'\textbf{MM-Mean}')
plt.plot(yearsall,agfdlff,linewidth=4,color=plt.cm.CMRmap(0.6),alpha=1,zorder=3,clip_on=False,label=r'\textbf{GFDL-CM3}',
linestyle='--',dashes=(1,0.3))
plt.yticks(np.arange(0,101,10),map(str,np.round(np.arange(0,101,10),2)),size=9)
plt.xticks(np.arange(1950,2030+1,10),map(str,np.arange(1950,2030+1,10)),size=9)
plt.xlim([1950,2020])
plt.ylim([0,100])
plt.text(1949,104,r'\textbf{[b]}',color='dimgrey',
fontsize=7,ha='center')
leg = plt.legend(shadow=False,fontsize=11,loc='upper center',
bbox_to_anchor=(0.5,1.22),fancybox=True,ncol=4,frameon=False,
handlelength=5,handletextpad=1)
plt.ylabel(r'\textbf{Frequency of Label}',color='k',fontsize=10)
plt.text(2022,50,r'\textbf{ARCTIC}',color='dimgrey',fontsize=25,rotation=270,
ha='center',va='center')
plt.tight_layout()
plt.subplots_adjust(hspace=0.4)
plt.savefig(directoryfigure + 'MS-Figure_4b_v2_Poster.png',dpi=1000) | [
"[email protected]"
] | |
6db9b78246aef370efc8ef609a33b1dadab124a8 | 53e58c213232e02250e64f48b97403ca86cd02f9 | /18/mc/ExoDiBosonResonances/EDBRTreeMaker/test/crab3_analysisM4500_R_0-7.py | fc7eba74e67b0348603262470fab519845902f68 | [] | no_license | xdlyu/fullRunII_ntuple_102X | 32e79c3bbc704cfaa00c67ab5124d40627fdacaf | d420b83eb9626a8ff1c79af5d34779cb805d57d8 | refs/heads/master | 2020-12-23T15:39:35.938678 | 2020-05-01T14:41:38 | 2020-05-01T14:41:38 | 237,192,426 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,160 | py | from WMCore.Configuration import Configuration
name = 'WWW'
steam_dir = 'xulyu'
config = Configuration()
config.section_("General")
config.General.requestName = 'M4500_R0-7_off'
config.General.transferLogs = True
config.section_("JobType")
config.JobType.pluginName = 'Analysis'
config.JobType.inputFiles = ['Autumn18_V19_MC_L1FastJet_AK4PFchs.txt','Autumn18_V19_MC_L2Relative_AK4PFchs.txt','Autumn18_V19_MC_L3Absolute_AK4PFchs.txt','Autumn18_V19_MC_L1FastJet_AK8PFchs.txt','Autumn18_V19_MC_L2Relative_AK8PFchs.txt','Autumn18_V19_MC_L3Absolute_AK8PFchs.txt','Autumn18_V19_MC_L1FastJet_AK8PFPuppi.txt','Autumn18_V19_MC_L2Relative_AK8PFPuppi.txt','Autumn18_V19_MC_L3Absolute_AK8PFPuppi.txt','Autumn18_V19_MC_L1FastJet_AK4PFPuppi.txt','Autumn18_V19_MC_L2Relative_AK4PFPuppi.txt','Autumn18_V19_MC_L3Absolute_AK4PFPuppi.txt']
#config.JobType.inputFiles = ['PHYS14_25_V2_All_L1FastJet_AK4PFchs.txt','PHYS14_25_V2_All_L2Relative_AK4PFchs.txt','PHYS14_25_V2_All_L3Absolute_AK4PFchs.txt','PHYS14_25_V2_All_L1FastJet_AK8PFchs.txt','PHYS14_25_V2_All_L2Relative_AK8PFchs.txt','PHYS14_25_V2_All_L3Absolute_AK8PFchs.txt']
# Name of the CMSSW configuration file
#config.JobType.psetName = 'bkg_ana.py'
config.JobType.psetName = 'analysis_sig.py'
#config.JobType.allowUndistributedCMSSW = True
config.JobType.allowUndistributedCMSSW = True
config.section_("Data")
#config.Data.inputDataset = '/WJetsToLNu_13TeV-madgraph-pythia8-tauola/Phys14DR-PU20bx25_PHYS14_25_V1-v1/MINIAODSIM'
config.Data.inputDataset = '/WkkToWRadionToWWW_M4500-R0-7_TuneCP5_13TeV-madgraph/RunIIAutumn18MiniAOD-102X_upgrade2018_realistic_v15-v1/MINIAODSIM'
#config.Data.inputDBS = 'global'
config.Data.inputDBS = 'global'
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob =20
config.Data.totalUnits = -1
config.Data.publication = False
config.Data.outLFNDirBase = '/store/group/dpg_trigger/comm_trigger/TriggerStudiesGroup/STEAM/' + steam_dir + '/' + name + '/'
# This string is used to construct the output dataset name
config.Data.outputDatasetTag = 'M4500_R0-7_off'
config.section_("Site")
# Where the output files will be transmitted to
config.Site.storageSite = 'T2_CH_CERN'
| [
"[email protected]"
] | |
87934c23053f09c259a1ce2e6270ea821fc90da6 | 520baeba0e86b0bab3c5590f40b868ca4306dc7e | /hazelcast/protocol/codec/count_down_latch_get_count_codec.py | 345de04de3a44161676bfb0d96b360bac2e606ad | [
"Apache-2.0"
] | permissive | mustafaiman/hazelcast-python-client | 69f27367162045bbfa4e66e7adadcfd254dfab21 | 85f29f975c91520075d0461327e38ab93c2e78c2 | refs/heads/master | 2021-01-18T04:23:10.740371 | 2015-12-11T14:26:06 | 2015-12-11T14:26:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,038 | py | from hazelcast.serialization.data import *
from hazelcast.serialization.bits import *
from hazelcast.protocol.client_message import ClientMessage
from hazelcast.protocol.custom_codec import *
from hazelcast.protocol.codec.count_down_latch_message_type import *
REQUEST_TYPE = COUNTDOWNLATCH_GETCOUNT
RESPONSE_TYPE = 102
RETRYABLE = True
def calculate_size(name):
""" Calculates the request payload size"""
data_size = 0
data_size += calculate_size_str(name)
return data_size
def encode_request(name):
""" Encode request into client_message"""
client_message = ClientMessage(payload_size=calculate_size(name))
client_message.set_message_type(REQUEST_TYPE)
client_message.set_retryable(RETRYABLE)
client_message.append_str(name)
client_message.update_frame_length()
return client_message
def decode_response(client_message):
""" Decode response from client message"""
parameters = dict(response=None)
parameters['response'] = client_message.read_int()
return parameters
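# Usage sketch (illustrative only; sending the message and receiving the
# response are handled by the wider client and are omitted here):
#
#     request = encode_request("my-latch")
#     # ... send `request`, receive `response_message` from the member ...
#     count = decode_response(response_message)['response']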
| [
"[email protected]"
] | |
3393a8d836b455d0cb754edb1ec870771dbee269 | c4544c22c0618451746795090e07c80bc85a0877 | /static_demo/static_demo/urls.py | 9928d4f92b406cd67d5c5cef03defa2f295f2c2a | [] | no_license | RelaxedDong/Django_course | 35f7027dc552ad148d2dc8679a19a1ffb12b8d14 | 2965089d15e4c80cd6402d362ee37f8cc675c08b | refs/heads/master | 2022-01-09T14:28:40.503099 | 2019-05-24T07:07:03 | 2019-05-24T07:07:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 981 | py | """static_demo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from book import views
# Import the settings and the static() URL helper
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('',views.index)
] + static(settings.STATIC_URL, document_root=settings.STATICFILES_DIRS[0])
| [
"[email protected]"
] | |
1e215bc242fca5e9220e2fe7e015281cf4b594b5 | f0858aae73097c49e995ff3526a91879354d1424 | /nova/api/openstack/compute/contrib/hosts.py | 9fbefd7309b945e6eb8b63e717634091b4a2e30f | [
"Apache-2.0"
] | permissive | bopopescu/nested_quota_final | 7a13f7c95e9580909d91db83c46092148ba1403b | 7c3454883de9f5368fa943924540eebe157a319d | refs/heads/master | 2022-11-20T16:14:28.508150 | 2015-02-16T17:47:59 | 2015-02-16T17:47:59 | 282,100,691 | 0 | 0 | Apache-2.0 | 2020-07-24T02:14:02 | 2020-07-24T02:14:02 | null | UTF-8 | Python | false | false | 12,946 | py | # Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The hosts admin extension."""
import webob.exc
from nova.api.openstack import extensions
from nova import compute
from nova import exception
from nova.i18n import _
from nova import objects
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
authorize = extensions.extension_authorizer('compute', 'hosts')
class HostController(object):
"""The Hosts API controller for the OpenStack API."""
def __init__(self):
self.api = compute.HostAPI()
super(HostController, self).__init__()
def index(self, req):
"""Returns a dict in the format:
| {'hosts': [{'host_name': 'some.host.name',
| 'service': 'cells',
| 'zone': 'internal'},
| {'host_name': 'some.other.host.name',
| 'service': 'cells',
| 'zone': 'internal'},
| {'host_name': 'some.celly.host.name',
| 'service': 'cells',
| 'zone': 'internal'},
| {'host_name': 'console1.host.com',
| 'service': 'consoleauth',
| 'zone': 'internal'},
| {'host_name': 'network1.host.com',
| 'service': 'network',
| 'zone': 'internal'},
| {'host_name': 'netwwork2.host.com',
| 'service': 'network',
| 'zone': 'internal'},
| {'host_name': 'compute1.host.com',
| 'service': 'compute',
| 'zone': 'nova'},
| {'host_name': 'compute2.host.com',
| 'service': 'compute',
| 'zone': 'nova'},
| {'host_name': 'sched1.host.com',
| 'service': 'scheduler',
| 'zone': 'internal'},
| {'host_name': 'sched2.host.com',
| 'service': 'scheduler',
| 'zone': 'internal'},
| {'host_name': 'vol1.host.com',
| 'service': 'volume',
| 'zone': 'internal'}]}
"""
context = req.environ['nova.context']
authorize(context)
filters = {'disabled': False}
zone = req.GET.get('zone', None)
if zone:
filters['availability_zone'] = zone
services = self.api.service_get_all(context, filters=filters,
set_zones=True)
hosts = []
for service in services:
hosts.append({'host_name': service['host'],
'service': service['topic'],
'zone': service['availability_zone']})
return {'hosts': hosts}
def update(self, req, id, body):
"""Updates a specified body.
:param body: example format {'status': 'enable',
'maintenance_mode': 'enable'}
"""
def read_enabled(orig_val, msg):
"""Checks a specified orig_val and returns True for 'enabled'
and False for 'disabled'.
:param orig_val: A string with either 'enable' or 'disable'. May
be surrounded by whitespace, and case doesn't
matter
:param msg: The message to be passed to HTTPBadRequest. A single
%s will be replaced with orig_val.
"""
val = orig_val.strip().lower()
if val == "enable":
return True
elif val == "disable":
return False
else:
raise webob.exc.HTTPBadRequest(explanation=msg % orig_val)
context = req.environ['nova.context']
authorize(context)
# See what the user wants to 'update'
params = {k.strip().lower(): v for k, v in body.iteritems()}
orig_status = status = params.pop('status', None)
orig_maint_mode = maint_mode = params.pop('maintenance_mode', None)
# Validate the request
if len(params) > 0:
# Some extra param was passed. Fail.
explanation = _("Invalid update setting: '%s'") % params.keys()[0]
raise webob.exc.HTTPBadRequest(explanation=explanation)
if orig_status is not None:
status = read_enabled(orig_status, _("Invalid status: '%s'"))
if orig_maint_mode is not None:
maint_mode = read_enabled(orig_maint_mode, _("Invalid mode: '%s'"))
if status is None and maint_mode is None:
explanation = _("'status' or 'maintenance_mode' needed for "
"host update")
raise webob.exc.HTTPBadRequest(explanation=explanation)
# Make the calls and merge the results
result = {'host': id}
if status is not None:
result['status'] = self._set_enabled_status(context, id, status)
if maint_mode is not None:
result['maintenance_mode'] = self._set_host_maintenance(context,
id, maint_mode)
return result
def _set_host_maintenance(self, context, host_name, mode=True):
"""Start/Stop host maintenance window. On start, it triggers
guest VMs evacuation.
"""
LOG.audit(_("Putting host %(host_name)s in maintenance mode "
"%(mode)s."),
{'host_name': host_name, 'mode': mode})
try:
result = self.api.set_host_maintenance(context, host_name, mode)
except NotImplementedError:
msg = _("Virt driver does not implement host maintenance mode.")
raise webob.exc.HTTPNotImplemented(explanation=msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
except exception.ComputeServiceUnavailable as e:
raise webob.exc.HTTPBadRequest(explanation=e.format_message())
if result not in ("on_maintenance", "off_maintenance"):
raise webob.exc.HTTPBadRequest(explanation=result)
return result
def _set_enabled_status(self, context, host_name, enabled):
"""Sets the specified host's ability to accept new instances.
:param enabled: a boolean - if False no new VMs will be able to start
on the host
"""
if enabled:
LOG.audit(_("Enabling host %s.") % host_name)
else:
LOG.audit(_("Disabling host %s.") % host_name)
try:
result = self.api.set_host_enabled(context, host_name=host_name,
enabled=enabled)
except NotImplementedError:
msg = _("Virt driver does not implement host disabled status.")
raise webob.exc.HTTPNotImplemented(explanation=msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
except exception.ComputeServiceUnavailable as e:
raise webob.exc.HTTPBadRequest(explanation=e.format_message())
if result not in ("enabled", "disabled"):
raise webob.exc.HTTPBadRequest(explanation=result)
return result
def _host_power_action(self, req, host_name, action):
"""Reboots, shuts down or powers up the host."""
context = req.environ['nova.context']
authorize(context)
try:
result = self.api.host_power_action(context, host_name=host_name,
action=action)
except NotImplementedError:
msg = _("Virt driver does not implement host power management.")
raise webob.exc.HTTPNotImplemented(explanation=msg)
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
except exception.ComputeServiceUnavailable as e:
raise webob.exc.HTTPBadRequest(explanation=e.format_message())
return {"host": host_name, "power_action": result}
def startup(self, req, id):
return self._host_power_action(req, host_name=id, action="startup")
def shutdown(self, req, id):
return self._host_power_action(req, host_name=id, action="shutdown")
def reboot(self, req, id):
return self._host_power_action(req, host_name=id, action="reboot")
@staticmethod
def _get_total_resources(host_name, compute_node):
return {'resource': {'host': host_name,
'project': '(total)',
'cpu': compute_node['vcpus'],
'memory_mb': compute_node['memory_mb'],
'disk_gb': compute_node['local_gb']}}
@staticmethod
def _get_used_now_resources(host_name, compute_node):
return {'resource': {'host': host_name,
'project': '(used_now)',
'cpu': compute_node['vcpus_used'],
'memory_mb': compute_node['memory_mb_used'],
'disk_gb': compute_node['local_gb_used']}}
@staticmethod
def _get_resource_totals_from_instances(host_name, instances):
cpu_sum = 0
mem_sum = 0
hdd_sum = 0
for instance in instances:
cpu_sum += instance['vcpus']
mem_sum += instance['memory_mb']
hdd_sum += instance['root_gb'] + instance['ephemeral_gb']
return {'resource': {'host': host_name,
'project': '(used_max)',
'cpu': cpu_sum,
'memory_mb': mem_sum,
'disk_gb': hdd_sum}}
@staticmethod
def _get_resources_by_project(host_name, instances):
# Getting usage resource per project
project_map = {}
for instance in instances:
resource = project_map.setdefault(instance['project_id'],
{'host': host_name,
'project': instance['project_id'],
'cpu': 0,
'memory_mb': 0,
'disk_gb': 0})
resource['cpu'] += instance['vcpus']
resource['memory_mb'] += instance['memory_mb']
resource['disk_gb'] += (instance['root_gb'] +
instance['ephemeral_gb'])
return project_map
def show(self, req, id):
"""Shows the physical/usage resource given by hosts.
:param id: hostname
:returns: expected to use HostShowTemplate.
ex.::
{'host': {'resource':D},..}
D: {'host': 'hostname','project': 'admin',
'cpu': 1, 'memory_mb': 2048, 'disk_gb': 30}
"""
context = req.environ['nova.context']
host_name = id
try:
compute_node = (
objects.ComputeNode.get_first_node_by_host_for_old_compat(
context, host_name))
except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
except exception.AdminRequired:
msg = _("Describe-resource is admin only functionality")
raise webob.exc.HTTPForbidden(explanation=msg)
instances = self.api.instance_get_all_by_host(context, host_name)
resources = [self._get_total_resources(host_name, compute_node)]
resources.append(self._get_used_now_resources(host_name,
compute_node))
resources.append(self._get_resource_totals_from_instances(host_name,
instances))
by_proj_resources = self._get_resources_by_project(host_name,
instances)
for resource in by_proj_resources.itervalues():
resources.append({'resource': resource})
return {'host': resources}
class Hosts(extensions.ExtensionDescriptor):
"""Admin-only host administration."""
name = "Hosts"
alias = "os-hosts"
namespace = "http://docs.openstack.org/compute/ext/hosts/api/v1.1"
updated = "2011-06-29T00:00:00Z"
def get_resources(self):
resources = [extensions.ResourceExtension('os-hosts',
HostController(),
collection_actions={'update': 'PUT'},
member_actions={"startup": "GET", "shutdown": "GET",
"reboot": "GET"})]
return resources
| [
"[email protected]"
] | |
fd8a249b1f44b14a3c11896e5a12e1c86a1988e9 | 372a0eb8d3be3d40b9dfb5cf45a7df2149d2dd0d | /charles/Week 07/lab08/lab08.py | 198fffad72e36dfcdfe4b7505ec51e6fe007c177 | [] | no_license | charlesfrye/cs61a-summer2015 | 5d14b679e5bea53cfa26c2a6a86720e8e77c322c | 1f5c0fbf5dce5d1322285595ca964493d9adbdfe | refs/heads/master | 2016-08-07T06:06:09.335913 | 2015-08-21T00:33:25 | 2015-08-21T00:33:25 | 38,509,126 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,460 | py | ## Linked Lists and Sets ##
# Linked Lists
class Link:
"""A linked list.
>>> s = Link(1, Link(2, Link(3, Link(4))))
>>> len(s)
4
>>> s[2]
3
>>> s
Link(1, Link(2, Link(3, Link(4))))
"""
empty = ()
def __init__(self, first, rest=empty):
assert rest is Link.empty or isinstance(rest, Link)
self.first = first
self.rest = rest
def __getitem__(self, i):
if i == 0:
return self.first
else:
return self.rest[i-1]
def __len__(self):
return 1 + len(self.rest)
def __repr__(self):
if self.rest is not Link.empty:
rest_str = ', ' + repr(self.rest)
else:
rest_str = ''
return 'Link({0}{1})'.format(repr(self.first), rest_str)
def slice_link(link, start, end):
"""Slices a Link from start to end (as with a normal Python list).
>>> link = Link(3, Link(1, Link(4, Link(1, Link(5, Link(9))))))
>>> slice_link(link, 1, 4)
Link(1, Link(4, Link(1)))
"""
if start == end:
return Link.empty
    return Link(link[start], slice_link(link.rest, start, end - 1))
# Sets
def union(s1, s2):
"""Returns the union of two sets.
>>> r = {0, 6, 6}
>>> s = {1, 2, 3, 4}
>>> t = union(s, {1, 6})
>>> t
{1, 2, 3, 4, 6}
>>> union(r, t)
{0, 1, 2, 3, 4, 6}
"""
union_set = set()
for element in s1:
union_set.add(element)
for element in s2:
union_set.add(element)
return union_set
def intersection(s1, s2):
"""Returns the intersection of two sets.
>>> r = {0, 1, 4, 0}
>>> s = {1, 2, 3, 4}
>>> t = intersection(s, {3, 4, 2})
>>> t
{2, 3, 4}
>>> intersection(r, t)
{4}
"""
intersect = set()
for element in s1:
if element in s2:
intersect.add(element)
return intersect
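# Note (added for illustration): Python's built-in set operators give the same
# results, e.g. `s1 | s2` for union and `s1 & s2` for intersection; the loops
# above spell the logic out explicitly.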
def extra_elem(a,b):
"""B contains every element in A, and has one additional member, find
the additional member.
>>> extra_elem(['dog', 'cat', 'monkey'], ['dog', 'cat', 'monkey', 'giraffe'])
'giraffe'
>>> extra_elem([1, 2, 3, 4, 5], [1, 2, 3, 4, 5, 6])
6
"""
return list(set(b)-set(a))[0]
def find_duplicates(lst):
"""Returns True if lst has any duplicates and False if it does not.
>>> find_duplicates([1, 2, 3, 4, 5])
False
>>> find_duplicates([1, 2, 3, 4, 2])
True
"""
return len(set(lst)) != len(lst)
| [
"[email protected]"
] | |
95303882335933bf48710ea4c6a92ec77ab6fa8b | 71748e7379548d75fcf6713f0e6d66d6db1c2bbd | /4AL16IS051_SHETTY _TANVI/Jaishma Ma'am/Challenge 1/p1.py | 94f3b31275c157f72cf5617f380da3fcadaba83b | [] | no_license | alvas-education-foundation/ISE_4th_Year_Coding_challenge | fcf78c755cc924bea7e905e67c1e30385cf5af0b | 96cfc92d679576dab15ef7d1cb6773f2082abfb2 | refs/heads/master | 2022-11-17T09:19:23.851817 | 2020-07-21T09:59:40 | 2020-07-21T09:59:40 | 265,195,858 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 233 | py | @@ -0,0 +1,26 @@
1)
x = input("")
n = int(x)
if n % 2 == 1:
print("Weird")
elif n % 2 == 0 and 2 <= n <= 5:
print("Not Weird")
elif n % 2 == 0 and 6 <= n <= 20:
print("Weird")
else:
print("Not Weird")
| [
"[email protected]"
] | |
d56f9ff4d9977c5cae0f12e69328846bbc0dd91b | e82b761f53d6a3ae023ee65a219eea38e66946a0 | /All_In_One/addons/io_thps_scene/export_shared.py | f75d8e3457209c8ec5a53569e993701e92bf9c6a | [] | no_license | 2434325680/Learnbgame | f3a050c28df588cbb3b14e1067a58221252e2e40 | 7b796d30dfd22b7706a93e4419ed913d18d29a44 | refs/heads/master | 2023-08-22T23:59:55.711050 | 2021-10-17T07:26:07 | 2021-10-17T07:26:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63,550 | py | #############################################
# SCENE EXPORT - SHARED COMPONENTS
#############################################
import bpy
import bmesh
import struct
import mathutils
import math
from . import helpers, collision, prefs, material, autosplit
from bpy.props import *
from . prefs import *
from . autosplit import *
from . helpers import *
from . collision import *
from . material import *
from . constants import *
from . qb import *
from . level_manifest import *
from . export_thug1 import export_scn_sectors
from . export_thug2 import export_scn_sectors_ug2
from . export_thps4 import *
class ExportError(Exception):
pass
# METHODS
#############################################
def pack_pre(root_dir, files, output_file):
pack = struct.pack
with open(output_file, "wb") as outp:
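        # PRE header: a 4-byte total-size placeholder (backfilled at the end
        # once everything is written), the container version, then the number
        # of packed files.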
outp.write(pack("I", 0))
outp.write(pack("I", 0xABCD0003)) # version
outp.write(pack("I", len(files))) # num files
for file in files:
adjusted_fn = bytes(os.path.relpath(file, root_dir), 'ascii') + b"\x00"
if len(adjusted_fn) % 4 != 0:
adjusted_fn = adjusted_fn + (b'\x00' * (4 - (len(adjusted_fn) % 4)))
with open(file, "rb") as inp:
data = inp.read()
outp.write(pack("I", len(data))) # data size
outp.write(pack("I", 0)) # compressed data size
outp.write(pack("I", len(adjusted_fn))) # file name size
outp.write(pack("I", crc_from_string(bytes(os.path.relpath(file, root_dir), 'ascii')))) # file name checksum
outp.write(adjusted_fn) # file name
outp.write(data) # data
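            # Pad to 4-byte alignment so the next record starts aligned.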
offs = outp.tell()
if offs % 4 != 0:
outp.write(b'\x00' * (4 - (offs % 4)))
total_bytes = outp.tell()
outp.seek(0)
outp.write(pack("I", total_bytes))
#----------------------------------------------------------------------------------
def do_export(operator, context, target_game):
is_model = False
self = operator
import subprocess, shutil, datetime
addon_prefs = context.user_preferences.addons[ADDON_NAME].preferences
base_files_dir_error = prefs._get_base_files_dir_error(addon_prefs)
if base_files_dir_error:
self.report({"ERROR"}, "Base files directory error: {} Check the base files directory addon preference. Aborting export.".format(base_files_dir_error))
return {"CANCELLED"}
base_files_dir = addon_prefs.base_files_dir
if target_game == "THPS4":
DEFAULT_SKY_SCN = self.skybox_name + "scn.dat"
DEFAULT_SKY_TEX = self.skybox_name + "tex.dat"
elif target_game == "THUG1":
DEFAULT_SKY_SCN = self.skybox_name + ".scn.xbx"
DEFAULT_SKY_TEX = self.skybox_name + ".tex.xbx"
elif target_game == "THUG2":
DEFAULT_SKY_SCN = self.skybox_name + ".scn.xbx"
DEFAULT_SKY_TEX = self.skybox_name + ".tex.xbx"
else:
raise Exception("Unknown target game: {}".format(target_game))
start_time = datetime.datetime.now()
filename = self.filename
directory = self.directory
j = os.path.join
def md(dir):
if not os.path.exists(dir):
os.makedirs(dir)
ext_pre = (".prx" if target_game == "THUG2" else ".pre")
ext_col = (".col" if (target_game == "THUG1" and not self.pack_pre) else ".col.xbx" )
ext_scn = (".scn" if (target_game == "THUG1" and not self.pack_pre) else ".scn.xbx" )
ext_tex = (".tex" if (target_game == "THUG1" and not self.pack_pre) else ".tex.xbx" )
ext_qb = ".qb"
if target_game == "THPS4":
ext_col = "col.dat"
ext_scn = "scn.dat"
ext_tex = "tex.dat"
self.report({'OPERATOR'}, "")
self.report({'INFO'}, "-" * 20)
self.report({'INFO'}, "Starting export of {} at {}".format(filename, start_time.time()))
orig_objects, temporary_objects = [], []
import sys
logging_fh = logging.FileHandler(j(directory, filename + "_export.log"), mode='w')
logging_fh.setFormatter(logging.Formatter("{asctime} [{levelname}] {message}", style='{', datefmt="%H:%M:%S"))
logging_ch = logging.StreamHandler(sys.stdout)
logging_ch.setFormatter(logging.Formatter("{asctime} [{levelname}] {message}", style='{', datefmt="%H:%M:%S"))
set_export_scale(operator.export_scale)
try:
LOG.addHandler(logging_fh)
LOG.addHandler(logging_ch)
LOG.setLevel(logging.DEBUG)
# Create shadow caster objects (hacky!)
generate_shadowcasters()
if self.generate_col_file or self.generate_scn_file or self.generate_scripts_files:
orig_objects, temporary_objects = autosplit._prepare_autosplit_objects(operator, context,target_game)
path = j(directory, "Levels", filename)
md(path)
if self.generate_col_file:
export_col(filename + ext_col, path, target_game, self)
if self.generate_scn_file:
self.report({'OPERATOR'}, "Generating scene file... ")
export_scn(filename + ext_scn, path, target_game, self, is_model)
if self.generate_tex_file:
md(path)
self.report({'OPERATOR'}, "Generating tex file... ")
export_tex(filename + ext_tex, path, target_game, self)
# ********************************************************
# Export cubemap DDS textures
if True:
_lightmap_folder = bpy.path.basename(bpy.context.blend_data.filepath)[:-6] # = Name of blend file
_folder = bpy.path.abspath("//Tx_Cubemap/{}".format(_lightmap_folder))
for ob in bpy.data.objects:
if ob.type == 'EMPTY' and ob.thug_empty_props and ob.thug_empty_props.empty_type == 'CubemapProbe' \
and ob.thug_cubemap_props and ob.thug_cubemap_props.exported == True:
shutil.copy("{}/{}.dds".format(_folder, ob.name),
j(path, "{}.dds".format(ob.name)))
# ********************************************************
if self.generate_scn_file and self.generate_sky:
skypath = j(directory, "Levels", filename + "_sky")
md(skypath)
shutil.copy(
j(base_files_dir, 'default_sky', DEFAULT_SKY_SCN),
j(skypath, filename + "_sky" + ext_scn))
shutil.copy(
j(base_files_dir, 'default_sky', DEFAULT_SKY_TEX),
j(skypath, filename + "_sky" + ext_tex))
compilation_successful = None
if self.generate_scripts_files:
self.report({'OPERATOR'}, "Generating QB files... ")
export_qb(filename, path, target_game, self)
old_cwd = os.getcwd()
os.chdir(path)
compilation_successful = True
import platform
wine = [] if platform.system() == "Windows" else ["wine"]
# #########################
# Build NODEARRAY qb file
try:
print("Compiling {}.txt to QB...".format(filename))
roq_output = subprocess.run(wine + [
j(base_files_dir, "roq.exe"),
"-c",
filename + ".txt"
], stdout=subprocess.PIPE)
if os.path.exists(filename + ".qb"):
os.remove(filename + ".qb")
if os.path.exists(filename + ".txt.qb"):
os.rename(filename + ".txt.qb", filename + ".qb")
else:
self.report({"ERROR"}, "{}\n\nCompiler output:\nFailed to compile the QB file.".format(
'\n'.join(reversed(roq_output.stdout.decode().split("\r\n")))))
compilation_successful = False
finally:
os.chdir(old_cwd)
# /Build NODEARRAY qb file
# #########################
# #########################
# Build _SCRIPTS qb file
if os.path.exists(j(path, filename + "_scripts.txt")):
print("Compiling {}_scripts.txt to QB...".format(filename))
os.chdir(path)
try:
roq_output = subprocess.run(wine + [
j(base_files_dir, "roq.exe"),
"-c",
filename + "_scripts.txt"
], stdout=subprocess.PIPE)
if os.path.exists(filename + "_scripts.qb"):
os.remove(filename + "_scripts.qb")
if os.path.exists(filename + "_scripts.txt.qb"):
os.rename(filename + "_scripts.txt.qb", filename + "_scripts.qb")
else:
self.report({"ERROR"}, "{}\n\nCompiler output:\nFailed to compile the QB file.".format(
'\n'.join(reversed(roq_output.stdout.decode().split("\r\n")))))
compilation_successful = False
finally:
os.chdir(old_cwd)
# /Build _SCRIPTS qb file
# #########################
# #########################
# Build PRE files
if self.pack_pre and target_game != 'THPS4':
md(j(directory, "pre"))
# Export all level files to a single PRE container
if False:
pack_files = []
pack_files.append(j(path, filename + ext_scn))
pack_files.append(j(path, filename + ext_tex))
pack_files.append(j(path, filename + ext_col))
pack_files.append(j(path, filename + ext_qb))
pack_files.append(j(path, filename + "_scripts" + ext_qb))
if self.generate_sky:
pack_files.append(j(skypath, filename + "_sky" + ext_scn))
pack_files.append(j(skypath, filename + "_sky" + ext_tex))
pack_pre( directory, pack_files, j(directory, "pre", filename + ext_pre) )
self.report({'OPERATOR'}, "Exported " + j(directory, "pre", filename + ext_pre))
# Export all level files using the classic multi-PRE container setup
else:
if self.generate_scripts_files:
pack_files = []
pack_files.append(j(path, filename + ext_qb))
pack_files.append(j(path, filename + "_scripts" + ext_qb))
if target_game == "THUG2":
pack_files.append(j(path, filename + "_thugpro" + ext_qb))
pack_pre( directory, pack_files, j(directory, "pre", filename + "_scripts" + ext_pre) )
else:
pack_pre( directory, pack_files, j(directory, "pre", filename + ext_pre) )
self.report({'OPERATOR'}, "Exported " + j(directory, "pre", filename + ext_pre))
if self.generate_col_file:
pack_files = []
pack_files.append(j(path, filename + ext_col))
pack_pre( directory, pack_files, j(directory, "pre", filename + "col" + ext_pre) )
self.report({'OPERATOR'}, "Exported " + j(directory, "pre", filename + "col" + ext_pre))
if self.generate_scn_file:
pack_files = []
pack_files.append(j(path, filename + ext_scn))
pack_files.append(j(path, filename + ext_tex))
if self.generate_sky:
pack_files.append(j(skypath, filename + "_sky" + ext_scn))
pack_files.append(j(skypath, filename + "_sky" + ext_tex))
pack_pre( directory, pack_files, j(directory, "pre", filename + "scn" + ext_pre) )
self.report({'OPERATOR'}, "Exported " + j(directory, "pre", filename + "scn" + ext_pre))
# /Build PRE files
# #########################
# Remove shadow caster objects (so hacky!)
cleanup_shadowcasters()
# Make sure our generated grass materials/textures are removed after export
cleanup_grass_materials()
end_time = datetime.datetime.now()
if (compilation_successful is None) or compilation_successful:
print("EXPORT COMPLETE! Thank you for waiting :)")
self.report({'INFO'}, "Exported level {} at {} (time taken: {})".format(filename, end_time.time(), end_time - start_time))
else:
print("EXPORT FAILED! Uh oh :(")
self.report({'WARNING'}, "Failed exporting level {} at {} (time taken: {})".format(filename, end_time.time(), end_time - start_time))
# -------------------------------------------------
# Final step: generate level manifest .json file!
# -------------------------------------------------
export_level_manifest_json(filename, directory, self, context.scene.thug_level_props)
except ExportError as e:
self.report({'ERROR'}, "Export failed.\nExport error: {}".format(str(e)))
except Exception as e:
LOG.debug(e)
raise
finally:
LOG.removeHandler(logging_fh)
LOG.removeHandler(logging_ch)
autosplit._cleanup_autosplit_objects(operator, context, target_game, orig_objects, temporary_objects)
return {'FINISHED'}
#----------------------------------------------------------------------------------
def do_export_model(operator, context, target_game):
is_model = True
self = operator
import subprocess, shutil, datetime
addon_prefs = context.user_preferences.addons[ADDON_NAME].preferences
base_files_dir_error = prefs._get_base_files_dir_error(addon_prefs)
if base_files_dir_error:
self.report({"ERROR"}, "Base files directory error: {} Check the base files directory addon preference. Aborting export.".format(base_files_dir_error))
return {"CANCELLED"}
base_files_dir = addon_prefs.base_files_dir
if not target_game == "THUG1" and not target_game == "THUG2" and not target_game == "THPS4":
raise Exception("Unknown target game: {}".format(target_game))
start_time = datetime.datetime.now()
filename = self.filename
directory = self.directory
j = os.path.join
def md(dir):
if not os.path.exists(dir):
os.makedirs(dir)
ext_col = (".col" if target_game == "THUG1" else ".col.xbx" )
ext_scn = (".mdl" if target_game == "THUG1" else ".mdl.xbx" )
ext_tex = (".tex" if target_game == "THUG1" else ".tex.xbx" )
ext_qb = ".qb"
if self.model_type == "skin":
ext_scn = (".skin" if target_game == "THUG1" else ".skin.xbx" )
if target_game == "THPS4":
ext_col = "col.dat"
ext_scn = "skin.dat" if self.model_type == "skin" else "mdl.dat"
ext_tex = "tex.dat"
self.report({'OPERATOR'}, "")
self.report({'INFO'}, "-" * 20)
self.report({'INFO'}, "Starting export of {} at {}".format(filename, start_time.time()))
orig_objects, temporary_objects = [], []
import sys
logging_fh = logging.FileHandler(j(directory, filename + "_export.log"), mode='w')
logging_fh.setFormatter(logging.Formatter("{asctime} [{levelname}] {message}", style='{', datefmt="%H:%M:%S"))
logging_ch = logging.StreamHandler(sys.stdout)
logging_ch.setFormatter(logging.Formatter("{asctime} [{levelname}] {message}", style='{', datefmt="%H:%M:%S"))
set_export_scale(operator.export_scale)
try:
LOG.addHandler(logging_fh)
LOG.addHandler(logging_ch)
LOG.setLevel(logging.DEBUG)
orig_objects, temporary_objects = autosplit._prepare_autosplit_objects(operator, context,target_game)
path = j(directory, "Models/" + filename)
md(path)
# Generate COL file
self.report({'OPERATOR'}, "Generating collision file... ")
export_col(filename + ext_col, path, target_game, self)
# Generate SCN/MDL file
self.report({'OPERATOR'}, "Generating scene file... ")
export_scn(filename + ext_scn, path, target_game, self, is_model)
# Generate TEX file
self.report({'OPERATOR'}, "Generating tex file... ")
export_tex(filename + ext_tex, path, target_game, self)
# Maybe generate QB file
compilation_successful = None
if self.generate_scripts_files:
self.report({'OPERATOR'}, "Generating QB files... ")
export_model_qb(filename, path, target_game, self)
old_cwd = os.getcwd()
os.chdir(path)
compilation_successful = True
import platform
wine = [] if platform.system() == "Windows" else ["wine"]
try:
roq_output = subprocess.run(wine + [
j(base_files_dir, "roq.exe"),
"-c",
filename + ".txt"
], stdout=subprocess.PIPE)
if os.path.exists(filename + ".qb"):
os.remove(filename + ".qb")
if os.path.exists(filename + ".txt.qb"):
os.rename(filename + ".txt.qb", filename + ".qb")
else:
self.report({"ERROR"}, "{}\n\nCompiler output:\nFailed to compile the QB file.".format(
'\n'.join(reversed(roq_output.stdout.decode().split("\r\n")))))
compilation_successful = False
finally:
os.chdir(old_cwd)
end_time = datetime.datetime.now()
if (compilation_successful is None) or compilation_successful:
self.report({'INFO'}, "Exported model {} at {} (time taken: {})".format(filename, end_time.time(), end_time - start_time))
else:
self.report({'WARNING'}, "Failed exporting model {} at {} (time taken: {})".format(filename, end_time.time(), end_time - start_time))
except ExportError as e:
self.report({'ERROR'}, "Export failed.\nExport error: {}".format(str(e)))
except Exception as e:
LOG.debug(e)
raise
finally:
LOG.removeHandler(logging_fh)
LOG.removeHandler(logging_ch)
autosplit._cleanup_autosplit_objects(operator, context, target_game, orig_objects, temporary_objects)
return {'FINISHED'}
#----------------------------------------------------------------------------------
def generate_shadowcasters():
print("Creating shadow casters...")
out_objects = [o for o in bpy.data.objects
if (o.type == "MESH"
and getattr(o, 'thug_export_scene', True)
and not o.get("thug_autosplit_object_no_export_hack", False))]
scene = bpy.context.scene
sc_count = -1
sc_mat_count = -1
for ob in out_objects:
if not ob.thug_cast_shadow:
continue
ob_name = ob.name
if ob.name.endswith("_SCN"):
ob_name = ob.name[:-4]
print("Creating shadow caster object(s) for mesh: {}".format(ob_name))
sc_count += 1
new_ob = ob.copy()
new_ob.data = ob.data.copy()
# Create empty collision mesh, and an SCN mesh
new_col_mesh = bpy.data.meshes.new(name="GEN_ShadowCaster_" + str(sc_count) + "_MESH")
new_col_ob = bpy.data.objects.new(name="GEN_ShadowCaster_" + str(sc_count), object_data=new_col_mesh)
new_ob.name = "GEN_ShadowCaster_" + str(sc_count) + "_SCN"
new_col_ob.thug_object_class = "LevelObject"
new_ob.thug_object_class = "LevelGeometry"
new_ob.thug_export_scene = True
new_ob.thug_export_collision = False
#new_ob.scale[0] = 1.1
#new_ob.scale[1] = 1.1
#new_ob.scale[2] = 1.1
new_col_ob.thug_export_scene = False
new_col_ob.thug_export_collision = True
for mat_slot in new_ob.material_slots:
sc_mat_count += 1
orig_mat = mat_slot.material
mat_slot.material = mat_slot.material.copy()
mat_slot.material.thug_material_props.use_new_mats = False
mat_slot.material.thug_material_props.specular_power = -0.23
mat_slot.material.name = "GEN_Mat_ShadowCaster_" + str(sc_mat_count)
scene.objects.link(new_ob)
scene.objects.link(new_col_ob)
#helpers._flip_normals(new_ob)
def cleanup_shadowcasters():
print("Removing shadow casters...")
for ob in bpy.data.objects:
if ob.name.startswith("GEN_ShadowCaster_"):
bpy.data.objects.remove(ob)
for mat in bpy.data.materials:
if mat.name.startswith("GEN_Mat_ShadowCaster_"):
bpy.data.materials.remove(mat)
#----------------------------------------------------------------------------------
def export_scn(filename, directory, target_game, operator=None, is_model=False):
def w(fmt, *args):
outp.write(struct.pack(fmt, *args))
output_file = os.path.join(directory, filename)
with open(output_file, "wb") as outp:
w("3I", 1, 1, 1)
if target_game == "THPS4":
export_materials_th4(outp, target_game, operator, is_model)
else:
export_materials(outp, target_game, operator, is_model)
if target_game == "THUG2":
export_scn_sectors_ug2(outp, operator, is_model)
elif target_game == "THUG1":
export_scn_sectors(outp, operator, is_model)
elif target_game == "THPS4":
export_scn_sectors_th4(outp, operator, is_model)
else:
raise Exception("Unknown target game: {}".format(target_game))
w("i", 0) # number of hierarchy objects
#----------------------------------------------------------------------------------
def export_col(filename, directory, target_game, operator=None):
from io import BytesIO
p = Printer()
output_file = os.path.join(directory, filename)
bm = bmesh.new()
# Applies modifiers and triangulates mesh - unless the 'speed hack' export option is on
def triang(o):
if operator.speed_hack:
final_mesh = o.data
bm.clear()
bm.from_mesh(final_mesh)
else:
final_mesh = o.to_mesh(bpy.context.scene, True, 'PREVIEW')
if helpers._need_to_flip_normals(o):
temporary_object = helpers._make_temp_obj(final_mesh)
try:
bpy.context.scene.objects.link(temporary_object)
# temporary_object.matrix_world = o.matrix_world
helpers._flip_normals(temporary_object)
finally:
if bpy.context.mode != "OBJECT":
bpy.ops.object.mode_set(mode="OBJECT")
bpy.context.scene.objects.unlink(temporary_object)
bpy.data.objects.remove(temporary_object)
bm.clear()
bm.from_mesh(final_mesh)
bmesh.ops.triangulate(bm, faces=bm.faces)
bm.faces.ensure_lookup_table()
bm.faces.index_update()
bpy.data.meshes.remove(final_mesh)
return
out_objects = [o for o in bpy.data.objects
if (o.type == "MESH"
and getattr(o, 'thug_export_collision', True)
and not o.get("thug_autosplit_object_no_export_hack", False))]
total_verts = 0 # sum(len(bm.verts) for o in out_objects if [triang(o)])
total_faces = 0 # sum(len(bm.faces) for o in out_objects if [triang(o)])
with open(output_file, "wb") as outp:
def w(fmt, *args):
outp.write(struct.pack(fmt, *args))
verts_out = BytesIO()
intensities_out = BytesIO()
faces_out = BytesIO()
thug2_thing_out = BytesIO()
nodes_out = BytesIO()
col_version = 10
if target_game == 'THUG1':
col_version = 9
elif target_game == 'THPS4':
col_version = 8
w("i", col_version) # version
w("i", len(out_objects)) # num objects
total_verts_offset = outp.tell()
w("i", total_verts)
w("i", total_faces if target_game != 'THPS4' else 0) # large faces
w("i", 0) # small faces
w("i", total_verts) # large verts
w("i", 0) # small verts
w("i", 0) # padding
obj_face_offset = 0
obj_vert_offset = 0
obj_bsp_offset = 0
obj_intensity_offset = 0
bsp_nodes_size = 0
node_face_index_offset = 0
node_faces = []
DBG = lambda *args: LOG.debug(" ".join(str(arg) for arg in args))
for o in out_objects:
def w(fmt, *args):
outp.write(struct.pack(fmt, *args))
LOG.debug("Exporting object: {}".format(o.name))
triang(o)
total_verts += len(bm.verts)
total_faces += len(bm.faces)
if "thug_checksum" in o:
w("i", o["thug_checksum"])
else:
clean_name = get_clean_name(o)
if is_hex_string(clean_name):
w("I", int(clean_name, 0)) # checksum
else:
w("I", crc_from_string(bytes(clean_name, 'ascii'))) # checksum
w("H", o.thug_col_obj_flags)
if len(bm.verts) > 2**16:
raise ExportError("Too many vertices in an object: {} (has {}, max is {}). Consider using Autosplit.".format(o.name, len(bm.verts), 2**16))
w("H", len(bm.verts))
MAX_TRIS = 6000 # min(6000, 2**16)
#if (len(bm.faces) * (3 if target_game == "THUG2" else 1)) > MAX_TRIS:
if len(bm.faces) > MAX_TRIS:
raise ExportError("Too many tris in an object: {} (has {}, max is {}). Consider using Autosplit.".format(
o.name,
len(bm.faces),
MAX_TRIS))
# 2**16 // (3 if target_game == "THUG2" else 1)))
w("H", len(bm.faces))
w("?", False) # use face small
w("?", False) # use fixed verts
w("I", obj_face_offset)
if target_game == 'THPS4':
obj_face_offset += SIZEOF_LARGE_FACE_THPS4 * len(bm.faces)
else:
obj_face_offset += SIZEOF_LARGE_FACE * len(bm.faces)
obj_matrix = get_scale_matrix(o) if o.thug_object_class == "LevelObject" else o.matrix_world
#obj_matrix = o.matrix_world
if operator.is_park_editor:
# AFAIK we don't modify the bounding box for dictionary collision, only the scene.
# But if this changes I'll update it here!
bbox = get_bbox2(bm.verts, obj_matrix, operator.is_park_editor)
else:
bbox = get_bbox2(bm.verts, obj_matrix)
w("4f", *bbox[0])
w("4f", *bbox[1])
w("I", obj_vert_offset)
if target_game == 'THPS4':
obj_vert_offset += len(bm.verts)
else:
obj_vert_offset += SIZEOF_FLOAT_VERT * len(bm.verts)
w("I", obj_bsp_offset)
obj_bsp_tree = make_bsp_tree(o, bm.faces[:], obj_matrix)
obj_bsp_offset += len(list(iter_tree(obj_bsp_tree))) * SIZEOF_BSP_NODE
# THPS4: Intensity list does not exist, intensity is appended to each vert
if target_game == 'THPS4':
w("I", 0)
else:
w("I", obj_intensity_offset)
obj_intensity_offset += len(bm.verts)
w("I", 0) # padding
def w(fmt, *args):
verts_out.write(struct.pack(fmt, *args))
for v in bm.verts:
w("3f", *to_thug_coords(obj_matrix * v.co))
if target_game == 'THPS4':
w("B", 0xFF) # Intensity data(?)
w("B", 0xFF) # Intensity data(?)
w("B", 0xFF) # Intensity data(?)
w("B", 0xFF) # Intensity data(?)
if target_game != 'THPS4':
def w(fmt, *args):
intensities_out.write(struct.pack(fmt, *args))
intensity_layer = bm.loops.layers.color.get("intensity")
if not intensity_layer:
intensity_layer = bm.loops.layers.color.get("bake")
if not intensity_layer:
intensity_layer = bm.loops.layers.color.get("color")
if intensity_layer:
intensities_list = {}
for face in bm.faces:
for loop in face.loops:
tmp_intensity = int((( loop[intensity_layer].r + loop[intensity_layer].g + loop[intensity_layer].b ) / 3.0) * 255)
intensities_list[loop.vert] = tmp_intensity
for vert in bm.verts:
if vert in intensities_list:
w('B', intensities_list[vert])
else:
w('B', 128)
else:
intensities_out.write(b'\xff' * len(bm.verts))
def w(fmt, *args):
faces_out.write(struct.pack(fmt, *args))
cfl = bm.faces.layers.int.get("collision_flags")
ttl = bm.faces.layers.int.get("terrain_type")
# bm.verts.ensure_lookup_table()
# Face flags are output here!
for face in bm.faces:
if cfl and (face[cfl] & FACE_FLAGS["mFD_TRIGGER"]):
if o.thug_triggerscript_props.template_name_txt == "" or o.thug_triggerscript_props.template_name_txt == "None" or \
(o.thug_triggerscript_props.template_name_txt == "Custom" and o.thug_triggerscript_props.custom_name == ""):
# This object has a Trigger face, but no TriggerScript assigned
# Normally this would crash the game, so let's create and assign a blank script!
get_triggerscript("io_thps_scene_NullScript")
#o.thug_triggerscript_props.template_name = "Custom"
o.thug_triggerscript_props.template_name_txt = "Custom"
o.thug_triggerscript_props.custom_name = "io_thps_scene_NullScript"
LOG.debug("WARNING: Object {} has trigger faces but no TriggerScript. A blank script was assigned.".format(o.name))
#raise Exception("Collision object " + o.name + " has a trigger face with no TriggerScript attached to the object! This is for your own safety!")
w("H", face[cfl] if cfl else 0)
tt = collision._resolve_face_terrain_type(o, bm, face)
w("H", tt)
for vert in face.verts:
w("H", vert.index)
if target_game == 'THPS4':
w("H", 0) # Padding?
if target_game == "THUG2":
def w(fmt, *args):
thug2_thing_out.write(struct.pack(fmt, *args))
thug2_thing_out.write(b'\x00' * len(bm.faces))
#p("I am at: {}", outp.tell())
def w(fmt, *args):
nodes_out.write(struct.pack(fmt, *args))
bsp_nodes_start = bsp_nodes_size
node_list, node_indices = tree_to_list(obj_bsp_tree)
for idx, node in enumerate(node_list):
# assert idx == node_indices[id(node)]
# DBG(node_indices[id(node)])
bsp_nodes_size += SIZEOF_BSP_NODE
if isinstance(node, BSPLeaf):
w("B", 0xFF if target_game == 'THPS4' else 3) # the axis it is split on (0 = X, 1 = Y, 2 = Z, 3 = Leaf)
w("B", 0) # padding
w("H", len(node.faces) * (3 if False and target_game == "THUG2" else 1))
w("I", node_face_index_offset)
# exported |= set(node.faces)
for face in node.faces:
# assert bm.faces[face.index] == face
node_faces.append(face.index)
node_face_index_offset += len(node.faces) * (3 if False and target_game == "THUG2" else 1)
#if target_game == 'THPS4':
# # Padding?
# w("I", 0xFFFFFFFF)
# w("I", 0xFFFFFFFF)
else:
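                    # Pack the split axis into the low 2 bits and the split
                    # point as fixed point (scaled by 16) into the upper bits
                    # of a single 32-bit value.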
split_axis_and_point = (
(node.split_axis & 0x3) |
# 1 |
(int(node.split_point * 16.0) << 2)
)
w("i", split_axis_and_point)
w("I", (bsp_nodes_start + node_indices[id(node.left)] * SIZEOF_BSP_NODE))
def w(fmt, *args):
outp.write(struct.pack(fmt, *args))
tmp_offset = outp.tell()
outp.seek(total_verts_offset)
w("i", total_verts)
w("i", total_faces)
w("i", 0) # small faces
w("i", total_verts)
outp.seek(tmp_offset)
LOG.debug("offset obj list: {}".format(outp.tell()))
outp.write(b'\x00' * calc_alignment_diff(outp.tell(), 16))
LOG.debug("offset verts: {}".format(outp.tell()))
outp.write(verts_out.getbuffer())
if target_game != 'THPS4':
LOG.debug("offset intensities: {}".format(outp.tell()))
# intensity
outp.write(intensities_out.getbuffer())
alignment_diff = calc_alignment_diff(outp.tell(), 4)
if alignment_diff != 0:
LOG.debug("A: ".format(alignment_diff))
outp.write(b'\x00' * alignment_diff)
# outp.write(b'\x00' * calc_alignment_diff(SIZEOF_FLOAT_VERT * total_verts + total_verts), 4)
LOG.debug("offset faces: {}".format(outp.tell()))
outp.write(faces_out.getbuffer())
if target_game == "THUG2":
# alignment_diff = calc_alignment_diff(total_verts, 4)
alignment_diff = calc_alignment_diff(outp.tell(), 2)
if alignment_diff != 0:
LOG.debug("B: {}".format(alignment_diff))
outp.write(b'\x00' * alignment_diff)
else:
# LOG.debug("B TH1!")
if total_faces & 1:
outp.write(b'\x00' * 2)
if target_game == "THUG2":
LOG.debug("offset thug2 thing: {}".format(outp.tell()))
outp.write(thug2_thing_out.getbuffer())
alignment_diff = calc_alignment_diff(outp.tell(), 4)
if alignment_diff != 0:
LOG.debug("C: {}".format(alignment_diff))
outp.write(b'\x00' * alignment_diff)
LOG.debug("offset nodes: {}".format(outp.tell()))
w("I", bsp_nodes_size)
outp.write(nodes_out.getbuffer())
for face in node_faces:
w("H", face)
bm.free()
#----------------------------------------------------------------------------------
def calc_alignment_diff(offset, alignment):
assert offset >= 0 and alignment >= 0
if offset % alignment == 0:
return 0
return alignment - (offset % alignment)
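# Worked examples (added for illustration; they follow directly from the
# definition above):
#   calc_alignment_diff(10, 4) -> 2   (10 + 2 = 12, a multiple of 4)
#   calc_alignment_diff(16, 4) -> 0   (already aligned)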
#----------------------------------------------------------------------------------
#- Runs the 'Quick export', validating the settings first
#----------------------------------------------------------------------------------
def maybe_export_scene(operator, scene):
def scene_settings_are_valid(level_props):
return (level_props.scene_name != '' and level_props.export_props.target_game != '' and \
level_props.export_props.directory != '' and level_props.export_props.scene_type != '' )
if not hasattr(scene, 'thug_level_props') or not hasattr(scene.thug_level_props, 'export_props'):
operator.report({'ERROR'}, "Unable to run quick export - scene settings were not found!")
#raise Exception('Unable to run quick export - scene settings were not found!')
return False
if not scene_settings_are_valid(scene.thug_level_props):
operator.report({'ERROR'}, "Invalid scene settings. Enter a scene name and select the game/export dir/export type first!")
#raise Exception('Unable to run quick export - scene settings are not valid. Make sure you enter a scene name and select the game/export dir/export type first!')
return False
scene.thug_level_props.export_props.filename = scene.thug_level_props.scene_name
scene.thug_level_props.export_props.directory = bpy.path.abspath(scene.thug_level_props.export_props.directory)
if scene.thug_level_props.export_props.scene_type == 'Level':
do_export(scene.thug_level_props.export_props, bpy.context, scene.thug_level_props.export_props.target_game)
else:
do_export_model(scene.thug_level_props.export_props, bpy.context, scene.thug_level_props.export_props.target_game)
return True
# OPERATORS
#############################################
class SceneToTHPS4Files(bpy.types.Operator): #, ExportHelper):
bl_idname = "export.scene_to_th4_xbx"
bl_label = "Scene to THPS4 level files"
# bl_options = {'REGISTER', 'UNDO'}
def report(self, category, message):
LOG.debug("OP: {}: {}".format(category, message))
super().report(category, message)
filename = StringProperty(name="File Name")
directory = StringProperty(name="Directory")
always_export_normals = BoolProperty(name="Export normals", default=False)
use_vc_hack = BoolProperty(name="Vertex color hack",
description = "Doubles intensity of vertex colours. Enable if working with an imported scene that appears too dark in game."
, default=False)
speed_hack = BoolProperty(name="No modifiers (speed hack)",
description = "Don't apply any modifiers to objects. Much faster with large scenes, but all mesh must be triangles prior to export.", default=False)
# AUTOSPLIT SETTINGS
autosplit_everything = BoolProperty(name="Autosplit All",
description = "Applies the autosplit setting to all objects in the scene, with default settings.", default=False)
autosplit_faces_per_subobject = IntProperty(name="Faces Per Subobject",
description="The max amount of faces for every created subobject.",
default=800, min=50, max=6000)
autosplit_max_radius = FloatProperty(name="Max Radius",
description="The max radius of for every created subobject.",
default=2000, min=100, max=5000)
# /AUTOSPLIT SETTINGS
pack_pre = BoolProperty(name="Pack files into .prx", default=True)
is_park_editor = BoolProperty(name="Is Park Editor",
description="Use this option when exporting a park editor dictionary.", default=False)
generate_tex_file = BoolProperty(name="Generate a .tex file", default=True)
generate_scn_file = BoolProperty(name="Generate a .scn file", default=True)
generate_sky = BoolProperty(name="Generate skybox", default=True,description="Check to export a skybox with this scene.")
generate_col_file = BoolProperty(name="Generate a .col file", default=True)
generate_scripts_files = BoolProperty(name="Generate scripts", default=True)
# filepath = StringProperty()
skybox_name = StringProperty(name="Skybox name", default="THUG_Sky")
export_scale = FloatProperty(name="Export scale", default=1)
mipmap_offset = IntProperty(
name="Mipmap offset",
description="Offsets generation of mipmaps (default is 0). For example, setting this to 1 will make the base texture 1/4 the size. Use when working with very large textures.",
min=0, max=4, default=0)
only_offset_lightmap = BoolProperty(name="Only Lightmaps", default=False, description="Mipmap offset only applies to lightmap textures.")
def execute(self, context):
return do_export(self, context, "THPS4")
def invoke(self, context, event):
wm = bpy.context.window_manager
wm.fileselect_add(self)
return {'RUNNING_MODAL'}
def draw(self, context):
self.layout.row().prop(self, "generate_sky", toggle=True, icon='MAT_SPHERE_SKY')
if self.generate_sky:
box = self.layout.box().column(True)
box.row().prop(self, "skybox_name")
self.layout.row().prop(self, "always_export_normals", toggle=True, icon='SNAP_NORMAL')
self.layout.row().prop(self, "use_vc_hack", toggle=True, icon='COLOR')
self.layout.row().prop(self, "speed_hack", toggle=True, icon='FF')
self.layout.row().prop(self, "autosplit_everything", toggle=True, icon='MOD_EDGESPLIT')
if self.autosplit_everything:
box = self.layout.box().column(True)
box.row().prop(self, "autosplit_faces_per_subobject")
box.row().prop(self, "autosplit_max_radius")
self.layout.row().prop(self, "pack_pre", toggle=True, icon='PACKAGE')
self.layout.row().prop(self, "is_park_editor", toggle=True, icon='PACKAGE')
self.layout.row().prop(self, "generate_tex_file", toggle=True, icon='TEXTURE_DATA')
self.layout.row().prop(self, "generate_scn_file", toggle=True, icon='SCENE_DATA')
self.layout.row().prop(self, "generate_col_file", toggle=True, icon='OBJECT_DATA')
self.layout.row().prop(self, "generate_scripts_files", toggle=True, icon='FILE_SCRIPT')
self.layout.row().prop(self, "export_scale")
box = self.layout.box().column(True)
box.row().prop(self, "mipmap_offset")
box.row().prop(self, "only_offset_lightmap")
#----------------------------------------------------------------------------------
class SceneToTHPS4Model(bpy.types.Operator): #, ExportHelper):
bl_idname = "export.scene_to_th4_model"
bl_label = "Scene to THPS4 model"
# bl_options = {'REGISTER', 'UNDO'}
def report(self, category, message):
LOG.debug("OP: {}: {}".format(category, message))
super().report(category, message)
filename = StringProperty(name="File Name")
directory = StringProperty(name="Directory")
always_export_normals = BoolProperty(name="Export normals", default=False)
is_park_editor = BoolProperty(name="Is Park Editor", default=False, options={'HIDDEN'})
use_vc_hack = BoolProperty(name="Vertex color hack",
description = "Doubles intensity of vertex colours. Enable if working with an imported scene that appears too dark in game."
, default=False)
speed_hack = BoolProperty(name="No modifiers (speed hack)",
description = "Don't apply any modifiers to objects. Much faster with large scenes, but all mesh must be triangles prior to export.", default=False)
# AUTOSPLIT SETTINGS
autosplit_everything = BoolProperty(name="Autosplit All"
, description = "Applies the autosplit setting to all objects in the scene, with default settings."
, default=False)
    autosplit_faces_per_subobject = IntProperty(name="Faces Per Subobject",
        description="The maximum number of faces for every created subobject.",
        default=800, min=50, max=6000)
    autosplit_max_radius = FloatProperty(name="Max Radius",
        description="The max radius for every created subobject.",
        default=2000, min=100, max=5000)
# /AUTOSPLIT SETTINGS
model_type = EnumProperty(items = (
("skin", ".skin", "Character skin, used for playable characters and pedestrians."),
("mdl", ".mdl", "Model used for vehicles and other static mesh."),
), name="Model Type", default="skin")
generate_scripts_files = BoolProperty(
name="Generate scripts",
default=True)
export_scale = FloatProperty(name="Export scale", default=1)
mipmap_offset = IntProperty(
name="Mipmap offset",
description="Offsets generation of mipmaps (default is 0). For example, setting this to 1 will make the base texture 1/4 the size. Use when working with very large textures.",
min=0, max=4, default=0)
only_offset_lightmap = BoolProperty(name="Only Lightmaps", default=False, description="Mipmap offset only applies to lightmap textures.")
def execute(self, context):
return do_export_model(self, context, "THPS4")
def invoke(self, context, event):
wm = bpy.context.window_manager
wm.fileselect_add(self)
return {'RUNNING_MODAL'}
def draw(self, context):
self.layout.row().prop(self, "always_export_normals", toggle=True, icon='SNAP_NORMAL')
self.layout.row().prop(self, "use_vc_hack", toggle=True, icon='COLOR')
self.layout.row().prop(self, "speed_hack", toggle=True, icon='FF')
self.layout.row().prop(self, "autosplit_everything", toggle=True, icon='MOD_EDGESPLIT')
if self.autosplit_everything:
box = self.layout.box().column(True)
box.row().prop(self, "autosplit_faces_per_subobject")
box.row().prop(self, "autosplit_max_radius")
self.layout.row().prop(self, "model_type", expand=True)
self.layout.row().prop(self, "generate_scripts_files", toggle=True, icon='FILE_SCRIPT')
self.layout.row().prop(self, "export_scale")
box = self.layout.box().column(True)
box.row().prop(self, "mipmap_offset")
box.row().prop(self, "only_offset_lightmap")
#----------------------------------------------------------------------------------
class SceneToTHUGFiles(bpy.types.Operator): #, ExportHelper):
bl_idname = "export.scene_to_thug_xbx"
bl_label = "Scene to THUG1 level files"
# bl_options = {'REGISTER', 'UNDO'}
def report(self, category, message):
LOG.debug("OP: {}: {}".format(category, message))
super().report(category, message)
filename = StringProperty(name="File Name")
directory = StringProperty(name="Directory")
always_export_normals = BoolProperty(name="Export normals", default=False)
use_vc_hack = BoolProperty(name="Vertex color hack",
description = "Doubles intensity of vertex colours. Enable if working with an imported scene that appears too dark in game."
, default=False)
speed_hack = BoolProperty(name="No modifiers (speed hack)",
description = "Don't apply any modifiers to objects. Much faster with large scenes, but all mesh must be triangles prior to export.", default=False)
# AUTOSPLIT SETTINGS
autosplit_everything = BoolProperty(name="Autosplit All",
description = "Applies the autosplit setting to all objects in the scene, with default settings.", default=False)
    autosplit_faces_per_subobject = IntProperty(name="Faces Per Subobject",
        description="The maximum number of faces for every created subobject.",
        default=800, min=50, max=6000)
    autosplit_max_radius = FloatProperty(name="Max Radius",
        description="The max radius for every created subobject.",
        default=2000, min=100, max=5000)
# /AUTOSPLIT SETTINGS
pack_pre = BoolProperty(name="Pack files into .prx", default=True)
is_park_editor = BoolProperty(name="Is Park Editor",
description="Use this option when exporting a park editor dictionary.", default=False)
generate_tex_file = BoolProperty(name="Generate a .tex file", default=True)
generate_scn_file = BoolProperty(name="Generate a .scn file", default=True)
generate_sky = BoolProperty(name="Generate skybox", default=True,description="Check to export a skybox with this scene.")
generate_col_file = BoolProperty(name="Generate a .col file", default=True)
generate_scripts_files = BoolProperty(name="Generate scripts", default=True)
# filepath = StringProperty()
skybox_name = StringProperty(name="Skybox name", default="THUG_Sky")
export_scale = FloatProperty(name="Export scale", default=1)
mipmap_offset = IntProperty(
name="Mipmap offset",
description="Offsets generation of mipmaps (default is 0). For example, setting this to 1 will make the base texture 1/4 the size. Use when working with very large textures.",
min=0, max=4, default=0)
only_offset_lightmap = BoolProperty(name="Only Lightmaps", default=False, description="Mipmap offset only applies to lightmap textures.")
def execute(self, context):
return do_export(self, context, "THUG1")
def invoke(self, context, event):
wm = bpy.context.window_manager
wm.fileselect_add(self)
return {'RUNNING_MODAL'}
def draw(self, context):
self.layout.row().prop(self, "generate_sky", toggle=True, icon='MAT_SPHERE_SKY')
if self.generate_sky:
box = self.layout.box().column(True)
box.row().prop(self, "skybox_name")
self.layout.row().prop(self, "always_export_normals", toggle=True, icon='SNAP_NORMAL')
self.layout.row().prop(self, "use_vc_hack", toggle=True, icon='COLOR')
self.layout.row().prop(self, "speed_hack", toggle=True, icon='FF')
self.layout.row().prop(self, "autosplit_everything", toggle=True, icon='MOD_EDGESPLIT')
if self.autosplit_everything:
box = self.layout.box().column(True)
box.row().prop(self, "autosplit_faces_per_subobject")
box.row().prop(self, "autosplit_max_radius")
self.layout.row().prop(self, "pack_pre", toggle=True, icon='PACKAGE')
self.layout.row().prop(self, "is_park_editor", toggle=True, icon='PACKAGE')
self.layout.row().prop(self, "generate_tex_file", toggle=True, icon='TEXTURE_DATA')
self.layout.row().prop(self, "generate_scn_file", toggle=True, icon='SCENE_DATA')
self.layout.row().prop(self, "generate_col_file", toggle=True, icon='OBJECT_DATA')
self.layout.row().prop(self, "generate_scripts_files", toggle=True, icon='FILE_SCRIPT')
self.layout.row().prop(self, "export_scale")
box = self.layout.box().column(True)
box.row().prop(self, "mipmap_offset")
box.row().prop(self, "only_offset_lightmap")
#----------------------------------------------------------------------------------
class SceneToTHUGModel(bpy.types.Operator): #, ExportHelper):
bl_idname = "export.scene_to_thug_model"
bl_label = "Scene to THUG1 model"
# bl_options = {'REGISTER', 'UNDO'}
def report(self, category, message):
LOG.debug("OP: {}: {}".format(category, message))
super().report(category, message)
filename = StringProperty(name="File Name")
directory = StringProperty(name="Directory")
always_export_normals = BoolProperty(name="Export normals", default=False)
is_park_editor = BoolProperty(name="Is Park Editor", default=False, options={'HIDDEN'})
use_vc_hack = BoolProperty(name="Vertex color hack",
description = "Doubles intensity of vertex colours. Enable if working with an imported scene that appears too dark in game."
, default=False)
speed_hack = BoolProperty(name="No modifiers (speed hack)",
description = "Don't apply any modifiers to objects. Much faster with large scenes, but all mesh must be triangles prior to export.", default=False)
# AUTOSPLIT SETTINGS
autosplit_everything = BoolProperty(name="Autosplit All"
, description = "Applies the autosplit setting to all objects in the scene, with default settings."
, default=False)
    autosplit_faces_per_subobject = IntProperty(name="Faces Per Subobject",
        description="The maximum number of faces for every created subobject.",
        default=800, min=50, max=6000)
    autosplit_max_radius = FloatProperty(name="Max Radius",
        description="The max radius for every created subobject.",
        default=2000, min=100, max=5000)
# /AUTOSPLIT SETTINGS
model_type = EnumProperty(items = (
("skin", ".skin", "Character skin, used for playable characters and pedestrians."),
("mdl", ".mdl", "Model used for vehicles and other static mesh."),
), name="Model Type", default="skin")
generate_scripts_files = BoolProperty(
name="Generate scripts",
default=True)
export_scale = FloatProperty(name="Export scale", default=1)
mipmap_offset = IntProperty(
name="Mipmap offset",
description="Offsets generation of mipmaps (default is 0). For example, setting this to 1 will make the base texture 1/4 the size. Use when working with very large textures.",
min=0, max=4, default=0)
only_offset_lightmap = BoolProperty(name="Only Lightmaps", default=False, description="Mipmap offset only applies to lightmap textures.")
def execute(self, context):
return do_export_model(self, context, "THUG1")
def invoke(self, context, event):
wm = bpy.context.window_manager
wm.fileselect_add(self)
return {'RUNNING_MODAL'}
def draw(self, context):
self.layout.row().prop(self, "always_export_normals", toggle=True, icon='SNAP_NORMAL')
self.layout.row().prop(self, "use_vc_hack", toggle=True, icon='COLOR')
self.layout.row().prop(self, "speed_hack", toggle=True, icon='FF')
self.layout.row().prop(self, "autosplit_everything", toggle=True, icon='MOD_EDGESPLIT')
if self.autosplit_everything:
box = self.layout.box().column(True)
box.row().prop(self, "autosplit_faces_per_subobject")
box.row().prop(self, "autosplit_max_radius")
self.layout.row().prop(self, "model_type", expand=True)
self.layout.row().prop(self, "generate_scripts_files", toggle=True, icon='FILE_SCRIPT')
self.layout.row().prop(self, "export_scale")
box = self.layout.box().column(True)
box.row().prop(self, "mipmap_offset")
box.row().prop(self, "only_offset_lightmap")
# OPERATORS
#############################################
class SceneToTHUG2Files(bpy.types.Operator): #, ExportHelper):
bl_idname = "export.scene_to_thug2_xbx"
bl_label = "Scene to THUG2/PRO level files"
def report(self, category, message):
LOG.debug("OP: {}: {}".format(category, message))
super().report(category, message)
filename = StringProperty(name="File Name")
directory = StringProperty(name="Directory")
always_export_normals = BoolProperty(name="Export normals", default=False)
use_vc_hack = BoolProperty(name="Vertex color hack",
description = "Doubles intensity of vertex colours. Enable if working with an imported scene that appears too dark in game."
, default=False)
speed_hack = BoolProperty(name="No modifiers (speed hack)",
description = "Don't apply any modifiers to objects. Much faster with large scenes, but all mesh must be triangles prior to export.", default=False)
# AUTOSPLIT SETTINGS
autosplit_everything = BoolProperty(name="Autosplit All"
, description = "Applies the autosplit setting to all objects in the scene, with default settings."
, default=False)
    autosplit_faces_per_subobject = IntProperty(name="Faces Per Subobject",
        description="The maximum number of faces for every created subobject.",
        default=800, min=50, max=6000)
    autosplit_max_radius = FloatProperty(name="Max Radius",
        description="The max radius for every created subobject.",
        default=2000, min=100, max=5000)
# /AUTOSPLIT SETTINGS
is_park_editor = BoolProperty(name="Is Park Editor",
description="Use this option when exporting a park editor dictionary.", default=False)
pack_pre = BoolProperty(name="Pack files into .prx", default=True)
generate_tex_file = BoolProperty(
name="Generate a .tex file",
description="If you have already generated a .tex file, and didn't change/add any new images in meantime, you can uncheck this.", default=True)
generate_scn_file = BoolProperty(name="Generate a .scn file", default=True)
generate_col_file = BoolProperty(name="Generate a .col file", default=True)
generate_sky = BoolProperty(name="Generate skybox", default=True,description="Check to export a skybox with this scene.")
generate_scripts_files = BoolProperty(name="Generate scripts", default=True)
skybox_name = StringProperty(name="Skybox name", default="THUG2_Sky")
export_scale = FloatProperty(name="Export scale", default=1)
mipmap_offset = IntProperty(name="Mipmap offset",
description="Offsets generation of mipmaps (default is 0). For example, setting this to 1 will make the base texture 1/4 the size. Use when working with very large textures.",
min=0, max=4, default=0)
only_offset_lightmap = BoolProperty(name="Only Lightmaps", default=False, description="Mipmap offset only applies to lightmap textures.")
def execute(self, context):
return do_export(self, context, "THUG2")
def invoke(self, context, event):
wm = bpy.context.window_manager
wm.fileselect_add(self)
return {'RUNNING_MODAL'}
def draw(self, context):
self.layout.row().prop(self, "generate_sky", toggle=True, icon='MAT_SPHERE_SKY')
if self.generate_sky:
box = self.layout.box().column(True)
box.row().prop(self, "skybox_name")
self.layout.row().prop(self, "always_export_normals", toggle=True, icon='SNAP_NORMAL')
self.layout.row().prop(self, "use_vc_hack", toggle=True, icon='COLOR')
self.layout.row().prop(self, "speed_hack", toggle=True, icon='FF')
self.layout.row().prop(self, "autosplit_everything", toggle=True, icon='MOD_EDGESPLIT')
if self.autosplit_everything:
box = self.layout.box().column(True)
box.row().prop(self, "autosplit_faces_per_subobject")
box.row().prop(self, "autosplit_max_radius")
self.layout.row().prop(self, "pack_pre", toggle=True, icon='PACKAGE')
self.layout.row().prop(self, "generate_tex_file", toggle=True, icon='TEXTURE_DATA')
self.layout.row().prop(self, "generate_scn_file", toggle=True, icon='SCENE_DATA')
self.layout.row().prop(self, "generate_col_file", toggle=True, icon='OBJECT_DATA')
self.layout.row().prop(self, "generate_scripts_files", toggle=True, icon='FILE_SCRIPT')
self.layout.row().prop(self, "export_scale")
box = self.layout.box().column(True)
box.row().prop(self, "mipmap_offset")
box.row().prop(self, "only_offset_lightmap")
#----------------------------------------------------------------------------------
class SceneToTHUG2Model(bpy.types.Operator): #, ExportHelper):
bl_idname = "export.scene_to_thug2_model"
bl_label = "Scene to THUG2 model"
# bl_options = {'REGISTER', 'UNDO'}
def report(self, category, message):
LOG.debug("OP: {}: {}".format(category, message))
super().report(category, message)
filename = StringProperty(name="File Name")
directory = StringProperty(name="Directory")
always_export_normals = BoolProperty(name="Export normals", default=False)
use_vc_hack = BoolProperty(name="Vertex color hack",
description = "Doubles intensity of vertex colours. Enable if working with an imported scene that appears too dark in game."
, default=False)
speed_hack = BoolProperty(name="No modifiers (speed hack)",
description = "Don't apply any modifiers to objects. Much faster with large scenes, but all mesh must be triangles prior to export.", default=False)
# AUTOSPLIT SETTINGS
autosplit_everything = BoolProperty(name="Autosplit All",
description = "Applies the autosplit setting to all objects in the scene, with default settings.", default=False)
    autosplit_faces_per_subobject = IntProperty(name="Faces Per Subobject",
        description="The maximum number of faces for every created subobject.",
        default=800, min=50, max=6000)
    autosplit_max_radius = FloatProperty(name="Max Radius",
        description="The max radius for every created subobject.",
        default=2000, min=100, max=5000)
# /AUTOSPLIT SETTINGS
is_park_editor = BoolProperty(name="Is Park Editor", default=False, options={'HIDDEN'})
model_type = EnumProperty(items = (
("skin", ".skin", "Character skin, used for playable characters and pedestrians."),
("mdl", ".mdl", "Model used for vehicles and other static mesh."),
), name="Model Type", default="skin")
generate_scripts_files = BoolProperty(name="Generate scripts", default=True)
export_scale = FloatProperty(name="Export scale", default=1)
mipmap_offset = IntProperty(name="Mipmap offset",
description="Offsets generation of mipmaps (default is 0). For example, setting this to 1 will make the base texture 1/4 the size. Use when working with very large textures.",
min=0, max=4, default=0)
only_offset_lightmap = BoolProperty(name="Only Lightmaps", default=False, description="Mipmap offset only applies to lightmap textures.")
def execute(self, context):
return do_export_model(self, context, "THUG2")
def invoke(self, context, event):
wm = bpy.context.window_manager
wm.fileselect_add(self)
return {'RUNNING_MODAL'}
def draw(self, context):
self.layout.row().prop(self, "always_export_normals", toggle=True, icon='SNAP_NORMAL')
self.layout.row().prop(self, "use_vc_hack", toggle=True, icon='COLOR')
self.layout.row().prop(self, "speed_hack", toggle=True, icon='FF')
self.layout.row().prop(self, "autosplit_everything", toggle=True, icon='MOD_EDGESPLIT')
if self.autosplit_everything:
box = self.layout.box().column(True)
box.row().prop(self, "autosplit_faces_per_subobject")
box.row().prop(self, "autosplit_max_radius")
self.layout.row().prop(self, "model_type", expand=True)
self.layout.row().prop(self, "generate_scripts_files", toggle=True, icon='FILE_SCRIPT')
self.layout.row().prop(self, "export_scale")
box = self.layout.box().column(True)
box.row().prop(self, "mipmap_offset")
box.row().prop(self, "only_offset_lightmap")
class THUGQuickExport(bpy.types.Operator):
bl_idname = "export.thug_quick_export"
bl_label = "Quick Export"
def execute(self, context):
if maybe_export_scene(self, context.scene):
self.report({'INFO'}, "Quick export successfully completed!")
return {'FINISHED'}
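# Note: execute() above reports success only when maybe_export_scene() returns
# a truthy value, but it returns {'FINISHED'} either way, so Blender treats the
# operator run as completed even if the export was skipped.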
# PANELS
#############################################
#----------------------------------------------------------------------------------
class THUGExportTools(bpy.types.Panel):
bl_label = "TH Export Tools"
bl_region_type = "TOOLS"
bl_space_type = "VIEW_3D"
bl_category = "THUG Tools"
@classmethod
def poll(cls, context):
return context.user_preferences.addons[ADDON_NAME].preferences.object_settings_tools
def draw(self, context):
if not context.scene: return
scene = context.scene
box = self.layout.box().column(True)
box.row().operator(THUGQuickExport.bl_idname, text=THUGQuickExport.bl_label, icon='PACKAGE')
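# The poll() classmethod above keys this panel's visibility to the add-on
# preference 'object_settings_tools', so the "TH Export Tools" tab only shows
# up in the 3D View toolbar when that option is enabled.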
# ==== /graphgallery/datasets/tu_dataset.py | repo: blindSpoter01/GraphGallery | license: MIT ====
import os
import glob
import requests
import os.path as osp
import numpy as np
import pickle as pkl
import pandas as pd
from urllib.error import URLError
from typing import Optional, List
from .in_memory_dataset import InMemoryDataset
from ..data.edge_graph import EdgeGraph
from ..data.io import makedirs, extractall, remove
_DATASET_URL = 'https://ls11-www.cs.tu-dortmund.de/people/morris/graphkerneldatasets'
_DATASET_CLEAN_URL = 'https://raw.githubusercontent.com/nd7141/graph_datasets/master/datasets'
class TUDataset(InMemoryDataset):
r"""A variety of graph kernel benchmark datasets, *.e.g.* "IMDB-BINARY",
"REDDIT-BINARY" or "PROTEINS", collected from the `TU Dortmund University
<https://chrsmrrs.github.io/datasets>`_.
In addition, this dataset wrapper provides `cleaned dataset versions
<https://github.com/nd7141/graph_datasets>`_ as motivated by the
`"Understanding Isomorphism Bias in Graph Data Sets"
<https://arxiv.org/abs/1910.12091>`_ paper, containing only non-isomorphic
graphs.
"""
def __init__(self,
name,
root=None,
*,
transform=None,
verbose=True,
url=None,
remove_download=True):
if name.endswith('_clean'):
name = name[:-6]
self._url = _DATASET_CLEAN_URL
else:
self._url = _DATASET_URL
super().__init__(name=name, root=root,
transform=transform,
verbose=verbose, url=url,
remove_download=remove_download)
@staticmethod
def available_datasets():
try:
return [
d[:-4] for d in pd.read_html(_DATASET_URL)
[0].Name[2:-1].values.tolist()
]
except URLError:
# No internet, don't panic
print('No connection. See {}'.format(_DATASET_URL))
return []
def _download(self):
req = requests.get(self.url)
if req.status_code == 404:
raise ValueError(
f"Unknown dataset {self.name}. See '{self.__class__.__name__}.available_datasets()'"
" for a list of available datasets.")
makedirs(self.download_dir)
with open(self.download_paths[0], 'wb') as f:
f.write(req.content)
extractall(self.download_paths, osp.split(self.download_dir)[0])
if self.remove_download:
remove(self.download_paths)
def _process(self):
folder = self.download_dir
prefix = self.name
files = glob.glob(osp.join(folder, f'{prefix}_*.txt'))
names = [f.split(os.sep)[-1][len(prefix) + 1:-4] for f in files]
edge_index = genfromtxt(osp.join(folder, prefix + '_A.txt'),
dtype=np.int64).T - 1
node_graph_label = genfromtxt(osp.join(folder, prefix + '_graph_indicator.txt'),
dtype=np.int64) - 1
edge_graph_label = node_graph_label[edge_index[0]]
node_attr = node_label = None
if 'node_attributes' in names:
node_attr = genfromtxt(osp.join(folder,
prefix + '_node_attributes.txt'),
dtype=np.float32)
if 'node_labels' in names:
node_label = genfromtxt(osp.join(folder,
prefix + '_node_labels.txt'),
dtype=np.int64)
node_label = node_label - node_label.min(0)
edge_attr = edge_label = None
if 'edge_attributes' in names:
edge_attr = genfromtxt(osp.join(folder,
prefix + '_edge_attributes.txt'),
dtype=np.float32)
if 'edge_labels' in names:
edge_label = genfromtxt(osp.join(folder,
prefix + '_edge_labels.txt'),
dtype=np.int64)
edge_label = edge_label - edge_label.min(0)
graph_attr = graph_label = None
if 'graph_attributes' in names: # Regression problem.
graph_attr = np.genfromtxt(osp.join(
folder, prefix + '_graph_attributes.txt'),
dtype=np.float32)
if 'graph_labels' in names: # Classification problem.
graph_label = np.genfromtxt(osp.join(folder,
prefix + '_graph_labels.txt'),
dtype=np.int64)
_, graph_label = np.unique(graph_label, return_inverse=True)
graph = EdgeGraph(edge_index,
edge_attr=edge_attr,
edge_label=edge_label,
edge_graph_label=edge_graph_label,
node_attr=node_attr,
node_label=node_label,
node_graph_label=node_graph_label,
graph_attr=graph_attr,
graph_label=graph_label)
cache = {'graph': graph}
with open(self.process_path, 'wb') as f:
pkl.dump(cache, f)
return cache
@property
def download_dir(self):
return osp.join(self.root, "TU", self.name)
def split_graphs(self,
train_size=None,
val_size=None,
test_size=None,
split_by=None,
random_state: Optional[int] = None):
raise NotImplementedError
@property
def url(self) -> str:
return '{}/{}.zip'.format(self._url, self.name)
@property
def process_filename(self):
return f'{self.name}.pkl'
@property
def raw_filenames(self) -> List[str]:
names = ['A', 'graph_indicator'] # and more
return ['{}_{}.txt'.format(self.name, name) for name in names]
@property
def download_paths(self):
return [osp.join(self.download_dir, self.name + '.zip')]
@property
def raw_paths(self) -> List[str]:
return [
osp.join(self.download_dir, raw_filename)
for raw_filename in self.raw_filenames
]
def genfromtxt(path, sep=',', start=0, end=None, dtype=None, device=None):
    # Thin wrapper around np.loadtxt. Note: 'start', 'end' and 'device' are
    # accepted for call-site compatibility but are currently ignored.
    return np.loadtxt(path, delimiter=sep).astype(dtype).squeeze()
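# Usage sketch (hypothetical root path; 'MUTAG' is one of the names returned
# by TUDataset.available_datasets(), and 'MUTAG_clean' would select the
# cleaned, non-isomorphic variant handled in __init__ above):
# dataset = TUDataset('MUTAG', root='~/graph_data')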
# ==== /MyAnalysis/IsolationTools/python/muonDirectionalPFIsolations_cff.py | repo: hbakhshi/NTupleProducer | license: none ====
import FWCore.ParameterSet.Config as cms
import MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi
### DR=0.1 cone
# Charged Hadron isolation
muonDirPFIsoChHad01 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoChHad01.pfTypes = cms.untracked.vint32(1)
muonDirPFIsoChHad01.deltaR = 0.1
muonDirPFIsoChHad01.directional = True
# Neutral Hadron isolation
muonDirPFIsoNHad01 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoNHad01.pfTypes = cms.untracked.vint32(5)
muonDirPFIsoNHad01.deltaR = 0.1
muonDirPFIsoNHad01.directional = True
# Photon isolation
muonDirPFIsoPhoton01 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoPhoton01.pfTypes = cms.untracked.vint32(4)
muonDirPFIsoPhoton01.deltaR = 0.1
muonDirPFIsoPhoton01.directional = True
### DR=0.2 cone
# Charged Hadron isolation
muonDirPFIsoChHad02 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoChHad02.pfTypes = cms.untracked.vint32(1)
muonDirPFIsoChHad02.deltaR = 0.2
muonDirPFIsoChHad02.directional = True
# Neutral Hadron isolation
muonDirPFIsoNHad02 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoNHad02.pfTypes = cms.untracked.vint32(5)
muonDirPFIsoNHad02.deltaR = 0.2
muonDirPFIsoNHad02.directional = True
# Photon isolation
muonDirPFIsoPhoton02 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoPhoton02.pfTypes = cms.untracked.vint32(4)
muonDirPFIsoPhoton02.deltaR = 0.2
muonDirPFIsoPhoton02.directional = True
### DR=0.3 cone
# Charged Hadron isolation
muonDirPFIsoChHad03 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoChHad03.pfTypes = cms.untracked.vint32(1)
muonDirPFIsoChHad03.deltaR = 0.3
muonDirPFIsoChHad03.directional = True
# Neutral Hadron isolation
muonDirPFIsoNHad03 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoNHad03.pfTypes = cms.untracked.vint32(5)
muonDirPFIsoNHad03.deltaR = 0.3
muonDirPFIsoNHad03.directional = True
# Photon isolation
muonDirPFIsoPhoton03 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoPhoton03.pfTypes = cms.untracked.vint32(4)
muonDirPFIsoPhoton03.deltaR = 0.3
muonDirPFIsoPhoton03.directional = True
### DR=0.4 cone
# Charged Hadron isolation
muonDirPFIsoChHad04 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoChHad04.pfTypes = cms.untracked.vint32(1)
muonDirPFIsoChHad04.deltaR = 0.4
muonDirPFIsoChHad04.directional = True
# Neutral Hadron isolation
muonDirPFIsoNHad04 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoNHad04.pfTypes = cms.untracked.vint32(5)
muonDirPFIsoNHad04.deltaR = 0.4
muonDirPFIsoNHad04.directional = True
# Photon isolation
muonDirPFIsoPhoton04 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoPhoton04.pfTypes = cms.untracked.vint32(4)
muonDirPFIsoPhoton04.deltaR = 0.4
muonDirPFIsoPhoton04.directional = True
### DR=0.5 cone
# Charged Hadron isolation
muonDirPFIsoChHad05 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoChHad05.pfTypes = cms.untracked.vint32(1)
muonDirPFIsoChHad05.deltaR = 0.5
muonDirPFIsoChHad05.directional = True
# Neutral Hadron isolation
muonDirPFIsoNHad05 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoNHad05.pfTypes = cms.untracked.vint32(5)
muonDirPFIsoNHad05.deltaR = 0.5
muonDirPFIsoNHad05.directional = True
# Photon isolation
muonDirPFIsoPhoton05 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoPhoton05.pfTypes = cms.untracked.vint32(4)
muonDirPFIsoPhoton05.deltaR = 0.5
muonDirPFIsoPhoton05.directional = True
### DR=0.6 cone
# Charged Hadron isolation
muonDirPFIsoChHad06 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoChHad06.pfTypes = cms.untracked.vint32(1)
muonDirPFIsoChHad06.deltaR = 0.6
muonDirPFIsoChHad06.directional = True
# Neutral Hadron isolation
muonDirPFIsoNHad06 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoNHad06.pfTypes = cms.untracked.vint32(5)
muonDirPFIsoNHad06.deltaR = 0.6
muonDirPFIsoNHad06.directional = True
# Photon isolation
muonDirPFIsoPhoton06 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoPhoton06.pfTypes = cms.untracked.vint32(4)
muonDirPFIsoPhoton06.deltaR = 0.6
muonDirPFIsoPhoton06.directional = True
### DR=0.7 cone
# Charged Hadron isolation
muonDirPFIsoChHad07 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoChHad07.pfTypes = cms.untracked.vint32(1)
muonDirPFIsoChHad07.deltaR = 0.7
muonDirPFIsoChHad07.directional = True
# Neutral Hadron isolation
muonDirPFIsoNHad07 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoNHad07.pfTypes = cms.untracked.vint32(5)
muonDirPFIsoNHad07.deltaR = 0.7
muonDirPFIsoNHad07.directional = True
# Photon isolation
muonDirPFIsoPhoton07 = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
muonDirPFIsoPhoton07.pfTypes = cms.untracked.vint32(4)
muonDirPFIsoPhoton07.deltaR = 0.7
muonDirPFIsoPhoton07.directional = True
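# The 21 clones above follow a single pattern: 3 PF candidate types
# (charged hadrons=1, photons=4, neutral hadrons=5) x 7 cone sizes (0.1..0.7).
# A sketch of an equivalent loop-based construction (names match the modules
# above; kept as a comment so the explicit flat layout stays authoritative):
# for dr in (0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7):
#     for label, pf_type in (('ChHad', 1), ('NHad', 5), ('Photon', 4)):
#         m = MyAnalysis.IsolationTools.muonPFIsoSingleType_cfi.muonPFIsoSingleTypeMapProd.clone()
#         m.pfTypes = cms.untracked.vint32(pf_type)
#         m.deltaR = dr
#         m.directional = True
#         globals()['muonDirPFIso%s%02d' % (label, round(dr * 10))] = m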
# ==== /homeassistant/components/homekit/type_triggers.py | repo: bdraco/home-assistant | license: Apache-2.0 ====
"""Class to hold all sensor accessories."""
from __future__ import annotations
import logging
from typing import Any
from pyhap.const import CATEGORY_SENSOR
from homeassistant.core import CALLBACK_TYPE, Context
from homeassistant.helpers import entity_registry
from homeassistant.helpers.trigger import async_initialize_triggers
from .accessories import TYPES, HomeAccessory
from .aidmanager import get_system_unique_id
from .const import (
CHAR_NAME,
CHAR_PROGRAMMABLE_SWITCH_EVENT,
CHAR_SERVICE_LABEL_INDEX,
CHAR_SERVICE_LABEL_NAMESPACE,
SERV_SERVICE_LABEL,
SERV_STATELESS_PROGRAMMABLE_SWITCH,
)
from .util import cleanup_name_for_homekit
_LOGGER = logging.getLogger(__name__)
@TYPES.register("DeviceTriggerAccessory")
class DeviceTriggerAccessory(HomeAccessory):
"""Generate a Programmable switch."""
def __init__(
self,
*args: Any,
device_triggers: list[dict[str, Any]] | None = None,
device_id: str | None = None,
) -> None:
"""Initialize a Programmable switch accessory object."""
super().__init__(*args, category=CATEGORY_SENSOR, device_id=device_id)
assert device_triggers is not None
self._device_triggers = device_triggers
self._remove_triggers: CALLBACK_TYPE | None = None
self.triggers = []
assert device_triggers is not None
ent_reg = entity_registry.async_get(self.hass)
for idx, trigger in enumerate(device_triggers):
type_: str = trigger["type"]
subtype: str | None = trigger.get("subtype")
unique_id = f'{type_}-{subtype or ""}'
if (entity_id := trigger.get("entity_id")) and (
entry := ent_reg.async_get(entity_id)
):
unique_id += f"-entity_unique_id:{get_system_unique_id(entry)}"
trigger_name_parts = []
if entity_id and (state := self.hass.states.get(entity_id)):
trigger_name_parts.append(state.name)
trigger_name_parts.append(type_.replace("_", " ").title())
if subtype:
trigger_name_parts.append(subtype.replace("_", " ").title())
trigger_name = cleanup_name_for_homekit(" ".join(trigger_name_parts))
serv_stateless_switch = self.add_preload_service(
SERV_STATELESS_PROGRAMMABLE_SWITCH,
[CHAR_NAME, CHAR_SERVICE_LABEL_INDEX],
unique_id=unique_id,
)
self.triggers.append(
serv_stateless_switch.configure_char(
CHAR_PROGRAMMABLE_SWITCH_EVENT,
value=0,
valid_values={"Trigger": 0},
)
)
serv_stateless_switch.configure_char(CHAR_NAME, value=trigger_name)
serv_stateless_switch.configure_char(
CHAR_SERVICE_LABEL_INDEX, value=idx + 1
)
serv_service_label = self.add_preload_service(
SERV_SERVICE_LABEL, unique_id=unique_id
)
serv_service_label.configure_char(CHAR_SERVICE_LABEL_NAMESPACE, value=1)
serv_stateless_switch.add_linked_service(serv_service_label)
async def async_trigger(
self,
run_variables: dict[str, Any],
context: Context | None = None,
skip_condition: bool = False,
) -> None:
"""Trigger button press.
This method is a coroutine.
"""
reason = ""
if "trigger" in run_variables and "description" in run_variables["trigger"]:
reason = f' by {run_variables["trigger"]["description"]}'
_LOGGER.debug("Button triggered%s - %s", reason, run_variables)
idx = int(run_variables["trigger"]["idx"])
self.triggers[idx].set_value(0)
# Attach the trigger using the helper in async run
# and detach it in async stop
async def run(self) -> None:
"""Handle accessory driver started event."""
self._remove_triggers = await async_initialize_triggers(
self.hass,
self._device_triggers,
self.async_trigger,
"homekit",
self.display_name,
_LOGGER.log,
)
async def stop(self) -> None:
"""Handle accessory driver stop event."""
if self._remove_triggers:
self._remove_triggers()
@property
def available(self) -> bool:
"""Return available."""
return True
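    # Flow sketch: async_initialize_triggers() (wired up in run() above) fires
    # async_trigger() whenever one of the attached device triggers matches;
    # setting the stateless-switch characteristic to 0 then surfaces a single
    # HomeKit "button pressed" event for that trigger's service.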
# ==== /LeetCode/75-sort_colors.py | repo: davll/practical-algorithms | license: none ====
# https://leetcode.com/problems/sort-colors/
def sort_colours_v1(nums):
    # Counting sort: tally the three colours, then rewrite the array in order.
    count = [0] * 3
    for x in nums:
        count[x] += 1
    print("count = " + str(count))  # leftover debug output
    start = 0
    for k in range(3):
        for i in range(count[k]):
            nums[i+start] = k
        start += count[k]
def sort_colours_v2(nums):
    # Dutch national flag partition: one pass, O(1) extra space.
    # Invariant: nums[:tail0] are all 0, nums[tail0:i] are all 1,
    # and nums[head2+1:] are all 2; nums[i:head2+1] is unclassified.
    n = len(nums)
    if n < 2:
        return
    i, tail0, head2 = 0, 0, n-1
    while i <= head2:
        if nums[i] == 0:
            nums[i], nums[tail0] = nums[tail0], 0
            tail0 += 1
            i += 1
        elif nums[i] == 2:
            # swap in the unexamined element from the back; do not advance i,
            # because the swapped-in value has not been classified yet
            nums[i], nums[head2] = nums[head2], 2
            head2 -= 1
        else:
            i += 1
class Solution:
def sortColors(self, nums):
"""
:type nums: List[int]
:rtype: void Do not return anything, modify nums in-place instead.
"""
sort_colours_v2(nums)
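# Quick check (ad-hoc, not part of the LeetCode submission):
# nums = [2, 0, 2, 1, 1, 0]
# Solution().sortColors(nums)
# assert nums == [0, 0, 1, 1, 2, 2]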
# ==== /Code/CodeRecords/2956/60622/299864.py | repo: AdamZhouSE/pythonHomework | license: none ====
a=input()
b=input()
if a=="2" and b=="ab":
print(675)
else:
print(a)
print(b)
# ==== /journal/pyfakefs_mutants/AOR_BinOp_mutant_1507054997.py | repo: naustarg/cbmcmutate | license: none ====
# line: 91
'A fake filesystem implementation for unit testing.\n\n:Includes:\n * FakeFile: Provides the appearance of a real file.\n * FakeDirectory: Provides the appearance of a real directory.\n * FakeFilesystem: Provides the appearance of a real directory hierarchy.\n * FakeOsModule: Uses FakeFilesystem to provide a fake os module replacement.\n * FakePathModule: Faked os.path module replacement.\n * FakeFileOpen: Faked file() and open() function replacements.\n\n:Usage:\n\n>>> from pyfakefs import fake_filesystem\n>>> filesystem = fake_filesystem.FakeFilesystem()\n>>> os_module = fake_filesystem.FakeOsModule(filesystem)\n>>> pathname = \'/a/new/dir/new-file\'\n\nCreate a new file object, creating parent directory objects as needed:\n\n>>> os_module.path.exists(pathname)\nFalse\n>>> new_file = filesystem.CreateFile(pathname)\n\nFile objects can\'t be overwritten:\n\n>>> os_module.path.exists(pathname)\nTrue\n>>> try:\n... filesystem.CreateFile(pathname)\n... except IOError as e:\n... assert e.errno == errno.EEXIST, \'unexpected errno: %d\' % e.errno\n... assert e.strerror == \'File already exists in fake filesystem\'\n\nRemove a file object:\n\n>>> filesystem.RemoveObject(pathname)\n>>> os_module.path.exists(pathname)\nFalse\n\nCreate a new file object at the previous path:\n\n>>> beatles_file = filesystem.CreateFile(pathname,\n... contents=\'Dear Prudence\\nWon\\\'t you come out to play?\\n\')\n>>> os_module.path.exists(pathname)\nTrue\n\nUse the FakeFileOpen class to read fake file objects:\n\n>>> file_module = fake_filesystem.FakeFileOpen(filesystem)\n>>> for line in file_module(pathname):\n... print(line.rstrip())\n...\nDear Prudence\nWon\'t you come out to play?\n\nFile objects cannot be treated like directory objects:\n\n>>> os_module.listdir(pathname) #doctest: +NORMALIZE_WHITESPACE\nTraceback (most recent call last):\n File "fake_filesystem.py", line 291, in listdir\n raise OSError(errno.ENOTDIR,\nOSError: [Errno 20] Fake os module: not a directory: \'/a/new/dir/new-file\'\n\nThe FakeOsModule can list fake directory objects:\n\n>>> os_module.listdir(os_module.path.dirname(pathname))\n[\'new-file\']\n\nThe FakeOsModule also supports stat operations:\n\n>>> import stat\n>>> stat.S_ISREG(os_module.stat(pathname).st_mode)\nTrue\n>>> stat.S_ISDIR(os_module.stat(os_module.path.dirname(pathname)).st_mode)\nTrue\n'
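# (Per its path and name, this record is a machine-generated AOR/BinOp mutant
# of pyfakefs' fake_filesystem.py: the mutation tool flattens the module to
# one statement per line and tags each statement with its original location
# in the '# line: N' comments, so the mutated code is left as generated.)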
# line: 92
import codecs
# line: 93
import errno
# line: 94
import heapq
# line: 95
import io
# line: 96
import locale
# line: 97
import platform
# line: 98
import os
# line: 99
import sys
# line: 100
import time
# line: 101
import warnings
# line: 103
from collections import namedtuple
# line: 105
import stat
# line: 106
from copy import copy
# line: 108
__pychecker__ = 'no-reimportself'
# line: 110
__version__ = '3.3'
# line: 112
PERM_READ = 256
# line: 113
PERM_WRITE = 128
# line: 114
PERM_EXE = 64
# line: 115
PERM_DEF = 511
# line: 116
PERM_DEF_FILE = 438
# line: 117
PERM_ALL = 4095
# line: 119
_OpenModes = namedtuple('open_modes', 'must_exist can_read can_write truncate append must_not_exist')
# line: 125
_OPEN_MODE_MAP = {'r': (True, True, False, False, False, False), 'w': (False, False, True, True, False, False), 'a': (False, False, True, False, True, False), 'r+': (True, True, True, False, False, False), 'w+': (False, True, True, True, False, False), 'a+': (False, True, True, False, True, False), }
# line: 136
if ((sys.version_info[0] < 3) and (sys.platform != 'win32')):
# line: 137
_OPEN_MODE_MAP['rw'] = (True, True, True, False, False, False)
# line: 139
if (sys.version_info >= (3, 3)):
# line: 140
_OPEN_MODE_MAP['x'] = (False, False, True, False, False, True)
# line: 141
_OPEN_MODE_MAP['x+'] = (False, True, True, False, False, True)
# line: 143
if sys.platform.startswith('linux'):
# line: 146
_MAX_LINK_DEPTH = 40
else:
# line: 149
_MAX_LINK_DEPTH = 32
# line: 151
FAKE_PATH_MODULE_DEPRECATION = 'Do not instantiate a FakePathModule directly; let FakeOsModule instantiate it. See the FakeOsModule docstring for details.'
# line: 155
if (sys.platform == 'win32'):
# line: 157
OSError = WindowsError
# line: 160
class FakeLargeFileIoException(Exception):
# line: 163
'Exception thrown on unsupported operations for fake large files.\n Fake large files have a size with no real content.\n '
# line: 165
def __init__(self, file_path):
# line: 166
super(FakeLargeFileIoException, self).__init__(('Read and write operations not supported for fake large file: %s' % file_path))
# line: 171
def CopyModule(old):
# line: 172
'Recompiles and creates new module object.'
# line: 173
saved = sys.modules.pop(old.__name__, None)
# line: 174
new = __import__(old.__name__)
# line: 175
sys.modules[old.__name__] = saved
# line: 176
return new
# line: 179
class _FakeStatResult(object):
# line: 183
'Mimics os.stat_result for use as return type of `stat()` and similar.\n This is needed as `os.stat_result` has no possibility to set\n nanosecond times directly.\n '
# line: 184
long_type = (long if (sys.version_info < (3,)) else int)
# line: 186
def __init__(self, initial_time=None):
# line: 187
self.use_float = FakeOsModule.stat_float_times
# line: 188
self.st_mode = None
# line: 189
self.st_ino = None
# line: 190
self.st_dev = None
# line: 191
self.st_nlink = 0
# line: 192
self.st_uid = None
# line: 193
self.st_gid = None
# line: 194
self.st_size = None
# line: 195
if (initial_time is not None):
# line: 196
self._st_atime_ns = self.long_type((initial_time * 1000000000.0))
else:
# line: 198
self._st_atime_ns = None
# line: 199
self._st_mtime_ns = self._st_atime_ns
# line: 200
self._st_ctime_ns = self._st_atime_ns
# line: 202
def __eq__(self, other):
# line: 203
return (isinstance(other, _FakeStatResult) and (self._st_atime_ns == other._st_atime_ns) and (self._st_ctime_ns == other._st_ctime_ns) and (self._st_mtime_ns == other._st_mtime_ns) and (self.st_size == other.st_size) and (self.st_gid == other.st_gid) and (self.st_uid == other.st_uid) and (self.st_nlink == other.st_nlink) and (self.st_dev == other.st_dev) and (self.st_ino == other.st_ino) and (self.st_mode == other.st_mode))
# line: 217
def __ne__(self, other):
# line: 218
return (not (self == other))
# line: 220
def copy(self):
# line: 223
'Return a copy where the float usage is hard-coded to mimic the behavior\n of the real os.stat_result.\n '
# line: 224
use_float = self.use_float()
# line: 225
stat_result = copy(self)
# line: 226
stat_result.use_float = (lambda : use_float)
# line: 227
return stat_result
# line: 229
def set_from_stat_result(self, stat_result):
# line: 233
'Set values from a real os.stat_result.\n Note: values that are controlled by the fake filesystem are not set.\n This includes st_ino, st_dev and st_nlink.\n '
# line: 234
self.st_mode = stat_result.st_mode
# line: 235
self.st_uid = stat_result.st_uid
# line: 236
self.st_gid = stat_result.st_gid
# line: 237
self.st_size = stat_result.st_size
# line: 238
if (sys.version_info < (3, 3)):
# line: 239
self._st_atime_ns = self.long_type((stat_result.st_atime * 1000000000.0))
# line: 240
self._st_mtime_ns = self.long_type((stat_result.st_mtime * 1000000000.0))
# line: 241
self._st_ctime_ns = self.long_type((stat_result.st_ctime * 1000000000.0))
else:
# line: 243
self._st_atime_ns = stat_result.st_atime_ns
# line: 244
self._st_mtime_ns = stat_result.st_mtime_ns
# line: 245
self._st_ctime_ns = stat_result.st_ctime_ns
# line: 247
@property
# line: 247
def st_ctime(self):
# line: 249
'Return the creation time in seconds.'
# line: 250
ctime = (self._st_ctime_ns / 1000000000.0)
# line: 251
return (ctime if self.use_float() else int(ctime))
# line: 253
@property
# line: 253
def st_atime(self):
# line: 255
'Return the access time in seconds.'
# line: 256
atime = (self._st_atime_ns / 1000000000.0)
# line: 257
return (atime if self.use_float() else int(atime))
# line: 259
@property
# line: 259
def st_mtime(self):
# line: 261
'Return the modification time in seconds.'
# line: 262
mtime = (self._st_mtime_ns / 1000000000.0)
# line: 263
return (mtime if self.use_float() else int(mtime))
# line: 265
@st_ctime.setter
# line: 265
def st_ctime(self, val):
# line: 267
'Set the creation time in seconds.'
# line: 268
self._st_ctime_ns = self.long_type((val * 1000000000.0))
# line: 270
@st_atime.setter
# line: 270
def st_atime(self, val):
# line: 272
'Set the access time in seconds.'
# line: 273
self._st_atime_ns = self.long_type((val * 1000000000.0))
# line: 275
@st_mtime.setter
# line: 275
def st_mtime(self, val):
# line: 277
'Set the modification time in seconds.'
# line: 278
self._st_mtime_ns = self.long_type((val * 1000000000.0))
# line: 280
def __getitem__(self, item):
# line: 281
'Implement item access to mimic `os.stat_result` behavior.'
# line: 282
if (item == stat.ST_MODE):
# line: 283
return self.st_mode
# line: 284
if (item == stat.ST_INO):
# line: 285
return self.st_ino
# line: 286
if (item == stat.ST_DEV):
# line: 287
return self.st_dev
# line: 288
if (item == stat.ST_NLINK):
# line: 289
return self.st_nlink
# line: 290
if (item == stat.ST_UID):
# line: 291
return self.st_uid
# line: 292
if (item == stat.ST_GID):
# line: 293
return self.st_gid
# line: 294
if (item == stat.ST_SIZE):
# line: 295
return self.st_size
# line: 296
if (item == stat.ST_ATIME):
# line: 298
return int(self.st_atime)
# line: 299
if (item == stat.ST_MTIME):
# line: 300
return int(self.st_mtime)
# line: 301
if (item == stat.ST_CTIME):
# line: 302
return int(self.st_ctime)
# line: 304
if (sys.version_info >= (3, 3)):
# line: 306
@property
# line: 306
def st_atime_ns(self):
# line: 308
'Return the access time in nanoseconds.'
# line: 309
return self._st_atime_ns
# line: 311
@property
# line: 311
def st_mtime_ns(self):
# line: 313
'Return the modification time in nanoseconds.'
# line: 314
return self._st_mtime_ns
# line: 316
@property
# line: 316
def st_ctime_ns(self):
# line: 318
'Return the creation time in nanoseconds.'
# line: 319
return self._st_ctime_ns
# line: 321
@st_atime_ns.setter
# line: 321
def st_atime_ns(self, val):
# line: 323
'Set the access time in nanoseconds.'
# line: 324
self._st_atime_ns = val
# line: 326
@st_mtime_ns.setter
# line: 326
def st_mtime_ns(self, val):
# line: 328
'Set the modification time of the fake file in nanoseconds.'
# line: 329
self._st_mtime_ns = val
# line: 331
@st_ctime_ns.setter
# line: 331
def st_ctime_ns(self, val):
# line: 333
'Set the creation time of the fake file in nanoseconds.'
# line: 334
self._st_ctime_ns = val
# line: 337
class FakeFile(object):
# line: 353
"Provides the appearance of a real file.\n\n Attributes currently faked out:\n st_mode: user-specified, otherwise S_IFREG\n st_ctime: the time.time() timestamp of the file change time (updated\n each time a file's attributes is modified).\n st_atime: the time.time() timestamp when the file was last accessed.\n st_mtime: the time.time() timestamp when the file was last modified.\n st_size: the size of the file\n st_nlink: the number of hard links to the file\n st_ino: the inode number - a unique number identifying the file\n st_dev: a unique number identifying the (fake) file system device the file belongs to\n\n Other attributes needed by os.stat are assigned default value of None\n these include: st_uid, st_gid\n "
# line: 355
def __init__(self, name, st_mode=(stat.S_IFREG | PERM_DEF_FILE), contents=None, filesystem=None, encoding=None, errors=None):
# line: 371
'init.\n\n Args:\n name: name of the file/directory, without parent path information\n st_mode: the stat.S_IF* constant representing the file type (i.e.\n stat.S_IFREG, stat.S_IFDIR)\n contents: the contents of the filesystem object; should be a string or byte object for\n regular files, and a list of other FakeFile or FakeDirectory objects\n for FakeDirectory objects\n filesystem: the fake filesystem where the file is created.\n New in pyfakefs 2.9.\n encoding: if contents is a unicode string, the encoding used for serialization\n errors: the error mode used for encoding/decoding errors\n New in pyfakefs 3.2.\n '
# line: 372
self.name = name
# line: 373
self.stat_result = _FakeStatResult(time.time())
# line: 374
self.stat_result.st_mode = st_mode
# line: 375
self.encoding = encoding
# line: 376
self.errors = (errors or 'strict')
# line: 377
self._byte_contents = self._encode_contents(contents)
# line: 378
self.stat_result.st_size = (len(self._byte_contents) if (self._byte_contents is not None) else 0)
# line: 381
if (filesystem is None):
# line: 382
raise ValueError('filesystem shall not be None')
# line: 383
self.filesystem = filesystem
# line: 384
self.epoch = 0
# line: 385
self.parent_dir = None
# line: 387
@property
# line: 387
def byte_contents(self):
# line: 389
return self._byte_contents
# line: 391
@property
# line: 391
def contents(self):
# line: 393
'Return the contents as string with the original encoding.'
# line: 394
if ((sys.version_info >= (3, 0)) and isinstance(self.byte_contents, bytes)):
# line: 395
return self.byte_contents.decode((self.encoding or locale.getpreferredencoding(False)), errors=self.errors)
# line: 398
return self.byte_contents
# line: 400
def SetLargeFileSize(self, st_size):
# line: 413
"Sets the self.st_size attribute and replaces self.content with None.\n\n Provided specifically to simulate very large files without regards\n to their content (which wouldn't fit in memory).\n Note that read/write operations with such a file raise FakeLargeFileIoException.\n\n Args:\n st_size: (int) The desired file size\n\n Raises:\n IOError: if the st_size is not a non-negative integer,\n or if st_size exceeds the available file system space\n "
# line: 414
self._check_positive_int(st_size)
# line: 415
if self.st_size:
# line: 416
self.SetSize(0)
# line: 417
self.filesystem.ChangeDiskUsage(st_size, self.name, self.st_dev)
# line: 418
self.st_size = st_size
# line: 419
self._byte_contents = None
# line: 421
def _check_positive_int(self, size):
# line: 423
int_types = ((int, long) if (sys.version_info < (3, 0)) else int)
# line: 424
if ((not isinstance(size, int_types)) or (size < 0)):
# line: 425
raise IOError(errno.ENOSPC, ('Fake file object: size must be a non-negative integer, but is %s' % size), self.name)
# line: 429
def IsLargeFile(self):
# line: 430
'Return True if this file was initialized with size but no contents.'
# line: 431
return (self._byte_contents is None)
# line: 433
def _encode_contents(self, contents):
# line: 435
if ((sys.version_info >= (3, 0)) and isinstance(contents, str)):
# line: 436
contents = bytes(contents, (self.encoding or locale.getpreferredencoding(False)), self.errors)
elif ((sys.version_info < (3, 0)) and isinstance(contents, unicode)):
# line: 438
contents = contents.encode((self.encoding or locale.getpreferredencoding(False)), self.errors)
# line: 439
return contents
# line: 441
def _set_initial_contents(self, contents):
# line: 450
'Sets the file contents and size.\n Called internally after initial file creation.\n\n Args:\n contents: string, new content of file.\n Raises:\n IOError: if the st_size is not a non-negative integer,\n or if st_size exceeds the available file system space\n '
# line: 451
contents = self._encode_contents(contents)
# line: 452
st_size = len(contents)
# line: 454
if self._byte_contents:
# line: 455
self.SetSize(0)
# line: 456
current_size = (self.st_size or 0)
# line: 457
self.filesystem.ChangeDiskUsage((st_size - current_size), self.name, self.st_dev)
# line: 458
self._byte_contents = contents
# line: 459
self.st_size = st_size
# line: 460
self.epoch += 1
# line: 462
def SetContents(self, contents, encoding=None):
# line: 475
'Sets the file contents and size and increases the modification time.\n\n Args:\n contents: (str, bytes, unicode) new content of file.\n encoding: (str) the encoding to be used for writing the contents\n if they are a unicode string.\n If not given, the locale preferred encoding is used.\n New in pyfakefs 2.9.\n\n Raises:\n IOError: if the st_size is not a non-negative integer,\n or if st_size exceeds the available file system space.\n '
# line: 476
self.encoding = encoding
# line: 477
self._set_initial_contents(contents)
# line: 478
current_time = time.time()
# line: 479
self.st_ctime = current_time
# line: 480
self.st_mtime = current_time
# line: 482
def GetSize(self):
# line: 485
'Returns the size in bytes of the file contents.\n New in pyfakefs 2.9.\n '
# line: 486
return self.st_size
# line: 488
def GetPath(self):
# line: 489
'Return the full path of the current object.'
# line: 490
names = []
# line: 491
obj = self
# line: 492
while obj:
# line: 493
names.insert(0, obj.name)
# line: 494
obj = obj.parent_dir
# line: 495
sep = self.filesystem._path_separator(self.name)
# line: 496
return self.filesystem.NormalizePath(sep.join(names[1:]))
# line: 498
def SetSize(self, st_size):
# line: 507
'Resizes file content, padding with nulls if new size exceeds the old.\n\n Args:\n st_size: The desired size for the file.\n\n Raises:\n IOError: if the st_size arg is not a non-negative integer\n or if st_size exceeds the available file system space\n '
# line: 509
self._check_positive_int(st_size)
# line: 510
current_size = (self.st_size or 0)
# line: 511
self.filesystem.ChangeDiskUsage((st_size - current_size), self.name, self.st_dev)
# line: 512
if self._byte_contents:
# line: 513
if (st_size < current_size):
# line: 514
self._byte_contents = self._byte_contents[:st_size]
elif (sys.version_info < (3, 0)):
# line: 517
self._byte_contents = ('%s%s' % (self._byte_contents, ('\x00' * (st_size - current_size))))
else:
# line: 520
self._byte_contents += ('\x00' * (st_size - current_size))
# line: 521
self.st_size = st_size
# line: 522
self.epoch += 1
# line: 524
def SetATime(self, st_atime):
# line: 529
'Set the self.st_atime attribute.\n\n Args:\n st_atime: The desired access time.\n '
# line: 530
self.st_atime = st_atime
# line: 532
def SetMTime(self, st_mtime):
# line: 537
'Set the self.st_mtime attribute.\n\n Args:\n st_mtime: The desired modification time.\n '
# line: 538
self.st_mtime = st_mtime
# line: 540
def SetCTime(self, st_ctime):
# line: 546
'Set the self.st_ctime attribute.\n New in pyfakefs 3.0.\n\n Args:\n st_ctime: The desired creation time.\n '
# line: 547
self.st_ctime = st_ctime
# line: 549
def __getattr__(self, item):
# line: 550
'Forward some properties to stat_result.'
# line: 551
return getattr(self.stat_result, item)
# line: 553
def __setattr__(self, key, value):
# line: 554
'Forward some properties to stat_result.'
# line: 555
if (key in ('st_mode', 'st_ino', 'st_dev', 'st_nlink', 'st_uid', 'st_gid', 'st_size', 'st_atime', 'st_mtime', 'st_ctime', 'st_atime_ns', 'st_mtime_ns', 'st_ctime_ns')):
# line: 558
return setattr(self.stat_result, key, value)
# line: 559
return super(FakeFile, self).__setattr__(key, value)
# line: 561
def __str__(self):
# line: 562
return ('%s(%o)' % (self.name, self.st_mode))
# line: 564
def SetIno(self, st_ino):
# line: 571
'Set the self.st_ino attribute.\n Note that a unique inode is assigned automatically to a new fake file.\n Using this function does not guarantee uniqueness and should used with caution.\n\n Args:\n st_ino: (int) The desired inode.\n '
# line: 572
self.st_ino = st_ino


class FakeFileFromRealFile(FakeFile):
    """Represents a fake file copied from the real file system.

    The contents of the file are read on demand only.
    New in pyfakefs 3.2.
    """

    def __init__(self, file_path, filesystem, read_only=True):
        """init.

        Args:
            file_path: path to the existing file.
            filesystem: the fake filesystem where the file is created.
            read_only: if set, the file is treated as read-only, e.g. a write
                access raises an exception; otherwise, writing to the file
                changes the fake file only as usually.

        Raises:
            OSError: if the file does not exist in the real file system.
        """
        real_stat = os.stat(file_path)
        super(FakeFileFromRealFile, self).__init__(
            name=os.path.basename(file_path), filesystem=filesystem)
        self.stat_result.set_from_stat_result(real_stat)
        if read_only:
            self.st_mode &= 0o777444  # mask out the write permission bits
        self.file_path = file_path
        self.contents_read = False

    @property
    def byte_contents(self):
        if not self.contents_read:
            self.contents_read = True
            with io.open(self.file_path, 'rb') as f:
                self._byte_contents = f.read()
        # Reading the real file may have changed its access time;
        # keep the fake atime in sync with the real file's atime.
        self.st_atime = os.stat(self.file_path).st_atime
        return self._byte_contents

    def IsLargeFile(self):
        """The contents are never faked."""
        return False


class FakeDirectory(FakeFile):
    """Provides the appearance of a real directory."""

    def __init__(self, name, perm_bits=PERM_DEF, filesystem=None):
        """init.

        Args:
            name: name of the file/directory, without parent path information
            perm_bits: permission bits. defaults to 0o777.
            filesystem: if set, the fake filesystem where the directory is created
        """
        FakeFile.__init__(self, name, stat.S_IFDIR | perm_bits, {}, filesystem=filesystem)
        self.st_nlink += 1

    def SetContents(self, contents, encoding=None):
        error_class = OSError if self.filesystem.is_windows_fs else IOError
        raise error_class(errno.EISDIR, 'Trying to write to directory')

    @property
    def contents(self):
        """Return the list of contained directory entries."""
        return self.byte_contents

    @property
    def ordered_dirs(self):
        """Return the list of contained directory entry names ordered by creation order."""
        return [item[0] for item in sorted(
            self.byte_contents.items(), key=lambda entry: entry[1].st_ino)]

    def AddEntry(self, path_object):
        """Adds a child FakeFile to this directory.

        Args:
            path_object: FakeFile instance to add as a child of this directory.

        Raises:
            OSError: if the directory has no write permission (Posix only)
            OSError: if the file or directory to be added already exists
        """
        if (not self.st_mode & PERM_WRITE and
                not self.filesystem.is_windows_fs):
            raise OSError(errno.EACCES, 'Permission Denied', self.GetPath())

        if path_object.name in self.contents:
            raise OSError(errno.EEXIST,
                          'Object already exists in fake filesystem',
                          self.GetPath())

        self.contents[path_object.name] = path_object
        path_object.parent_dir = self
        self.st_nlink += 1
        path_object.st_nlink += 1
        path_object.st_dev = self.st_dev
        if path_object.st_nlink == 1:
            self.filesystem.ChangeDiskUsage(
                path_object.GetSize(), path_object.name, self.st_dev)

    def GetEntry(self, pathname_name):
        """Retrieves the specified child file or directory entry.

        Args:
            pathname_name: basename of the child object to retrieve.

        Returns:
            fake file or directory object.

        Raises:
            KeyError: if no child exists by the specified name.
        """
        return self.contents[pathname_name]

    def RemoveEntry(self, pathname_name, recursive=True):
        """Removes the specified child file or directory.

        Args:
            pathname_name: basename of the child object to remove.
            recursive: if True (default), the entries in contained directories
                are deleted first. Needed to propagate removal errors
                (e.g. permission problems) from contained entries.
                New in pyfakefs 2.9.

        Raises:
            KeyError: if no child exists by the specified name.
            OSError: if user lacks permission to delete the file,
                or (Windows only) the file is open.
        """
        entry = self.contents[pathname_name]
        if entry.st_mode & PERM_WRITE == 0:
            raise OSError(errno.EACCES,
                          'Trying to remove object without write permission',
                          pathname_name)
        if self.filesystem.is_windows_fs and self.filesystem.HasOpenFile(entry):
            raise OSError(errno.EACCES, 'Trying to remove an open file', pathname_name)
        if recursive and isinstance(entry, FakeDirectory):
            while entry.contents:
                entry.RemoveEntry(list(entry.contents)[0])
        elif entry.st_nlink == 1:
            self.filesystem.ChangeDiskUsage(
                -entry.GetSize(), pathname_name, entry.st_dev)

        self.st_nlink -= 1
        entry.st_nlink -= 1
        assert entry.st_nlink >= 0

        del self.contents[pathname_name]

    def GetSize(self):
        """Return the total size of all files contained in this directory tree.
        New in pyfakefs 2.9.
        """
        return sum([item[1].GetSize() for item in self.contents.items()])

    def HasParentObject(self, dir_object):
        """Return `True` if dir_object is a direct or indirect parent directory,
        or if both are the same object."""
        obj = self
        while obj:
            if obj == dir_object:
                return True
            obj = obj.parent_dir
        return False

    def __str__(self):
        description = super(FakeDirectory, self).__str__() + ':\n'
        for item in self.contents:
            item_desc = self.contents[item].__str__()
            for line in item_desc.split('\n'):
                if line:
                    description = description + '  ' + line + '\n'
        return description
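
    # Illustrative sketch (added, not part of the original source): these
    # classes are normally not instantiated by hand; FakeFilesystem builds
    # the tree. Assuming a POSIX separator:
    #
    #     fs = FakeFilesystem(path_separator='/')
    #     fs.CreateDirectory('/sub')
    #     fs.CreateFile('/sub/a.txt', contents='hi')
    #     fs.GetObject('/sub/a.txt').contents    # -> 'hi'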


class FakeDirectoryFromRealDirectory(FakeDirectory):
    """Represents a fake directory copied from the real file system.

    The contents of the directory are read on demand only.
    New in pyfakefs 3.2.
    """

    def __init__(self, dir_path, filesystem, read_only):
        """init.

        Args:
            dir_path: full directory path
            filesystem: the fake filesystem where the directory is created
            read_only: if set, all files under the directory are treated as
                read-only, e.g. a write access raises an exception;
                otherwise, writing to the files changes the fake files only
                as usually.

        Raises:
            OSError: if the directory does not exist in the real file system
        """
        real_stat = os.stat(dir_path)
        super(FakeDirectoryFromRealDirectory, self).__init__(
            name=os.path.split(dir_path)[1],
            perm_bits=real_stat.st_mode,
            filesystem=filesystem)

        self.st_ctime = real_stat.st_ctime
        self.st_atime = real_stat.st_atime
        self.st_mtime = real_stat.st_mtime
        self.st_gid = real_stat.st_gid
        self.st_uid = real_stat.st_uid
        self.dir_path = dir_path
        self.read_only = read_only
        self.contents_read = False

    @property
    def contents(self):
        """Return the list of contained directory entries, loading them if not already loaded."""
        if not self.contents_read:
            self.contents_read = True
            self.filesystem.add_real_paths(
                [os.path.join(self.dir_path, entry) for entry in os.listdir(self.dir_path)],
                read_only=self.read_only)
        return self.byte_contents

    def GetSize(self):
        # The size is not meaningful before the contents have been
        # read from the real file system.
        if not self.contents_read:
            return 0
        return super(FakeDirectoryFromRealDirectory, self).GetSize()


class FakeFilesystem(object):
    """Provides the appearance of a real directory tree for unit testing.

    Attributes:
        path_separator: The path separator, corresponds to `os.path.sep`.
        alternative_path_separator: Corresponds to `os.path.altsep`.
        is_windows_fs: `True` in a Windows file system, `False` otherwise.
        is_case_sensitive: `True` if a case-sensitive file system is assumed.
        root: The root `FakeDirectory` entry of the file system.
        cwd: The current working directory path.
        umask: The umask used for newly created files, see `os.umask`.
    """

    def __init__(self, path_separator=os.path.sep, total_size=None):
        """init.

        Args:
            path_separator: optional substitute for os.path.sep
            total_size: if not None, the total size in bytes of the root filesystem.
                New in pyfakefs 2.9.

        Example usage to emulate real file systems:
            filesystem = FakeFilesystem(
                alt_path_separator='/' if _is_windows else None)
        """
        self.path_separator = path_separator
        self.alternative_path_separator = os.path.altsep
        if path_separator != os.sep:
            self.alternative_path_separator = None

        self.is_windows_fs = sys.platform == 'win32'

        self.is_case_sensitive = sys.platform not in ['win32', 'cygwin', 'darwin']

        self.root = FakeDirectory(self.path_separator, filesystem=self)
        self.cwd = self.root.name

        # os.umask() can only be read by setting it, so set a dummy value
        # (0o22 == 18 decimal) and immediately restore the original.
        self.umask = os.umask(0o22)
        os.umask(self.umask)

        # A list of open file objects; their position in the list is their
        # file descriptor number.
        self.open_files = []
        # A heap containing all free positions in the open_files list.
        self._free_fd_heap = []
        # Last used numbers for inodes (st_ino) and devices (st_dev).
        self._last_ino = 0
        self._last_dev = 0
        self.mount_points = {}
        self.AddMountPoint(self.root.name, total_size)
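
    # Illustrative sketch (added, not part of the original source): a freshly
    # constructed filesystem contains only the root directory, which is also
    # its first mount point.
    #
    #     fs = FakeFilesystem(path_separator='/')
    #     fs.cwd             # '/'
    #     fs.root.st_dev     # 1 - device id of the root mount point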

    @staticmethod
    def _matching_string(matched, string):
        """Return the string as byte or unicode depending
        on the type of matched, assuming string is an ASCII string.
        """
        if string is None:
            return string
        if sys.version_info < (3,):
            if isinstance(matched, unicode):
                return unicode(string)
            else:
                return string
        elif isinstance(matched, bytes):
            return bytes(string, 'ascii')
        else:
            return string

    def _path_separator(self, path):
        """Return the path separator as the same type as path"""
        return self._matching_string(path, self.path_separator)

    def _alternative_path_separator(self, path):
        """Return the alternative path separator as the same type as path"""
        return self._matching_string(path, self.alternative_path_separator)

    def _IsLinkSupported(self):
        return not self.is_windows_fs or sys.version_info >= (3, 2)

    def AddMountPoint(self, path, total_size=None):
        """Add a new mount point for a filesystem device.
        The mount point gets a new unique device number.
        New in pyfakefs 2.9.

        Args:
            path: The root path for the new mount path.
            total_size: The new total size of the added filesystem device
                in bytes. Defaults to infinite size.

        Returns:
            The newly created mount point dict.

        Raises:
            OSError: if trying to mount an existing mount point again.
        """
        path = self.NormalizePath(path)
        if path in self.mount_points:
            raise OSError(errno.EEXIST, 'Mount point cannot be added twice', path)
        self._last_dev += 1
        self.mount_points[path] = {
            'idev': self._last_dev, 'total_size': total_size, 'used_size': 0,
        }
        root_dir = self.root if path == self.root.name else self.CreateDirectory(path)
        root_dir.st_dev = self._last_dev
        return self.mount_points[path]
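
    # Illustrative sketch (added, not part of the original source): mount
    # points let tests simulate devices of limited size.
    #
    #     fs = FakeFilesystem(path_separator='/')
    #     fs.AddMountPoint('/mnt/usb', total_size=1024)
    #     fs.GetDiskUsage('/mnt/usb').free    # 1024 while the device is empty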

    def _AutoMountDriveIfNeeded(self, path, force=False):
        if self.is_windows_fs and (force or not self._MountPointForPath(path)):
            drive = self.SplitDrive(path)[0]
            if drive:
                return self.AddMountPoint(path=drive)

    def _MountPointForPath(self, path):
        def to_str(string):
            """Convert the str, unicode or byte object to a str using the default encoding."""
            if string is None or isinstance(string, str):
                return string
            if sys.version_info < (3, 0):
                return string.encode(locale.getpreferredencoding(False))
            else:
                return string.decode(locale.getpreferredencoding(False))

        path = self.NormalizePath(self.NormalizeCase(path))
        if path in self.mount_points:
            return self.mount_points[path]
        mount_path = self._matching_string(path, '')
        drive = self.SplitDrive(path)[:1]
        for root_path in self.mount_points:
            root_path = self._matching_string(path, root_path)
            if drive and not root_path.startswith(drive):
                continue
            if path.startswith(root_path) and len(root_path) > len(mount_path):
                mount_path = root_path
        if mount_path:
            return self.mount_points[to_str(mount_path)]
        mount_point = self._AutoMountDriveIfNeeded(path, force=True)
        assert mount_point
        return mount_point

    def _MountPointForDevice(self, idev):
        for mount_point in self.mount_points.values():
            if mount_point['idev'] == idev:
                return mount_point

    def GetDiskUsage(self, path=None):
        """Return the total, used and free disk space in bytes as named tuple,
        or placeholder values simulating unlimited space if not set.
        Note: This matches the return value of shutil.disk_usage().
        New in pyfakefs 2.9.

        Args:
            path: The disk space is returned for the file system device where
                path resides.
                Defaults to the root path (e.g. '/' on Unix systems).
        """
        DiskUsage = namedtuple('usage', 'total, used, free')
        if path is None:
            mount_point = self.mount_points[self.root.name]
        else:
            mount_point = self._MountPointForPath(path)
        if mount_point and mount_point['total_size'] is not None:
            return DiskUsage(mount_point['total_size'],
                             mount_point['used_size'],
                             mount_point['total_size'] - mount_point['used_size'])
        return DiskUsage(1024 * 1024 * 1024 * 1024, 0, 1024 * 1024 * 1024 * 1024)

    def SetDiskUsage(self, total_size, path=None):
        """Changes the total size of the file system, preserving the used space.
        Example usage: set the size of an auto-mounted Windows drive.
        New in pyfakefs 2.9.

        Args:
            total_size: The new total size of the filesystem in bytes.
            path: The disk space is changed for the file system device where
                path resides.
                Defaults to the root path (e.g. '/' on Unix systems).

        Raises:
            IOError: if the new space is smaller than the used size.
        """
        if path is None:
            path = self.root.name
        mount_point = self._MountPointForPath(path)
        if mount_point['total_size'] is not None and mount_point['used_size'] > total_size:
            raise IOError(errno.ENOSPC,
                          'Fake file system: cannot change size to %r bytes - '
                          'used space is larger' % total_size, path)
        mount_point['total_size'] = total_size

    def ChangeDiskUsage(self, usage_change, file_path, st_dev):
        """Change the used disk space by the given amount.
        New in pyfakefs 2.9.

        Args:
            usage_change: Number of bytes added to the used space.
                If negative, the used space will be decreased.
            file_path: The path of the object needing the disk space.
            st_dev: The device ID for the respective file system.

        Raises:
            IOError: if usage_change exceeds the free file system space
        """
        mount_point = self._MountPointForDevice(st_dev)
        if mount_point:
            if mount_point['total_size'] is not None:
                if mount_point['total_size'] - mount_point['used_size'] < usage_change:
                    raise IOError(errno.ENOSPC,
                                  'Fake file system: disk is full, '
                                  'failed to add %r bytes' % usage_change,
                                  file_path)
            mount_point['used_size'] += usage_change
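
    # Illustrative sketch (added, not part of the original source): with a
    # size-limited device, writes that do not fit are rejected here with
    # ENOSPC before any state is changed.
    #
    #     fs = FakeFilesystem(path_separator='/', total_size=10)
    #     fs.CreateFile('/a', contents='0123456789')   # fills the device
    #     fs.CreateFile('/b', contents='x')            # raises IOError(ENOSPC)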

    def GetStat(self, entry_path, follow_symlinks=True):
        """Return the os.stat-like tuple for the FakeFile object of entry_path.
        New in pyfakefs 3.0.

        Args:
            entry_path: path to filesystem object to retrieve.
            follow_symlinks: if False and entry_path points to a symlink,
                the link itself is inspected instead of the linked object.

        Returns:
            the FakeStatResult object corresponding to entry_path.

        Raises:
            OSError: if the filesystem object doesn't exist.
        """
        try:
            file_object = self.ResolveObject(entry_path, follow_symlinks, allow_fd=True)
            return file_object.stat_result.copy()
        except IOError as io_error:
            raise OSError(io_error.errno, io_error.strerror, entry_path)

    def ChangeMode(self, path, mode, follow_symlinks=True):
        """Change the permissions of a file as encoded in integer mode.
        New in pyfakefs 3.0.

        Args:
            path: (str) Path to the file.
            mode: (int) Permissions.
            follow_symlinks: if False and entry_path points to a symlink,
                the link itself is affected instead of the linked object.
        """
        try:
            file_object = self.ResolveObject(path, follow_symlinks, allow_fd=True)
        except IOError as io_error:
            if io_error.errno == errno.ENOENT:
                raise OSError(errno.ENOENT,
                              'No such file or directory in fake filesystem',
                              path)
            raise
        file_object.st_mode = ((file_object.st_mode & ~PERM_ALL) |
                               (mode & PERM_ALL))
        file_object.st_ctime = time.time()

    def UpdateTime(self, path, times=None, ns=None, follow_symlinks=True):
        """Change the access and modified times of a file.
        New in pyfakefs 3.0.

        Args:
            path: (str) Path to the file.
            times: 2-tuple of int or float numbers, of the form (atime, mtime)
                which is used to set the access and modified times in seconds.
                If None, both times are set to the current time.
            ns: 2-tuple of int numbers, of the form (atime, mtime) which is
                used to set the access and modified times in nanoseconds.
                If None, both times are set to the current time.
                New in Python 3.3. New in pyfakefs 3.3.
            follow_symlinks: If `False` and entry_path points to a symlink,
                the link itself is queried instead of the linked object.
                New in Python 3.3. New in pyfakefs 3.0.

        Raises:
            TypeError: If anything other than the expected types is
                specified in the passed `times` or `ns` tuple,
                or if the tuple length is not equal to 2.
            ValueError: If both times and ns are specified.
        """
        if times is not None and ns is not None:
            raise ValueError("utime: you may specify either 'times' or 'ns' but not both")
        if times is not None and len(times) != 2:
            raise TypeError("utime: 'times' must be either a tuple of two ints or None")
        if ns is not None and len(ns) != 2:
            raise TypeError("utime: 'ns' must be a tuple of two ints")

        try:
            file_object = self.ResolveObject(path, follow_symlinks, allow_fd=True)
        except IOError as io_error:
            if io_error.errno == errno.ENOENT:
                raise OSError(errno.ENOENT,
                              'No such file or directory in fake filesystem',
                              path)
            raise
        if times is not None:
            for file_time in times:
                if not isinstance(file_time, (int, float)):
                    raise TypeError('atime and mtime must be numbers')

            file_object.st_atime = times[0]
            file_object.st_mtime = times[1]
        elif ns is not None:
            for file_time in ns:
                if not isinstance(file_time, int):
                    raise TypeError('atime and mtime must be ints')

            file_object.st_atime_ns = ns[0]
            file_object.st_mtime_ns = ns[1]
        else:
            current_time = time.time()
            file_object.st_atime = current_time
            file_object.st_mtime = current_time

    def SetIno(self, path, st_ino):
        """Set the self.st_ino attribute of file at 'path'.
        Note that a unique inode is assigned automatically to a new fake file.
        Using this function does not guarantee uniqueness and it should be
        used with caution.

        Args:
            path: Path to file.
            st_ino: The desired inode.
        """
        self.GetObject(path).SetIno(st_ino)

    def AddOpenFile(self, file_obj):
        """Add file_obj to the list of open files on the filesystem.

        The position in the self.open_files array is the file descriptor number.

        Args:
            file_obj: file object to be added to open files list.

        Returns:
            File descriptor number for the file object.
        """
        if self._free_fd_heap:
            open_fd = heapq.heappop(self._free_fd_heap)
            self.open_files[open_fd] = file_obj
            return open_fd

        self.open_files.append(file_obj)
        return len(self.open_files) - 1

    def CloseOpenFile(self, file_des):
        """Remove file object with given descriptor from the list of open files.

        Sets the entry in open_files to None.

        Args:
            file_des: descriptor of file object to be removed from open files list.
        """
        self.open_files[file_des] = None
        heapq.heappush(self._free_fd_heap, file_des)

    def GetOpenFile(self, file_des):
        """Return an open file.

        Args:
            file_des: file descriptor of the open file.

        Raises:
            OSError: an invalid file descriptor.
            TypeError: filedes is not an integer.

        Returns:
            Open file object.
        """
        if not isinstance(file_des, int):
            raise TypeError('an integer is required')
        if (file_des >= len(self.open_files) or
                self.open_files[file_des] is None):
            raise OSError(errno.EBADF, 'Bad file descriptor', file_des)
        return self.open_files[file_des]

    def HasOpenFile(self, file_object):
        """Return True if the given file object is in the list of open files.
        New in pyfakefs 2.9.

        Args:
            file_object: The FakeFile object to be checked.

        Returns:
            True if the file is open.
        """
        return file_object in [wrapper.GetObject() for wrapper in self.open_files if wrapper]
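
    # Added note: open file wrappers live in self.open_files, indexed by
    # their descriptor. CloseOpenFile() pushes freed descriptors onto the
    # min-heap _free_fd_heap, so AddOpenFile() hands out the lowest free
    # descriptor first - mirroring the POSIX behavior of os.open().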

    def NormalizePathSeparator(self, path):
        """Replace all appearances of alternative path separator with path separator.
        Do nothing if no alternative separator is set.
        New in pyfakefs 2.9.

        Args:
            path: the path to be normalized.

        Returns:
            The normalized path that will be used internally.
        """
        if sys.version_info >= (3, 6):
            path = os.fspath(path)
        if self.alternative_path_separator is None or not path:
            return path
        return path.replace(self._alternative_path_separator(path),
                            self._path_separator(path))

    def CollapsePath(self, path):
        """Mimic os.path.normpath using the specified path_separator.

        Mimics os.path.normpath using the path_separator that was specified
        for this FakeFilesystem. Normalizes the path, but unlike the method
        NormalizePath, does not make it absolute. Eliminates dot components
        (. and ..) and combines repeated path separators (//). Initial ..
        components are left in place for relative paths. If the result is an empty
        path, '.' is returned instead.

        This also replaces alternative path separator with path separator. That is,
        it behaves like the real os.path.normpath on Windows if initialized with
        '\\' as path separator and '/' as alternative separator.

        Args:
            path: (str) The path to normalize.

        Returns:
            (str) A copy of path with empty components and dot components removed.
        """
        path = self.NormalizePathSeparator(path)
        drive, path = self.SplitDrive(path)
        sep = self._path_separator(path)
        is_absolute_path = path.startswith(sep)
        path_components = path.split(sep)
        collapsed_path_components = []
        dot = self._matching_string(path, '.')
        dotdot = self._matching_string(path, '..')
        for component in path_components:
            if not component or component == dot:
                continue
            if component == dotdot:
                if collapsed_path_components and collapsed_path_components[-1] != dotdot:
                    # Remove an up-reference: directory/..
                    collapsed_path_components.pop()
                    continue
                elif is_absolute_path:
                    # Ignore leading .. components when starting from the root.
                    continue
            collapsed_path_components.append(component)
        collapsed_path = sep.join(collapsed_path_components)
        if is_absolute_path:
            collapsed_path = sep + collapsed_path
        return (drive + collapsed_path) or dot
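
    # Illustrative sketch (added, not part of the original source) of
    # CollapsePath(), which mirrors os.path.normpath for the configured
    # separator:
    #
    #     fs = FakeFilesystem(path_separator='/')
    #     fs.CollapsePath('a//b/./c/..')    # -> 'a/b'
    #     fs.CollapsePath('../x')           # -> '../x' (leading '..' kept)
    #     fs.CollapsePath('')               # -> '.'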

    def NormalizeCase(self, path):
        """Return a normalized case version of the given path for case-insensitive
        file systems. For case-sensitive file systems, return path unchanged.
        New in pyfakefs 2.9.

        Args:
            path: the file path to be transformed

        Returns:
            A version of path matching the case of existing path elements.
        """
        def components_to_path():
            if len(path_components) > len(normalized_components):
                normalized_components.extend(path_components[len(normalized_components):])
            sep = self._path_separator(path)
            normalized_path = sep.join(normalized_components)
            if path.startswith(sep) and not normalized_path.startswith(sep):
                normalized_path = sep + normalized_path
            return normalized_path

        if self.is_case_sensitive or not path:
            return path
        path_components = self.GetPathComponents(path)
        normalized_components = []
        current_dir = self.root
        for component in path_components:
            if not isinstance(current_dir, FakeDirectory):
                return components_to_path()
            dir_name, current_dir = self._DirectoryContent(current_dir, component)
            if current_dir is None or (
                    isinstance(current_dir, FakeDirectory) and
                    current_dir._byte_contents is None and
                    current_dir.st_size == 0):
                return components_to_path()
            normalized_components.append(dir_name)
        return components_to_path()

    def NormalizePath(self, path):
        """Absolutize and minimalize the given path.

        Forces all relative paths to be absolute, and normalizes the path to
        eliminate dot and empty components.

        Args:
            path: path to normalize

        Returns:
            The normalized path relative to the current working directory, or the root
            directory if path is empty.
        """
        path = self.NormalizePathSeparator(path)
        if not path:
            path = self.path_separator
        elif not self._StartsWithRootPath(path):
            # A relative path: prefix it with cwd (unless cwd is the root).
            root_name = self._matching_string(path, self.root.name)
            empty = self._matching_string(path, '')
            path = self._path_separator(path).join(
                (self.cwd != root_name and self.cwd or empty, path))
        if path == self._matching_string(path, '.'):
            path = self.cwd
        return self.CollapsePath(path)

    def SplitPath(self, path):
        """Mimic os.path.split using the specified path_separator.

        Mimics os.path.split using the path_separator that was specified
        for this FakeFilesystem.

        Args:
            path: (str) The path to split.

        Returns:
            (str) A duple (pathname, basename) for which pathname does not
            end with a slash, and basename does not contain a slash.
        """
        drive, path = self.SplitDrive(path)
        path = self.NormalizePathSeparator(path)
        sep = self._path_separator(path)
        path_components = path.split(sep)
        if not path_components:
            return ('', '')
        basename = path_components.pop()
        if not path_components:
            return ('', basename)
        for component in path_components:
            if component:
                # The path contains a non-separator component;
                # strip trailing empty components (i.e. trailing separators).
                while not path_components[-1]:
                    path_components.pop()
                return (drive + sep.join(path_components), basename)
        # Root path: only separators were left in front of the basename.
        return (drive or sep, basename)

    def SplitDrive(self, path):
        """Splits the path into the drive part and the rest of the path.
        New in pyfakefs 2.9.

        Taken from Windows specific implementation in Python 3.5 and slightly adapted.

        Args:
            path: the full path to be split.

        Returns: a tuple of the drive part and the rest of the path, or of an empty string
            and the full path if drive letters are not supported or no drive is present.
        """
        if sys.version_info >= (3, 6):
            path = os.fspath(path)
        if self.is_windows_fs:
            if len(path) >= 2:
                path = self.NormalizePathSeparator(path)
                sep = self._path_separator(path)
                # UNC path handling is available since Python 2.7.8.
                if sys.version_info >= (2, 7, 8):
                    if (path[0:2] == sep * 2) and (path[2:3] != sep):
                        # UNC path handling - split off the mount point
                        # instead of the drive.
                        sep_index = path.find(sep, 2)
                        if sep_index == -1:
                            return (path[:0], path)
                        sep_index2 = path.find(sep, sep_index + 1)
                        if sep_index2 == sep_index + 1:
                            return (path[:0], path)
                        if sep_index2 == -1:
                            sep_index2 = len(path)
                        return (path[:sep_index2], path[sep_index2:])
                if path[1:2] == self._matching_string(path, ':'):
                    return (path[:2], path[2:])
        return (path[:0], path)
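
    # Illustrative sketch (added, not part of the original source): on a
    # filesystem where is_windows_fs is set, SplitDrive() separates either a
    # drive letter or a UNC mount point:
    #
    #     fs.SplitDrive('C:\\foo\\bar')          # -> ('C:', '\\foo\\bar')
    #     fs.SplitDrive('\\\\host\\share\\x')    # -> ('\\\\host\\share', '\\x')
    #     fs.SplitDrive('foo\\bar')              # -> ('', 'foo\\bar')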

    def _JoinPathsWithDriveSupport(self, *all_paths):
        """Taken from Python 3.5 os.path.join() code in ntpath.py and slightly adapted"""
        base_path = all_paths[0]
        paths_to_add = all_paths[1:]
        sep = self._path_separator(base_path)
        seps = [sep, self._alternative_path_separator(base_path)]
        result_drive, result_path = self.SplitDrive(base_path)
        for path in paths_to_add:
            drive_part, path_part = self.SplitDrive(path)
            if path_part and path_part[:1] in seps:
                # The second path is absolute.
                if drive_part or not result_drive:
                    result_drive = drive_part
                result_path = path_part
                continue
            elif drive_part and drive_part != result_drive:
                if self.is_case_sensitive or drive_part.lower() != result_drive.lower():
                    # Different drives => ignore the first path entirely.
                    result_drive = drive_part
                    result_path = path_part
                    continue
                # Same drive in different case.
                result_drive = drive_part
            # The second path is relative to the first.
            if result_path and result_path[-1:] not in seps:
                result_path = result_path + sep
            result_path = result_path + path_part
        # Add a separator between UNC and non-absolute path.
        colon = self._matching_string(base_path, ':')
        if (result_path and result_path[:1] not in seps and
                result_drive and result_drive[-1:] != colon):
            return result_drive + sep + result_path
        return result_drive + result_path

    def JoinPaths(self, *paths):
        """Mimic os.path.join using the specified path_separator.

        Args:
            *paths: (str) Zero or more paths to join.

        Returns:
            (str) The paths joined by the path separator, starting with the last
            absolute path in paths.
        """
        if sys.version_info >= (3, 6):
            paths = [os.fspath(path) for path in paths]
        if len(paths) == 1:
            return paths[0]
        if self.is_windows_fs:
            return self._JoinPathsWithDriveSupport(*paths)
        joined_path_segments = []
        sep = self._path_separator(paths[0])
        for path_segment in paths:
            if self._StartsWithRootPath(path_segment):
                # An absolute path: throw away everything joined so far.
                joined_path_segments = [path_segment]
            else:
                if joined_path_segments and not joined_path_segments[-1].endswith(sep):
                    joined_path_segments.append(sep)
                if path_segment:
                    joined_path_segments.append(path_segment)
        return self._matching_string(paths[0], '').join(joined_path_segments)
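
    # Illustrative sketch (added, not part of the original source),
    # POSIX flavor:
    #
    #     fs = FakeFilesystem(path_separator='/')
    #     fs.JoinPaths('a', 'b', 'c')     # -> 'a/b/c'
    #     fs.JoinPaths('a', '/b', 'c')    # -> '/b/c' (last absolute path wins)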

    def GetPathComponents(self, path):
        """Breaks the path into a list of component names.

        Does not include the root directory as a component, as all paths
        are considered relative to the root directory for the FakeFilesystem.
        Callers should basically follow this pattern:

        >>> file_path = self.NormalizePath(file_path)
        >>> path_components = self.GetPathComponents(file_path)
        >>> current_dir = self.root
        >>> for component in path_components:
        >>>     if component not in current_dir.contents:
        >>>         raise IOError
        >>>     DoStuffWithComponent(current_dir, component)
        >>>     current_dir = current_dir.GetEntry(component)

        Args:
            path: path to tokenize

        Returns:
            The list of names split from path
        """
        if not path or path == self._path_separator(path):
            return []
        drive, path = self.SplitDrive(path)
        path_components = path.split(self._path_separator(path))
        assert drive or path_components
        if not path_components[0]:
            # This is an absolute path; drop the empty leading component.
            path_components = path_components[1:]
        if drive:
            path_components.insert(0, drive)
        return path_components

    def StartsWithDriveLetter(self, file_path):
        """Return True if file_path starts with a drive letter.
        New in pyfakefs 2.9.

        Args:
            file_path: the full path to be examined.

        Returns:
            True if drive letter support is enabled in the filesystem and
            the path starts with a drive letter.
        """
        colon = self._matching_string(file_path, ':')
        return (self.is_windows_fs and len(file_path) >= 2 and
                file_path[:1].isalpha() and file_path[1:2] == colon)

    def _StartsWithRootPath(self, file_path):
        root_name = self._matching_string(file_path, self.root.name)
        return (file_path.startswith(root_name) or
                (not self.is_case_sensitive and
                 file_path.lower().startswith(root_name.lower())) or
                self.StartsWithDriveLetter(file_path))

    def _IsRootPath(self, file_path):
        root_name = self._matching_string(file_path, self.root.name)
        return (file_path == root_name or
                (not self.is_case_sensitive and
                 file_path.lower() == root_name.lower()) or
                (len(file_path) == 2 and self.StartsWithDriveLetter(file_path)))

    def _EndsWithPathSeparator(self, file_path):
        return file_path and (
            file_path.endswith(self._path_separator(file_path)) or
            (self.alternative_path_separator is not None and
             file_path.endswith(self._alternative_path_separator(file_path))))

    def _DirectoryContent(self, directory, component):
        if not isinstance(directory, FakeDirectory):
            return (None, None)
        if component in directory.contents:
            return (component, directory.contents[component])
        if not self.is_case_sensitive:
            matching_content = [
                (subdir, directory.contents[subdir])
                for subdir in directory.contents
                if subdir.lower() == component.lower()
            ]
            if matching_content:
                return matching_content[0]
        return (None, None)

    def Exists(self, file_path):
        """Return true if a path points to an existing file system object.

        Args:
            file_path: path to examine.

        Returns:
            (bool) True if the corresponding object exists.

        Raises:
            TypeError: if file_path is None.
        """
        if sys.version_info >= (3, 6):
            file_path = os.fspath(file_path)
        if file_path is None:
            raise TypeError
        if not file_path:
            return False
        try:
            file_path = self.ResolvePath(file_path)
        except (IOError, OSError):
            return False
        if file_path == self.root.name:
            return True
        path_components = self.GetPathComponents(file_path)
        current_dir = self.root
        for component in path_components:
            current_dir = self._DirectoryContent(current_dir, component)[1]
            if not current_dir:
                return False
        return True

    def ResolvePath(self, file_path, allow_fd=False, raw_io=True):
        """Follow a path, resolving symlinks.

        ResolvePath traverses the filesystem along the specified file path,
        resolving file names and symbolic links until all elements of the path are
        exhausted, or we reach a file which does not exist. If all the elements
        are not consumed, they just get appended to the path resolved so far.
        This gives us the path which is as resolved as it can be, even if the file
        does not exist.

        This behavior mimics Unix semantics, and is best shown by example. Given a
        file system that looks like this:

              /a/b/
              /a/b/c -> /a/b2          c is a symlink to /a/b2
              /a/b2/x
              /a/c -> ../d
              /a/x -> y

        Then:
              /a/b/x      =>  /a/b/x
              /a/c        =>  /a/d
              /a/x        =>  /a/y
              /a/b/c/d/e  =>  /a/b2/d/e

        Args:
            file_path: path to examine.
            allow_fd: If `True`, `file_path` may be open file descriptor
            raw_io: `True` if called from low-level I/O functions

        Returns:
            resolved_path (string) or None.

        Raises:
            TypeError: if file_path is None.
            IOError: if file_path is '' or a part of the path doesn't exist.
        """

        def _ComponentsToPath(component_folders):
            sep = (self._path_separator(component_folders[0])
                   if component_folders else self.path_separator)
            path = sep.join(component_folders)
            if not self._StartsWithRootPath(path):
                path = sep + path
            return path

        def _ValidRelativePath(file_path):
            slash_dotdot = self._matching_string(file_path, '/..')
            while file_path and slash_dotdot in file_path:
                file_path = file_path[:file_path.rfind(slash_dotdot)]
                if not self.Exists(self.NormalizePath(file_path)):
                    return False
            return True

        def _FollowLink(link_path_components, link):
            """Follow a link w.r.t. a path resolved so far.

            The component is either a real file, which is a no-op, or a symlink.
            In the case of a symlink, we have to modify the path as built up so far
              /a/b => ../c   should yield /a/../c (which will normalize to /a/c)
              /a/b => x      should yield /a/x
              /a/b => /x/y/z should yield /x/y/z
            The modified path may land us in a new spot which is itself a
            link, so we may repeat the process.

            Args:
                link_path_components: The resolved path built up to the link so far.
                link: The link object itself.

            Returns:
                (string) the updated path resolved after following the link.

            Raises:
                IOError: if there are too many levels of symbolic link
            """
            link_path = link.contents
            sep = self._path_separator(link_path)
            alt_sep = self._alternative_path_separator(link_path)
            # A link to an absolute path replaces everything resolved so far;
            # a relative link is appended in place of the link's own name.
            if (not link_path.startswith(sep) and
                    (alt_sep is None or not link_path.startswith(alt_sep))):
                components = link_path_components[:-1]
                components.append(link_path)
                link_path = sep.join(components)
            # Use CollapsePath(), not NormalizePath(), to avoid prepending cwd.
            return self.CollapsePath(link_path)

        if allow_fd and sys.version_info >= (3, 3) and isinstance(file_path, int):
            return self.GetOpenFile(file_path).GetObject().GetPath()

        if sys.version_info >= (3, 6):
            file_path = os.fspath(file_path)
        if file_path is None:
            # file.open(None) raises TypeError, so mimic that.
            raise TypeError('Expected file system path string, received None')
        if not file_path or not _ValidRelativePath(file_path):
            # file.open('') raises IOError, so mimic that, and validate that
            # all parts of a relative path exist.
            raise IOError(errno.ENOENT, "No such file or directory: '%s'" % file_path)

        file_path = self.NormalizePath(self.NormalizeCase(file_path))
        if self._IsRootPath(file_path):
            return file_path

        current_dir = self.root
        path_components = self.GetPathComponents(file_path)

        resolved_components = []
        link_depth = 0
        while path_components:
            component = path_components.pop(0)
            resolved_components.append(component)
            current_dir = self._DirectoryContent(current_dir, component)[1]
            if current_dir is None:
                # The component does not exist; the path cannot be resolved
                # any further, so append the remaining components and stop.
                resolved_components.extend(path_components)
                break

            if stat.S_ISLNK(current_dir.st_mode):
                if link_depth > _MAX_LINK_DEPTH:
                    error_class = OSError if raw_io else IOError
                    raise error_class(errno.ELOOP,
                                      "Too many levels of symbolic links: '%s'" %
                                      _ComponentsToPath(resolved_components))

                link_path = _FollowLink(resolved_components, current_dir)

                # Retokenize the expanded path and restart from the root.
                target_components = self.GetPathComponents(link_path)
                path_components = target_components + path_components
                resolved_components = []
                current_dir = self.root
                link_depth += 1
        return _ComponentsToPath(resolved_components)
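
    # Added note: when a symlink is hit, ResolvePath() splices the link
    # target into the remaining components and restarts the walk from the
    # root, so a chain of links is resolved iteratively; link_depth caps the
    # number of restarts at _MAX_LINK_DEPTH to detect symlink loops (ELOOP).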

    def GetObjectFromNormalizedPath(self, file_path):
        """Search for the specified filesystem object within the fake filesystem.

        Args:
            file_path: specifies target FakeFile object to retrieve, with a
                path that has already been normalized/resolved.

        Returns:
            the FakeFile object corresponding to file_path.

        Raises:
            IOError: if the object is not found.
        """
        if sys.version_info >= (3, 6):
            file_path = os.fspath(file_path)
        if file_path == self.root.name:
            return self.root
        path_components = self.GetPathComponents(file_path)
        target_object = self.root
        try:
            for component in path_components:
                if stat.S_ISLNK(target_object.st_mode):
                    target_object = self.ResolveObject(target_object.contents)
                if not stat.S_ISDIR(target_object.st_mode):
                    if not self.is_windows_fs:
                        raise IOError(errno.ENOTDIR,
                                      'Not a directory in fake filesystem',
                                      file_path)
                    raise IOError(errno.ENOENT,
                                  'No such file or directory in fake filesystem',
                                  file_path)
                target_object = target_object.GetEntry(component)
        except KeyError:
            raise IOError(errno.ENOENT,
                          'No such file or directory in fake filesystem',
                          file_path)
        return target_object

    def GetObject(self, file_path):
        """Search for the specified filesystem object within the fake filesystem.

        Args:
            file_path: specifies target FakeFile object to retrieve.

        Returns:
            the FakeFile object corresponding to file_path.

        Raises:
            IOError: if the object is not found.
        """
        if sys.version_info >= (3, 6):
            file_path = os.fspath(file_path)
        file_path = self.NormalizePath(self.NormalizeCase(file_path))
        return self.GetObjectFromNormalizedPath(file_path)

    def ResolveObject(self, file_path, follow_symlinks=True, allow_fd=False):
        """Search for the specified filesystem object, resolving all links.

        Args:
            file_path: Specifies target FakeFile object to retrieve.
            follow_symlinks: If `False`, the link itself is resolved,
                otherwise the object linked to.
            allow_fd: If `True`, `file_path` may be open file descriptor

        Returns:
            the FakeFile object corresponding to file_path.

        Raises:
            IOError: if the object is not found.
        """
        if allow_fd and sys.version_info >= (3, 3) and isinstance(file_path, int):
            return self.GetOpenFile(file_path).GetObject()
        if follow_symlinks:
            if sys.version_info >= (3, 6):
                file_path = os.fspath(file_path)
            return self.GetObjectFromNormalizedPath(self.ResolvePath(file_path))
        return self.LResolveObject(file_path)

    def LResolveObject(self, path):
        """Search for the specified object, resolving only parent links.

        This is analogous to the stat/lstat difference. This resolves links *to*
        the object but not of the final object itself.

        Args:
            path: specifies target FakeFile object to retrieve.

        Returns:
            the FakeFile object corresponding to path.

        Raises:
            IOError: if the object is not found.
        """
        if sys.version_info >= (3, 6):
            path = os.fspath(path)
        if path == self.root.name:
            # The root directory will never be a link.
            return self.root

        # Remove any trailing separator.
        sep = self._path_separator(path)
        alt_sep = self._alternative_path_separator(path)
        if path.endswith(sep) or (alt_sep and path.endswith(alt_sep)):
            path = path[:-1]

        parent_directory, child_name = self.SplitPath(path)
        if not parent_directory:
            parent_directory = self.cwd
        try:
            parent_obj = self.ResolveObject(parent_directory)
            assert parent_obj
            if not isinstance(parent_obj, FakeDirectory):
                if not self.is_windows_fs and isinstance(parent_obj, FakeFile):
                    raise IOError(errno.ENOTDIR,
                                  'The parent object is not a directory', path)
                raise IOError(errno.ENOENT,
                              'No such file or directory in fake filesystem',
                              path)
            return parent_obj.GetEntry(child_name)
        except KeyError:
            raise IOError(errno.ENOENT,
                          'No such file or directory in the fake filesystem',
                          path)

    def AddObject(self, file_path, file_object, error_class=OSError):
        """Add a fake file or directory into the filesystem at file_path.

        Args:
            file_path: the path to the file to be added relative to self.
            file_object: file or directory to add.
            error_class: the error class to be thrown if file_path does
                not correspond to a directory (used internally).

        Raises:
            IOError or OSError: if file_path does not correspond to a directory.
        """
        if not file_path:
            target_directory = self.root
        else:
            target_directory = self.ResolveObject(file_path)
            if not stat.S_ISDIR(target_directory.st_mode):
                raise error_class(errno.ENOTDIR,
                                  'Not a directory in the fake filesystem',
                                  file_path)
        target_directory.AddEntry(file_object)

    def RenameObject(self, old_file_path, new_file_path, force_replace=False):
        """Renames a FakeFile object at old_file_path to new_file_path, preserving all properties.

        Args:
            old_file_path: Path to filesystem object to rename.
            new_file_path: Path to where the filesystem object will live after this call.
            force_replace: If set and destination is an existing file, it will be replaced
                even under Windows if the user has permissions, otherwise replacement
                happens under Unix only.

        Raises:
            OSError: if old_file_path does not exist.
            OSError: if new_file_path is an existing directory
                (Windows, or Posix if old_file_path points to a regular file)
            OSError: if old_file_path is a directory and new_file_path a file
            OSError: if new_file_path is an existing file and force_replace not set
                (Windows only).
            OSError: if new_file_path is an existing file and could not be removed
                (Posix, or Windows with force_replace set).
            OSError: if dirname(new_file_path) does not exist.
            OSError: if the file would be moved to another filesystem (e.g. mount point).
        """
        old_file_path = self.NormalizePath(old_file_path)
        new_file_path = self.NormalizePath(new_file_path)
        if not self.Exists(old_file_path) and not self.IsLink(old_file_path):
            raise OSError(errno.ENOENT,
                          'Fake filesystem object: can not rename nonexistent file',
                          old_file_path)

        old_object = self.LResolveObject(old_file_path)
        if not self.is_windows_fs:
            if (self.IsDir(old_file_path, follow_symlinks=False) and
                    self.IsLink(new_file_path)):
                raise OSError(errno.ENOTDIR,
                              'Cannot rename directory to symlink',
                              new_file_path)
            if (self.IsDir(new_file_path, follow_symlinks=False) and
                    self.IsLink(old_file_path)):
                raise OSError(errno.EISDIR,
                              'Cannot rename symlink to directory',
                              new_file_path)

        if self.Exists(new_file_path) or self.IsLink(new_file_path):
            if old_file_path == new_file_path:
                return  # Nothing to do here.

            new_object = self.GetObject(new_file_path)
            if old_object == new_object:
                if old_file_path.lower() == new_file_path.lower():
                    # Only the case is changed in a case-insensitive file
                    # system - do the rename.
                    pass
                else:
                    # Links to the same file - nothing to do.
                    return
            elif stat.S_ISDIR(new_object.st_mode) or stat.S_ISLNK(new_object.st_mode):
                if self.is_windows_fs:
                    if force_replace:
                        raise OSError(errno.EACCES,
                                      'Fake filesystem object: can not replace existing directory',
                                      new_file_path)
                    else:
                        raise OSError(errno.EEXIST,
                                      'Fake filesystem object: can not rename to existing directory',
                                      new_file_path)
                if not stat.S_ISLNK(new_object.st_mode):
                    if new_object.contents:
                        raise OSError(errno.EEXIST,
                                      'Fake filesystem object: can not rename to non-empty directory',
                                      new_file_path)
                    if stat.S_ISREG(old_object.st_mode):
                        raise OSError(errno.EISDIR,
                                      'Fake filesystem object: cannot rename file to directory',
                                      new_file_path)
            elif stat.S_ISDIR(old_object.st_mode):
                raise OSError(errno.ENOTDIR,
                              'Fake filesystem object: cannot rename directory to file',
                              new_file_path)
            elif self.is_windows_fs and not force_replace:
                raise OSError(errno.EEXIST,
                              'Fake filesystem object: can not rename to existing file',
                              new_file_path)
            else:
                try:
                    self.RemoveObject(new_file_path)
                except IOError as exc:
                    raise OSError(exc.errno, exc.strerror, exc.filename)

        old_dir, old_name = self.SplitPath(old_file_path)
        new_dir, new_name = self.SplitPath(new_file_path)
        if not self.Exists(new_dir):
            raise OSError(errno.ENOENT, 'No such fake directory', new_dir)
        old_dir_object = self.ResolveObject(old_dir)
        new_dir_object = self.ResolveObject(new_dir)
        if old_dir_object.st_dev != new_dir_object.st_dev:
            raise OSError(errno.EXDEV,
                          'Fake filesystem object: cannot rename across file systems',
                          old_file_path)
        if not stat.S_ISDIR(new_dir_object.st_mode):
            raise OSError(errno.EACCES if self.is_windows_fs else errno.ENOTDIR,
                          'Fake filesystem object: target parent is not a directory',
                          new_file_path)
        if new_dir_object.HasParentObject(old_object):
            raise OSError(errno.EINVAL,
                          'Fake filesystem object: invalid target for rename',
                          new_file_path)

        object_to_rename = old_dir_object.GetEntry(old_name)
        old_dir_object.RemoveEntry(old_name, recursive=False)
        object_to_rename.name = new_name
        if new_name in new_dir_object.contents:
            # A remaining target object is replaced silently.
            new_dir_object.RemoveEntry(new_name)
        new_dir_object.AddEntry(object_to_rename)

    def RemoveObject(self, file_path):
        """Remove an existing file or directory.

        Args:
            file_path: the path to the file relative to self.

        Raises:
            IOError: if file_path does not correspond to an existing file, or
                if part of the path refers to something other than a directory.
            OSError: if the directory is in use (e.g., if it is '/').
        """
        file_path = self.NormalizePath(self.NormalizeCase(file_path))
        if self._IsRootPath(file_path):
            raise OSError(errno.EBUSY, 'Fake device or resource busy', file_path)
        try:
            dirname, basename = self.SplitPath(file_path)
            target_directory = self.ResolveObject(dirname)
            target_directory.RemoveEntry(basename)
        except KeyError:
            raise IOError(errno.ENOENT,
                          'No such file or directory in the fake filesystem',
                          file_path)
        except AttributeError:
            raise IOError(errno.ENOTDIR,
                          'Not a directory in the fake filesystem',
                          file_path)

    def CreateDirectory(self, directory_path, perm_bits=PERM_DEF):
        """Create directory_path, and all the parent directories.

        Helper method to set up your test faster.

        Args:
            directory_path: The full directory path to create.
            perm_bits: The permission bits as set by `chmod`.

        Returns:
            the newly created FakeDirectory object.

        Raises:
            OSError: if the directory already exists.
        """
        directory_path = self.NormalizePath(directory_path)
        self._AutoMountDriveIfNeeded(directory_path)
        if self.Exists(directory_path):
            raise OSError(errno.EEXIST,
                          'Directory exists in fake filesystem',
                          directory_path)
        path_components = self.GetPathComponents(directory_path)
        current_dir = self.root

        new_dirs = []
        for component in path_components:
            directory = self._DirectoryContent(current_dir, component)[1]
            if not directory:
                new_dir = FakeDirectory(component, filesystem=self)
                new_dirs.append(new_dir)
                current_dir.AddEntry(new_dir)
                current_dir = new_dir
            else:
                if stat.S_ISLNK(directory.st_mode):
                    directory = self.ResolveObject(directory.contents)
                current_dir = directory
                if directory.st_mode & stat.S_IFDIR != stat.S_IFDIR:
                    raise OSError(errno.ENOTDIR, 'Not a directory', current_dir.GetPath())

        # Set the permission after creating the directories, to allow
        # creation of child directories even if the permission does not
        # allow writing.
        for new_dir in new_dirs:
            new_dir.st_mode = stat.S_IFDIR | perm_bits

        self._last_ino += 1
        current_dir.SetIno(self._last_ino)
        return current_dir
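
    # Illustrative sketch (added, not part of the original source):
    # intermediate directories are created as needed, like os.makedirs().
    #
    #     fs = FakeFilesystem(path_separator='/')
    #     fs.CreateDirectory('/x/y/z')
    #     fs.Exists('/x/y')    # True - parents were created along the way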

    def CreateFile(self, file_path, st_mode=stat.S_IFREG | PERM_DEF_FILE,
                   contents='', st_size=None, create_missing_dirs=True,
                   apply_umask=False, encoding=None, errors=None):
        """Create file_path, including all the parent directories along the way.

        This helper method can be used to set up tests more easily.

        Args:
            file_path: The path to the file to create.
            st_mode: The stat constant representing the file type.
            contents: The contents of the file.
            st_size: The file size; only valid if contents not given.
            create_missing_dirs: If `True`, auto create missing directories.
            apply_umask: `True` if the current umask must be applied on st_mode.
            encoding: If contents is a unicode string, the encoding used
                for serialization.
                New in pyfakefs 2.9.
            errors: The error mode used for encoding/decoding errors.
                New in pyfakefs 3.2.

        Returns:
            the newly created FakeFile object.

        Raises:
            IOError: if the file already exists.
            IOError: if the containing directory is required and missing.
        """
        return self.CreateFileInternally(
            file_path, st_mode, contents, st_size, create_missing_dirs,
            apply_umask, encoding, errors)

    def add_real_file(self, file_path, read_only=True):
        """Create file_path, including all the parent directories along the way,
        for an existing real file. The contents of the real file are read only
        on demand.
        New in pyfakefs 3.2.

        Args:
            file_path: Path to an existing file in the real file system
            read_only: If `True` (the default), writing to the fake file
                raises an exception. Otherwise, writing to the file changes
                the fake file only.

        Returns:
            the newly created FakeFile object.

        Raises:
            OSError: if the file does not exist in the real file system.
            IOError: if the file already exists in the fake file system.

        .. note:: On MacOS and BSD, accessing the fake file's contents will
            update both the real and fake files' `atime` (access time). In this
            particular case, `add_real_file()` violates the rule that `pyfakefs`
            must not modify the real file system. Further, Windows offers the
            option to enable atime, and older versions of Linux may also
            modify atime.
        """
        return self.CreateFileInternally(
            file_path, read_from_real_fs=True, read_only=read_only)

    def add_real_directory(self, dir_path, read_only=True, lazy_read=True):
        """Create a fake directory corresponding to the real directory at
        the specified path. Add entries in the fake directory corresponding
        to the entries in the real directory.
        New in pyfakefs 3.2.

        Args:
            dir_path: The path to the existing directory.
            read_only: If set, all files under the directory are treated as
                read-only, e.g. a write access raises an exception;
                otherwise, writing to the files changes the fake files only
                as usually.
            lazy_read: If set (default), directory contents are only read
                when accessed, and only until the needed subdirectory level.
                *Note:* this means that the file system size is only updated
                at the time the directory contents are read; set this to
                `False` only if you are dependent on accurate file system
                size in your test.

        Returns:
            the newly created FakeDirectory object.

        Raises:
            OSError: if the directory does not exist in the real file system.
            IOError: if the directory already exists in the fake file system.
        """
        if not os.path.exists(dir_path):
            raise IOError(errno.ENOENT, 'No such directory', dir_path)
        if lazy_read:
            parent_path = os.path.split(dir_path)[0]
            if self.Exists(parent_path):
                parent_dir = self.GetObject(parent_path)
            else:
                parent_dir = self.CreateDirectory(parent_path)
            new_dir = FakeDirectoryFromRealDirectory(
                dir_path, filesystem=self, read_only=read_only)
            parent_dir.AddEntry(new_dir)
            self._last_ino += 1
            new_dir.SetIno(self._last_ino)
        else:
            new_dir = self.CreateDirectory(dir_path)
            for base, _, files in os.walk(dir_path):
                for fileEntry in files:
                    self.add_real_file(os.path.join(base, fileEntry),
                                       read_only)
        return new_dir
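
    # Sketch of the lazy_read trade-off (hypothetical directories): with
    # lazy_read=True only a FakeDirectoryFromRealDirectory stub is created
    # and entries materialize on access; with lazy_read=False the whole
    # tree is walked eagerly via os.walk() and every file is added at once.
    #
    #     filesystem.add_real_directory('tests/fixtures')                 # lazy
    #     filesystem.add_real_directory('tests/golden', lazy_read=False)  # eager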

    def add_real_paths(self, path_list, read_only=True, lazy_dir_read=True):
        """This convenience method adds multiple files and/or directories
        from the real file system to the fake file system. See
        `add_real_file()` and `add_real_directory()`.
        New in pyfakefs 3.2.

        Args:
            path_list: List of file and directory paths in the real file
                system.
            read_only: If set, all files and all files under the directories
                are treated as read-only, e.g. a write access raises an
                exception; otherwise, writing to the files changes the fake
                files only as usually.
            lazy_dir_read: Uses lazy reading of directory contents if set
                (see `add_real_directory`).

        Raises:
            OSError: if any of the files and directories in the list does
                not exist in the real file system.
            OSError: if any of the files and directories in the list already
                exists in the fake file system.
        """
        for path in path_list:
            if os.path.isdir(path):
                self.add_real_directory(path, read_only, lazy_dir_read)
            else:
                self.add_real_file(path, read_only)

    def CreateFileInternally(self, file_path,
                             st_mode=stat.S_IFREG | PERM_DEF_FILE,
                             contents='', st_size=None,
                             create_missing_dirs=True, apply_umask=False,
                             encoding=None, errors=None,
                             read_from_real_fs=False, read_only=True,
                             raw_io=False):
        """Internal fake file creator that supports both normal fake files
        and fake files based on real files.

        Args:
            file_path: path to the file to create.
            st_mode: the stat.S_IF constant representing the file type.
            contents: the contents of the file.
            st_size: file size; only valid if contents not given.
            create_missing_dirs: if True, auto create missing directories.
            apply_umask: whether or not the current umask must be applied
                on st_mode.
            encoding: if contents is a unicode string, the encoding used
                for serialization.
            errors: the error mode used for encoding/decoding errors.
            read_from_real_fs: if True, the contents are read from the real
                file system on demand.
            read_only: if set, the file is treated as read-only, e.g. a
                write access raises an exception; otherwise, writing to the
                file changes the fake file only as usually.
            raw_io: `True` if called from low-level API (`os.open`).
        """
        error_class = OSError if raw_io else IOError
        file_path = self.NormalizePath(file_path)

        # if the file already exists, the default behavior is to raise
        if self.Exists(file_path) or self.IsLink(file_path):
            raise OSError(errno.EEXIST,
                          'File already exists in fake filesystem',
                          file_path)

        parent_directory, new_file = self.SplitPath(file_path)
        if not parent_directory:
            parent_directory = self.cwd
        self._AutoMountDriveIfNeeded(parent_directory)
        if not self.Exists(parent_directory):
            if not create_missing_dirs:
                raise error_class(errno.ENOENT, 'No such fake directory',
                                  parent_directory)
            self.CreateDirectory(parent_directory)
        else:
            parent_directory = self.NormalizeCase(parent_directory)
        if apply_umask:
            st_mode &= ~self.umask
        if read_from_real_fs:
            file_object = FakeFileFromRealFile(file_path, filesystem=self,
                                               read_only=read_only)
        else:
            file_object = FakeFile(new_file, st_mode, filesystem=self,
                                   encoding=encoding, errors=errors)

        self._last_ino += 1
        file_object.SetIno(self._last_ino)
        self.AddObject(parent_directory, file_object, error_class)

        if (not read_from_real_fs and
                (contents is not None or st_size is not None)):
            try:
                if st_size is not None:
                    file_object.SetLargeFileSize(st_size)
                else:
                    file_object._set_initial_contents(contents)
            except IOError:
                self.RemoveObject(file_path)
                raise

        return file_object

    def CreateLink(self, file_path, link_target, create_missing_dirs=True):
        """Create the specified symlink, pointed at the specified link
        target.

        Args:
            file_path: path to the symlink to create.
            link_target: the target of the symlink.
            create_missing_dirs: If `True`, any missing parent directories
                of file_path will be created.

        Returns:
            the newly created FakeFile object.

        Raises:
            OSError: if the symlink could not be created (see `CreateFile`).
            OSError: if on Windows before Python 3.2.
        """
        if not self._IsLinkSupported():
            raise OSError('Symbolic links are not supported '
                          'on Windows before Python 3.2')
        if not self.IsLink(file_path):
            file_path = self.ResolvePath(file_path)
        if sys.version_info >= (3, 6):
            link_target = os.fspath(link_target)
        return self.CreateFileInternally(
            file_path, st_mode=stat.S_IFLNK | PERM_DEF,
            contents=link_target,
            create_missing_dirs=create_missing_dirs, raw_io=True)
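
    # Illustrative sketch (hypothetical paths): a symlink is stored as a
    # fake file whose contents hold the target string and whose st_mode is
    # marked S_IFLNK, so it can be read back with ReadLink().
    #
    #     filesystem.CreateFile('/opt/app/current.txt', contents='v2')
    #     filesystem.CreateLink('/opt/app/latest', '/opt/app/current.txt')
    #     assert filesystem.ReadLink('/opt/app/latest') == '/opt/app/current.txt'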

    def CreateHardLink(self, old_path, new_path):
        """Create a hard link at new_path, pointing at old_path.
        New in pyfakefs 2.9.

        Args:
            old_path: an existing link to the target file.
            new_path: the destination path to create a new link at.

        Returns:
            the FakeFile object referred to by old_path.

        Raises:
            OSError: if something already exists at new_path.
            OSError: if old_path is a directory.
            OSError: if the parent directory doesn't exist.
            OSError: if on Windows before Python 3.2.
        """
        if not self._IsLinkSupported():
            raise OSError('Links are not supported '
                          'on Windows before Python 3.2')
        new_path_normalized = self.NormalizePath(new_path)
        if self.Exists(new_path_normalized):
            raise OSError(errno.EEXIST,
                          'File already exists in fake filesystem',
                          new_path)

        new_parent_directory, new_basename = self.SplitPath(
            new_path_normalized)
        if not new_parent_directory:
            new_parent_directory = self.cwd

        if not self.Exists(new_parent_directory):
            raise OSError(errno.ENOENT, 'No such fake directory',
                          new_parent_directory)

        # retrieve the target file
        try:
            old_file = self.ResolveObject(old_path)
        except IOError:
            raise OSError(errno.ENOENT,
                          'No such file or directory in fake filesystem',
                          old_path)

        if old_file.st_mode & stat.S_IFDIR:
            raise OSError(
                errno.EACCES if self.is_windows_fs else errno.EPERM,
                'Cannot create hard link to directory', old_path)

        # the name field is reused to control the filename of the new link
        old_file.name = new_basename
        self.AddObject(new_parent_directory, old_file)
        return old_file

    def ReadLink(self, path):
        """Read the target of a symlink.
        New in pyfakefs 3.0.

        Args:
            path: symlink to read the target of.

        Returns:
            the string representing the path to which the symbolic link
            points.

        Raises:
            TypeError: if path is None.
            OSError: (with errno=ENOENT) if path is not a valid path, or
                (with errno=EINVAL) if path is valid, but is not a symlink.
        """
        if path is None:
            raise TypeError
        try:
            link_obj = self.LResolveObject(path)
        except IOError as exc:
            raise OSError(exc.errno, 'Fake path does not exist', path)
        if stat.S_IFMT(link_obj.st_mode) != stat.S_IFLNK:
            raise OSError(errno.EINVAL,
                          'Fake filesystem: not a symlink', path)
        return link_obj.contents

    def MakeDirectory(self, dir_name, mode=PERM_DEF):
        """Create a leaf Fake directory.
        New in pyfakefs 3.0.

        Args:
            dir_name: (str) Name of directory to create. Relative paths are
                assumed to be relative to '/'.
            mode: (int) Mode to create directory with. This argument
                defaults to 0o777. The umask is applied to this mode.

        Raises:
            OSError: if the directory name is invalid or parent directory
                is read only or as per `FakeFilesystem.AddObject()`.
        """
        if sys.version_info >= (3, 6):
            dir_name = os.fspath(dir_name)
        if self._EndsWithPathSeparator(dir_name):
            dir_name = dir_name[:-1]
        if not dir_name:
            raise OSError(errno.ENOENT, 'Empty directory name')

        parent_dir, _ = self.SplitPath(dir_name)
        if parent_dir:
            base_dir = self.CollapsePath(parent_dir)
            ellipsis = self._matching_string(
                parent_dir, self.path_separator + '..')
            if parent_dir.endswith(ellipsis):
                base_dir, dummy_dotdot, _ = parent_dir.partition(ellipsis)
            if not self.Exists(base_dir):
                raise OSError(errno.ENOENT, 'No such fake directory',
                              base_dir)

        dir_name = self.NormalizePath(dir_name)
        if self.Exists(dir_name):
            raise OSError(errno.EEXIST, 'Fake object already exists',
                          dir_name)
        head, tail = self.SplitPath(dir_name)

        self.AddObject(head, FakeDirectory(tail, mode & ~self.umask,
                                           filesystem=self))

    def MakeDirectories(self, dir_name, mode=PERM_DEF, exist_ok=False):
        """Create a leaf Fake directory and create any non-existent
        parent dirs.
        New in pyfakefs 3.0.

        Args:
            dir_name: (str) Name of directory to create.
            mode: (int) Mode to create directory (and any necessary parent
                directories) with. This argument defaults to 0o777. The
                umask is applied to this mode.
            exist_ok: (boolean) If exist_ok is False (the default), an
                OSError is raised if the target directory already exists.
                New in Python 3.2.

        Raises:
            OSError: if the directory already exists and exist_ok=False,
                or as per `FakeFilesystem.CreateDirectory()`.
        """
        dir_name = self.NormalizePath(dir_name)
        path_components = self.GetPathComponents(dir_name)

        # walk the already existing part of the requested path
        current_dir = self.root
        for component in path_components:
            if (component not in current_dir.contents or
                    not isinstance(current_dir.contents, dict)):
                break
            else:
                current_dir = current_dir.contents[component]
        try:
            self.CreateDirectory(dir_name, mode & ~self.umask)
        except (IOError, OSError) as e:
            if (not exist_ok or
                    not isinstance(self.ResolveObject(dir_name),
                                   FakeDirectory)):
                if isinstance(e, OSError):
                    raise
                raise OSError(e.errno, e.strerror, e.filename)
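
    # Usage sketch (hypothetical path): MakeDirectories() mirrors
    # os.makedirs(), creating intermediate directories in one call.
    #
    #     filesystem.MakeDirectories('/var/log/myapp')
    #     assert filesystem.IsDir('/var/log/myapp')
    #     # a second call raises OSError unless exist_ok=True:
    #     filesystem.MakeDirectories('/var/log/myapp', exist_ok=True)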

    def _IsType(self, path, st_flag, follow_symlinks=True):
        """Helper function to implement isdir(), islink(), etc.

        See the stat(2) man page for valid stat.S_I* flag values.

        Args:
            path: path to file to stat and test.
            st_flag: the stat.S_I* flag checked for the file's st_mode.

        Returns:
            boolean (the st_flag is set in path's st_mode).

        Raises:
            TypeError: if path is None.
        """
        if sys.version_info >= (3, 6):
            path = os.fspath(path)
        if path is None:
            raise TypeError
        try:
            obj = self.ResolveObject(path, follow_symlinks)
            if obj:
                return stat.S_IFMT(obj.st_mode) == st_flag
        except (IOError, OSError):
            return False
        return False

    def IsDir(self, path, follow_symlinks=True):
        """Determine if path identifies a directory.
        New in pyfakefs 3.0.

        Args:
            path: path to filesystem object.

        Returns:
            True if path points to a directory (following symlinks).

        Raises:
            TypeError: if path is None.
        """
        return self._IsType(path, stat.S_IFDIR, follow_symlinks)

    def IsFile(self, path, follow_symlinks=True):
        """Determine if path identifies a regular file.
        New in pyfakefs 3.0.

        Args:
            path: path to filesystem object.

        Returns:
            True if path points to a regular file (following symlinks).

        Raises:
            TypeError: if path is None.
        """
        return self._IsType(path, stat.S_IFREG, follow_symlinks)

    def IsLink(self, path):
        """Determine if path identifies a symbolic link.
        New in pyfakefs 3.0.

        Args:
            path: path to filesystem object.

        Returns:
            True if path points to a symlink (S_IFLNK set in st_mode).

        Raises:
            TypeError: if path is None.
        """
        return self._IsType(path, stat.S_IFLNK, follow_symlinks=False)

    def ConfirmDir(self, target_directory):
        """Test that the target is actually a directory, raising OSError
        if not.
        New in pyfakefs 3.0.

        Args:
            target_directory: path to the target directory within the fake
                filesystem.

        Returns:
            the FakeDirectory object corresponding to target_directory.

        Raises:
            OSError: if the target is not a directory.
        """
        try:
            directory = self.ResolveObject(target_directory)
        except IOError as exc:
            raise OSError(exc.errno, exc.strerror, target_directory)
        if not directory.st_mode & stat.S_IFDIR:
            raise OSError(errno.ENOTDIR,
                          'Fake os module: not a directory',
                          target_directory)
        return directory

    def RemoveFile(self, path):
        """Remove the FakeFile object at the specified file path.
        New in pyfakefs 3.0.

        Args:
            path: path to file to be removed.

        Raises:
            OSError: if path points to a directory.
            OSError: if path does not exist.
            OSError: if removal failed.
        """
        path = self.NormalizePath(path)
        if self.Exists(path):
            obj = self.ResolveObject(path)
            if stat.S_IFMT(obj.st_mode) == stat.S_IFDIR:
                link_obj = self.LResolveObject(path)
                if stat.S_IFMT(link_obj.st_mode) != stat.S_IFLNK:
                    raise OSError(errno.EISDIR,
                                  "Is a directory: '%s'" % path)

        try:
            self.RemoveObject(path)
        except IOError as exc:
            raise OSError(exc.errno, exc.strerror, exc.filename)

    def RemoveDirectory(self, target_directory, allow_symlink=False):
        """Remove a leaf Fake directory.
        New in pyfakefs 3.0.

        Args:
            target_directory: (str) Name of directory to remove.
            allow_symlink: (bool) if `target_directory` is a symlink,
                the function just returns, otherwise it raises (Posix only).

        Raises:
            OSError: if target_directory does not exist.
            OSError: if target_directory does not point to a directory.
            OSError: if removal failed per FakeFilesystem.RemoveObject.
                Cannot remove '.'.
        """
        if target_directory in ('.', u'.'):
            raise OSError(errno.EINVAL, "Invalid argument: '.'")
        target_directory = self.NormalizePath(target_directory)
        if self.ConfirmDir(target_directory):
            if not self.is_windows_fs and self.IsLink(target_directory):
                if allow_symlink:
                    return
                raise OSError(errno.ENOTDIR, 'Cannot remove symlink',
                              target_directory)

            dir_object = self.ResolveObject(target_directory)
            if dir_object.contents:
                raise OSError(errno.ENOTEMPTY, 'Fake Directory not empty',
                              target_directory)

            try:
                self.RemoveObject(target_directory)
            except IOError as exc:
                raise OSError(exc.errno, exc.strerror, exc.filename)

    def ListDir(self, target_directory):
        """Return a list of file names in target_directory.
        New in pyfakefs 3.0.

        Args:
            target_directory: path to the target directory within the fake
                filesystem.

        Returns:
            a list of file names within the target directory in arbitrary
            order.

        Raises:
            OSError: if the target is not a directory.
        """
        target_directory = self.ResolvePath(target_directory, allow_fd=True)
        directory = self.ConfirmDir(target_directory)
        directory_contents = directory.contents
        return list(directory_contents.keys())
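
    # Quick sketch (hypothetical layout): ListDir() returns bare entry
    # names, not full paths, matching os.listdir() semantics.
    #
    #     filesystem.CreateFile('/srv/a.txt')
    #     filesystem.CreateFile('/srv/b.txt')
    #     assert sorted(filesystem.ListDir('/srv')) == ['a.txt', 'b.txt']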

    if sys.version_info >= (3, 5):
        class DirEntry:
            """Emulates os.DirEntry. Note that we did not enforce keyword
            only arguments."""

            def __init__(self, filesystem):
                """Initialize the dir entry with unset values.

                Args:
                    filesystem: the fake filesystem used for implementation.
                """
                self._filesystem = filesystem
                self.name = ''
                self.path = ''
                self._inode = None
                self._islink = False
                self._isdir = False
                self._statresult = None
                self._statresult_symlink = None

            def inode(self):
                """Return the inode number of the entry."""
                if self._inode is None:
                    self.stat(follow_symlinks=False)
                return self._inode

            def is_dir(self, follow_symlinks=True):
                """Return True if this entry is a directory entry.

                Args:
                    follow_symlinks: If True, also return True if this
                        entry is a symlink pointing to a directory.

                Returns:
                    True if this entry is an existing directory entry, or
                    if follow_symlinks is set, and this entry points to an
                    existing directory entry.
                """
                return self._isdir and (follow_symlinks or not self._islink)

            def is_file(self, follow_symlinks=True):
                """Return True if this entry is a regular file entry.

                Args:
                    follow_symlinks: If True, also return True if this
                        entry is a symlink pointing to a regular file.

                Returns:
                    True if this entry is an existing file entry, or if
                    follow_symlinks is set, and this entry points to an
                    existing file entry.
                """
                return (not self._isdir and
                        (follow_symlinks or not self._islink))

            def is_symlink(self):
                """Return True if this entry is a symbolic link (even if
                broken)."""
                return self._islink

            def stat(self, follow_symlinks=True):
                """Return a stat_result object for this entry.

                Args:
                    follow_symlinks: If False and the entry is a symlink,
                        return the result for the symlink, otherwise for
                        the object it points to.
                """
                if follow_symlinks:
                    if self._statresult_symlink is None:
                        file_object = self._filesystem.ResolveObject(
                            self.path)
                        if self._filesystem.is_windows_fs:
                            # under Windows these properties are reported
                            # as 0 instead of their real values
                            file_object.st_ino = 0
                            file_object.st_dev = 0
                            file_object.st_nlink = 0
                        self._statresult_symlink = (
                            file_object.stat_result.copy())
                    return self._statresult_symlink

                if self._statresult is None:
                    file_object = self._filesystem.LResolveObject(self.path)
                    self._inode = file_object.st_ino
                    if self._filesystem.is_windows_fs:
                        file_object.st_ino = 0
                        file_object.st_dev = 0
                        file_object.st_nlink = 0
                    self._statresult = file_object.stat_result.copy()
                return self._statresult

        class ScanDirIter:
            """Iterator for DirEntry objects returned from `scandir()`
            function.
            New in pyfakefs 3.0.
            """

            def __init__(self, filesystem, path):
                self.filesystem = filesystem
                self.path = self.filesystem.ResolvePath(path)
                contents = {}
                try:
                    contents = self.filesystem.ConfirmDir(path).contents
                except OSError:
                    pass
                self.contents_iter = iter(contents)

            def __iter__(self):
                return self

            def __next__(self):
                entry = self.contents_iter.__next__()
                dir_entry = self.filesystem.DirEntry(self.filesystem)
                dir_entry.name = entry
                dir_entry.path = self.filesystem.JoinPaths(self.path,
                                                           dir_entry.name)
                dir_entry._isdir = self.filesystem.IsDir(dir_entry.path)
                dir_entry._islink = self.filesystem.IsLink(dir_entry.path)
                return dir_entry

            if sys.version_info >= (3, 6):
                def __enter__(self):
                    return self

                def __exit__(self, exc_type, exc_val, exc_tb):
                    self.close()

                def close(self):
                    pass

        def ScanDir(self, path=''):
            """Return an iterator of DirEntry objects corresponding to the
            entries in the directory given by path.
            New in pyfakefs 3.0.

            Args:
                path: path to the target directory within the fake
                    filesystem.

            Returns:
                an iterator to an unsorted list of os.DirEntry objects for
                each entry in path.

            Raises:
                OSError: if the target is not a directory.
            """
            return self.ScanDirIter(self, path)
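
        # Sketch (hypothetical layout): ScanDir() yields the DirEntry
        # objects defined above, mirroring os.scandir() on Python >= 3.5.
        #
        #     for entry in filesystem.ScanDir('/srv'):
        #         print(entry.name, entry.is_file())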

    def __str__(self):
        return str(self.root)


class FakePathModule(object):
    """Faked os.path module replacement.

    FakePathModule should *only* be instantiated by FakeOsModule. See the
    FakeOsModule docstring for details.
    """
    _OS_PATH_COPY = CopyModule(os.path)

    def __init__(self, filesystem, os_module=None):
        """Init.

        Args:
            filesystem: FakeFilesystem used to provide file system
                information.
            os_module: (deprecated) FakeOsModule to assign to self.os
        """
        self.filesystem = filesystem
        self._os_path = self._OS_PATH_COPY
        if os_module is None:
            warnings.warn(FAKE_PATH_MODULE_DEPRECATION, DeprecationWarning,
                          stacklevel=2)
        self._os_path.os = self.os = os_module
        self.sep = self.filesystem.path_separator
        self.altsep = self.filesystem.alternative_path_separator

    def exists(self, path):
        """Determine whether the file object exists within the fake
        filesystem.

        Args:
            path: path to the file object.

        Returns:
            bool (if file exists).
        """
        return self.filesystem.Exists(path)

    def lexists(self, path):
        """Test whether a path exists. Returns True for broken symbolic
        links.

        Args:
            path: path to the symlink object.

        Returns:
            bool (if file exists).
        """
        return self.exists(path) or self.islink(path)

    def getsize(self, path):
        """Return the file object size in bytes.

        Args:
            path: path to the file object.

        Returns:
            file size in bytes.
        """
        try:
            file_obj = self.filesystem.ResolveObject(path)
            return file_obj.st_size
        except IOError as exc:
            raise os.error(exc.errno, exc.strerror)

    def isabs(self, path):
        """Return True if path is an absolute pathname."""
        if self.filesystem.is_windows_fs:
            path = self.splitdrive(path)[1]
        if sys.version_info >= (3, 6):
            path = os.fspath(path)
        sep = self.filesystem._path_separator(path)
        altsep = self.filesystem._alternative_path_separator(path)
        if self.filesystem.is_windows_fs:
            return len(path) > 0 and path[:1] in (sep, altsep)
        else:
            return (path.startswith(sep) or
                    (altsep is not None and path.startswith(altsep)))

    def isdir(self, path):
        """Determine if path identifies a directory."""
        return self.filesystem.IsDir(path)

    def isfile(self, path):
        """Determine if path identifies a regular file."""
        return self.filesystem.IsFile(path)

    def islink(self, path):
        """Determine if path identifies a symbolic link.

        Args:
            path: path to filesystem object.

        Returns:
            True if path points to a symbolic link.

        Raises:
            TypeError: if path is None.
        """
        return self.filesystem.IsLink(path)

    def getmtime(self, path):
        """Returns the modification time of the fake file.

        Args:
            path: the path to fake file.

        Returns:
            (int, float) the modification time of the fake file
            in number of seconds since the epoch.

        Raises:
            OSError: if the file does not exist.
        """
        try:
            file_obj = self.filesystem.ResolveObject(path)
        except IOError as exc:
            raise OSError(errno.ENOENT, str(exc))
        return file_obj.st_mtime

    def getatime(self, path):
        """Returns the last access time of the fake file.

        Note: Access time is not set automatically in fake filesystem
        on access.

        Args:
            path: the path to fake file.

        Returns:
            (int, float) the access time of the fake file in number of
            seconds since the epoch.

        Raises:
            OSError: if the file does not exist.
        """
        try:
            file_obj = self.filesystem.ResolveObject(path)
        except IOError as exc:
            raise OSError(errno.ENOENT, str(exc))
        return file_obj.st_atime

    def getctime(self, path):
        """Returns the creation time of the fake file.

        Args:
            path: the path to fake file.

        Returns:
            (int, float) the creation time of the fake file in number of
            seconds since the epoch.

        Raises:
            OSError: if the file does not exist.
        """
        try:
            file_obj = self.filesystem.ResolveObject(path)
        except IOError as exc:
            raise OSError(errno.ENOENT, str(exc))
        return file_obj.st_ctime

    def abspath(self, path):
        """Return the absolute version of a path."""

        def getcwd():
            """Return the current working directory."""
            if sys.version_info < (3,) and isinstance(path, unicode):
                return self.os.getcwdu()
            elif sys.version_info >= (3,) and isinstance(path, bytes):
                return self.os.getcwdb()
            else:
                return self.os.getcwd()

        if sys.version_info >= (3, 6):
            path = os.fspath(path)

        sep = self.filesystem._path_separator(path)
        altsep = self.filesystem._alternative_path_separator(path)
        if not self.isabs(path):
            path = self.join(getcwd(), path)
        elif ((self.filesystem.is_windows_fs and path.startswith(sep)) or
              (altsep is not None and path.startswith(altsep))):
            cwd = getcwd()
            if self.filesystem.StartsWithDriveLetter(cwd):
                path = self.join(cwd[:2], path)
        return self.normpath(path)

    def join(self, *p):
        """Return the completed path with a separator of the parts."""
        return self.filesystem.JoinPaths(*p)

    def split(self, path):
        """Split the path into the directory and the filename of the path.
        New in pyfakefs 3.0.
        """
        return self.filesystem.SplitPath(path)

    def splitdrive(self, path):
        """Split the path into the drive part and the rest of the path, if
        supported.
        New in pyfakefs 2.9.
        """
        return self.filesystem.SplitDrive(path)

    def normpath(self, path):
        """Normalize path, eliminating double slashes, etc."""
        return self.filesystem.CollapsePath(path)

    def normcase(self, path):
        """Convert to lower case under windows, replaces additional path
        separator.
        New in pyfakefs 2.9.
        """
        path = self.filesystem.NormalizePathSeparator(path)
        if self.filesystem.is_windows_fs:
            path = path.lower()
        return path

    def relpath(self, path, start=None):
        """We mostly rely on the native implementation and adapt the
        path separator."""
        if not path:
            raise ValueError('no path specified')
        if sys.version_info >= (3, 6):
            path = os.fspath(path)
            if start is not None:
                start = os.fspath(start)
        if start is None:
            start = self.filesystem.cwd
        if self.filesystem.alternative_path_separator is not None:
            path = path.replace(
                self.filesystem.alternative_path_separator,
                self._os_path.sep)
            start = start.replace(
                self.filesystem.alternative_path_separator,
                self._os_path.sep)
        path = path.replace(self.filesystem.path_separator,
                            self._os_path.sep)
        start = start.replace(self.filesystem.path_separator,
                              self._os_path.sep)
        path = self._os_path.relpath(path, start)
        return path.replace(self._os_path.sep,
                            self.filesystem.path_separator)

    def realpath(self, filename):
        """Return the canonical path of the specified filename, eliminating
        any symbolic links encountered in the path.
        New in pyfakefs 3.0.
        """
        if self.filesystem.is_windows_fs:
            return self.abspath(filename)
        if sys.version_info >= (3, 6):
            filename = os.fspath(filename)
        path, ok = self._joinrealpath(filename[:0], filename, {})
        return self.abspath(path)

    if sys.platform != 'win32' or sys.version_info >= (3, 2):
        def samefile(self, path1, path2):
            """Return whether path1 and path2 point to the same file.
            Windows support new in Python 3.2.
            New in pyfakefs 3.3.

            Args:
                path1: first file path or path object (Python >=3.6)
                path2: second file path or path object (Python >=3.6)

            Raises:
                OSError: if one of the paths does not point to an existing
                    file system object.
            """
            stat1 = self.filesystem.GetStat(path1)
            stat2 = self.filesystem.GetStat(path2)
            return (stat1.st_ino == stat2.st_ino and
                    stat1.st_dev == stat2.st_dev)

    def _joinrealpath(self, path, rest, seen):
        """Join two paths, normalizing and eliminating any symbolic links
        encountered in the second path.
        Taken from Python source and adapted.
        """
        curdir = self.filesystem._matching_string(path, '.')
        pardir = self.filesystem._matching_string(path, '..')

        sep = self.filesystem._path_separator(path)
        if self.isabs(rest):
            rest = rest[1:]
            path = sep

        while rest:
            name, _, rest = rest.partition(sep)
            if not name or name == curdir:
                # current dir
                continue
            if name == pardir:
                # parent dir
                if path:
                    path, name = self.filesystem.SplitPath(path)
                    if name == pardir:
                        path = self.filesystem.JoinPaths(path, pardir,
                                                         pardir)
                else:
                    path = pardir
                continue
            newpath = self.filesystem.JoinPaths(path, name)
            if not self.filesystem.IsLink(newpath):
                path = newpath
                continue
            # resolve the symbolic link
            if newpath in seen:
                # already seen this path
                path = seen[newpath]
                if path is not None:
                    # use cached value
                    continue
                # the symlink is not resolved, so we must have a symlink
                # loop; return the already resolved part plus the rest of
                # the path unchanged
                return (self.filesystem.JoinPaths(newpath, rest), False)
            seen[newpath] = None  # not resolved symlink
            path, ok = self._joinrealpath(
                path, self.filesystem.ReadLink(newpath), seen)
            if not ok:
                return (self.filesystem.JoinPaths(path, rest), False)
            seen[newpath] = path  # resolved symlink
        return (path, True)

    def dirname(self, path):
        """Returns the first part of the result of `split()`.
        New in pyfakefs 3.0.
        """
        return self.split(path)[0]

    def expanduser(self, path):
        """Return the argument with an initial component of ~ or ~user
        replaced by that user's home directory.
        """
        return self._os_path.expanduser(path).replace(
            self._os_path.sep, self.sep)

    def ismount(self, path):
        """Return true if the given path is a mount point.
        New in pyfakefs 2.9.

        Args:
            path: path to filesystem object to be checked.

        Returns:
            True if path is a mount point added to the fake file system.
            Under Windows also returns True for drive and UNC roots
            (independent of their existence).
        """
        if sys.version_info >= (3, 6):
            path = os.fspath(path)
        if not path:
            return False
        normed_path = self.filesystem.NormalizePath(path)
        sep = self.filesystem._path_separator(path)
        if self.filesystem.is_windows_fs:
            if self.filesystem.alternative_path_separator is not None:
                path_seps = (
                    sep, self.filesystem._alternative_path_separator(path))
            else:
                path_seps = (sep,)
            drive, rest = self.filesystem.SplitDrive(normed_path)
            if drive and drive[:1] in path_seps:
                return (not rest) or (rest in path_seps)
            if rest in path_seps:
                return True
        for mount_point in self.filesystem.mount_points:
            if normed_path.rstrip(sep) == mount_point.rstrip(sep):
                return True
        return False

    if sys.version_info < (3, 0):
        def walk(self, top, func, arg):
            """Directory tree walk with callback function.
            New in pyfakefs 3.0.

            Args:
                top: root path to traverse. The root itself is not included
                    in the called elements.
                func: function to be called for each visited path node.
                arg: first argument to be called with func (apart from
                    dirname and filenames).
            """
            try:
                names = self.filesystem.ListDir(top)
            except os.error:
                return
            func(arg, top, names)
            for name in names:
                name = self.filesystem.JoinPaths(top, name)
                if self.filesystem.is_windows_fs:
                    if self.filesystem.IsDir(name):
                        self.walk(name, func, arg)
                else:
                    try:
                        st = self.filesystem.GetStat(name,
                                                     follow_symlinks=False)
                    except os.error:
                        continue
                    if stat.S_ISDIR(st.st_mode):
                        self.walk(name, func, arg)

    def __getattr__(self, name):
        """Forwards any non-faked calls to the real os.path."""
        return getattr(self._os_path, name)
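

# Usage sketch for FakePathModule (the class names below are from this
# module; the concrete paths are hypothetical). The module is normally
# obtained through FakeOsModule.path rather than instantiated directly:
#
#     filesystem = FakeFilesystem()
#     fake_os = FakeOsModule(filesystem)
#     filesystem.CreateFile('/a/b/c.txt')
#     assert fake_os.path.exists('/a/b/c.txt')
#     assert fake_os.path.dirname('/a/b/c.txt') == '/a/b'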


class FakeOsModule(object):
    """Uses FakeFilesystem to provide a fake os module replacement.

    Do not create os.path separately from os, as there is a necessary
    circular dependency between os and os.path to replicate the behavior
    of the standard Python modules. What you want to do is to just let
    FakeOsModule take care of os.path setup itself.

    # You always want to do this.
    filesystem = fake_filesystem.FakeFilesystem()
    my_os_module = fake_filesystem.FakeOsModule(filesystem)
    """

    _stat_float_times = sys.version_info >= (2, 5)

    def __init__(self, filesystem, os_path_module=None):
        """Also exposes self.path (to fake os.path).

        Args:
            filesystem: FakeFilesystem used to provide file system
                information.
            os_path_module: (deprecated) optional FakePathModule instance
        """
        self.filesystem = filesystem
        self.sep = filesystem.path_separator
        self.altsep = filesystem.alternative_path_separator
        self._os_module = os
        if os_path_module is None:
            self.path = FakePathModule(self.filesystem, self)
        else:
            warnings.warn(FAKE_PATH_MODULE_DEPRECATION, DeprecationWarning,
                          stacklevel=2)
            self.path = os_path_module
        if sys.version_info < (3, 0):
            self.fdopen = self._fdopen_ver2
        else:
            self.fdopen = self._fdopen

    def _fdopen(self, *args, **kwargs):
        """Redirector to open() builtin function.

        Args:
            *args: pass through args
            **kwargs: pass through kwargs

        Returns:
            File object corresponding to file_des.

        Raises:
            TypeError: if file descriptor is not an integer.
        """
        if not isinstance(args[0], int):
            raise TypeError('an integer is required')
        return FakeFileOpen(self.filesystem)(*args, **kwargs)

    def _fdopen_ver2(self, file_des, mode='r', bufsize=None):
        """Returns an open file object connected to the file descriptor
        file_des.

        Args:
            file_des: An integer file descriptor for the file object
                requested.
            mode: additional file flags. Currently checks to see if the
                mode matches the mode of the requested file object.
            bufsize: ignored. (Used for signature compliance with
                __builtin__.fdopen)

        Returns:
            File object corresponding to file_des.

        Raises:
            OSError: if bad file descriptor or incompatible mode is given.
            TypeError: if file descriptor is not an integer.
        """
        if not isinstance(file_des, int):
            raise TypeError('an integer is required')

        try:
            return FakeFileOpen(self.filesystem).Call(file_des, mode=mode)
        except IOError as exc:
            raise OSError(exc)

    def _umask(self):
        """Return the current umask."""
        if self.filesystem.is_windows_fs:
            # Windows has no real notion of umask
            return 0
        if sys.platform == 'win32':
            # testing a Unix filesystem under Windows: assume a default mask
            return 0o002
        else:
            # there is no pure getter for umask, so set a value to read the
            # previous one and then restore it
            mask = os.umask(0)
            os.umask(mask)
            return mask

    def open(self, file_path, flags, mode=None, dir_fd=None):
        """Return the file descriptor for a FakeFile.

        Args:
            file_path: the path to the file
            flags: low-level bits to indicate io operation
            mode: bits to define default permissions
                Note: only basic modes are supported, OS-specific modes are
                ignored
            dir_fd: If not `None`, the file descriptor of a directory,
                with `file_path` being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.

        Returns:
            A file descriptor.

        Raises:
            IOError: if the path cannot be found
            ValueError: if invalid mode is given
            NotImplementedError: if `os.O_EXCL` is used without `os.O_CREAT`
        """
        file_path = self._path_with_dir_fd(file_path, self.open, dir_fd)
        if mode is None:
            if self.filesystem.is_windows_fs:
                mode = 0o666
            else:
                mode = 0o777 & ~self._umask()

        open_modes = _OpenModes(
            must_exist=not flags & os.O_CREAT,
            can_read=not flags & os.O_WRONLY,
            can_write=flags & (os.O_RDWR | os.O_WRONLY),
            truncate=flags & os.O_TRUNC,
            append=flags & os.O_APPEND,
            must_not_exist=flags & os.O_EXCL
        )
        if open_modes.must_not_exist and open_modes.must_exist:
            raise NotImplementedError(
                'O_EXCL without O_CREAT mode is not supported')

        if (not self.filesystem.is_windows_fs and
                not open_modes.can_write and
                self.filesystem.Exists(file_path)):
            # opening a directory is only allowed under Posix
            # with read-only mode
            obj = self.filesystem.ResolveObject(file_path)
            if isinstance(obj, FakeDirectory):
                dir_wrapper = FakeDirWrapper(obj, file_path, self.filesystem)
                file_des = self.filesystem.AddOpenFile(dir_wrapper)
                dir_wrapper.filedes = file_des
                return file_des

        # low-level open is always binary
        str_flags = 'b'
        delete_on_close = False
        if hasattr(os, 'O_TEMPORARY'):
            delete_on_close = flags & os.O_TEMPORARY == os.O_TEMPORARY
        fake_file = FakeFileOpen(
            self.filesystem, delete_on_close=delete_on_close, raw_io=True)(
                file_path, str_flags, open_modes=open_modes)
        self.chmod(file_path, mode)
        return fake_file.fileno()

    def close(self, file_des):
        """Close a file descriptor.

        Args:
            file_des: An integer file descriptor for the file object
                requested.

        Raises:
            OSError: bad file descriptor.
            TypeError: if file descriptor is not an integer.
        """
        file_handle = self.filesystem.GetOpenFile(file_des)
        file_handle.close()

    def read(self, file_des, num_bytes):
        """Read number of bytes from a file descriptor, returns bytes read.

        Args:
            file_des: An integer file descriptor for the file object
                requested.
            num_bytes: Number of bytes to read from file.

        Returns:
            Bytes read from file.

        Raises:
            OSError: bad file descriptor.
            TypeError: if file descriptor is not an integer.
        """
        file_handle = self.filesystem.GetOpenFile(file_des)
        file_handle.raw_io = True
        return file_handle.read(num_bytes)

    def write(self, file_des, contents):
        """Write string to file descriptor, returns number of bytes written.

        Args:
            file_des: An integer file descriptor for the file object
                requested.
            contents: String of bytes to write to file.

        Returns:
            Number of bytes written.

        Raises:
            OSError: bad file descriptor.
            TypeError: if file descriptor is not an integer.
        """
        file_handle = self.filesystem.GetOpenFile(file_des)
        file_handle.raw_io = True
        file_handle._sync_io()
        file_handle.write(contents)
        file_handle.flush()
        return len(contents)
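
    # Low-level descriptor sketch (hypothetical path; `fake_os` is a
    # FakeOsModule instance as in the sketch before this class):
    # open/read/write/close mirror the os-module calls of the same names.
    #
    #     fd = fake_os.open('/tmp/raw.bin', os.O_CREAT | os.O_RDWR)
    #     fake_os.write(fd, b'abc')
    #     fake_os.close(fd)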

    @classmethod
    def stat_float_times(cls, newvalue=None):
        """Determine whether a file's time stamps are reported as floats
        or ints.
        New in pyfakefs 2.9.

        Calling without arguments returns the current value. The value is
        shared by all instances of FakeOsModule.

        Args:
            newvalue: if True, mtime, ctime, atime are reported as floats.
                Else, as ints (rounding down).
        """
        if newvalue is not None:
            cls._stat_float_times = bool(newvalue)
        return cls._stat_float_times

    def fstat(self, file_des):
        """Return the os.stat-like tuple for the FakeFile object of
        file_des.

        Args:
            file_des: file descriptor of filesystem object to retrieve.

        Returns:
            the FakeStatResult object corresponding to entry_path.

        Raises:
            OSError: if the filesystem object doesn't exist.
        """
        file_object = self.filesystem.GetOpenFile(file_des).GetObject()
        return file_object.stat_result.copy()

    def umask(self, new_mask):
        """Change the current umask.

        Args:
            new_mask: An integer.

        Returns:
            The old mask.

        Raises:
            TypeError: new_mask is of an invalid type.
        """
        if not isinstance(new_mask, int):
            raise TypeError('an integer is required')
        old_umask = self.filesystem.umask
        self.filesystem.umask = new_mask
        return old_umask

    def chdir(self, target_directory):
        """Change current working directory to target directory.

        Args:
            target_directory: path to new current working directory.

        Raises:
            OSError: if user lacks permission to enter the argument
                directory or if the target is not a directory.
        """
        target_directory = self.filesystem.ResolvePath(target_directory,
                                                       allow_fd=True)
        self.filesystem.ConfirmDir(target_directory)
        directory = self.filesystem.ResolveObject(target_directory)
        # the execute (search) bit must be set to enter a directory
        # (a bitwise OR here would always pass, so AND is used)
        if not directory.st_mode & PERM_EXE:
            raise OSError(errno.EACCES, 'Fake os module: permission denied',
                          directory)
        self.filesystem.cwd = target_directory

    def getcwd(self):
        """Return current working directory."""
        return self.filesystem.cwd

    if sys.version_info < (3,):
        def getcwdu(self):
            """Return current working directory as unicode. Python 2 only."""
            return unicode(self.filesystem.cwd)

    else:
        def getcwdb(self):
            """Return current working directory as bytes. Python 3 only."""
            return bytes(self.filesystem.cwd,
                         locale.getpreferredencoding(False))

    def listdir(self, target_directory):
        """Return a list of file names in target_directory.

        Args:
            target_directory: path to the target directory within the fake
                filesystem.

        Returns:
            a list of file names within the target directory in arbitrary
            order.

        Raises:
            OSError: if the target is not a directory.
        """
        return self.filesystem.ListDir(target_directory)

    if sys.platform.startswith('linux') and sys.version_info >= (3, 3):
        def listxattr(self, path=None, follow_symlinks=True):
            """Dummy implementation that returns an empty list - used by
            shutil."""
            return []

    if sys.version_info >= (3, 5):
        def scandir(self, path=''):
            """Return an iterator of DirEntry objects corresponding to the
            entries in the directory given by path.

            Args:
                path: path to the target directory within the fake
                    filesystem.

            Returns:
                an iterator to an unsorted list of os.DirEntry objects for
                each entry in path.

            Raises:
                OSError: if the target is not a directory.
            """
            return self.filesystem.ScanDir(path)

    def _ClassifyDirectoryContents(self, root):
        """Classify contents of a directory as files/directories.

        Args:
            root: (str) Directory to examine.

        Returns:
            (tuple) A tuple consisting of three values: the directory
            examined, a list containing all of the directory entries, and a
            list containing all of the non-directory entries. (This is the
            same format as returned by the os.walk generator.)

        Raises:
            Nothing on its own, but be ready to catch exceptions generated
            by underlying mechanisms like os.listdir.
        """
        dirs = []
        files = []
        for entry in self.listdir(root):
            if self.path.isdir(self.path.join(root, entry)):
                dirs.append(entry)
            else:
                files.append(entry)
        return (root, dirs, files)

    def walk(self, top, topdown=True, onerror=None, followlinks=False):
        """Perform an os.walk operation over the fake filesystem.

        Args:
            top: root directory from which to begin walk.
            topdown: determines whether to return the tuples with the root
                as the first entry (True) or as the last, after all the
                child directory tuples (False).
            onerror: if not None, function which will be called to handle
                the os.error instance provided when os.listdir() fails.
            followlinks: if True, symbolic links are followed.
                New in pyfakefs 2.9.

        Yields:
            (path, directories, nondirectories) for top and each of its
            subdirectories. See the documentation for the builtin os module
            for further details.
        """
        def do_walk(top, topMost=False):
            top = self.path.normpath(top)
            if not topMost and not followlinks and self.path.islink(top):
                return
            try:
                top_contents = self._ClassifyDirectoryContents(top)
            except OSError as exc:
                top_contents = None
                if onerror is not None:
                    onerror(exc)

            if top_contents is not None:
                if topdown:
                    yield top_contents

                for directory in top_contents[1]:
                    if not followlinks and self.path.islink(directory):
                        continue
                    for contents in do_walk(self.path.join(top, directory)):
                        yield contents

                if not topdown:
                    yield top_contents

        return do_walk(top, topMost=True)
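
    # Walk sketch (hypothetical tree; `fake_os` as in the earlier sketch):
    # the generator yields tuples identical in shape to os.walk().
    #
    #     for root, dirs, files in fake_os.walk('/srv'):
    #         print(root, dirs, files)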

    def readlink(self, path, dir_fd=None):
        """Read the target of a symlink.

        Args:
            path: Symlink to read the target of.
            dir_fd: If not `None`, the file descriptor of a directory,
                with `path` being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.

        Returns:
            the string representing the path to which the symbolic link
            points.

        Raises:
            TypeError: if `path` is None.
            OSError: (with errno=ENOENT) if path is not a valid path, or
                (with errno=EINVAL) if path is valid, but is not a symlink.
        """
        path = self._path_with_dir_fd(path, self.readlink, dir_fd)
        return self.filesystem.ReadLink(path)

    def stat(self, entry_path, dir_fd=None, follow_symlinks=None):
        """Return the os.stat-like tuple for the FakeFile object of
        entry_path.

        Args:
            entry_path: path to filesystem object to retrieve.
            dir_fd: (int) If not `None`, the file descriptor of a directory,
                with `entry_path` being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.
            follow_symlinks: (bool) If `False` and `entry_path` points to a
                symlink, the link itself is queried instead of the linked
                object.
                New in Python 3.3. New in pyfakefs 3.0.

        Returns:
            the FakeStatResult object corresponding to entry_path.

        Raises:
            OSError: if the filesystem object doesn't exist.
        """
        if follow_symlinks is None:
            follow_symlinks = True
        elif sys.version_info < (3, 3):
            raise TypeError(
                "stat() got an unexpected keyword argument "
                "'follow_symlinks'")
        entry_path = self._path_with_dir_fd(entry_path, self.stat, dir_fd)
        return self.filesystem.GetStat(entry_path, follow_symlinks)

    def lstat(self, entry_path, dir_fd=None):
        """Return the os.stat-like tuple for entry_path, not following
        symlinks.

        Args:
            entry_path: path to filesystem object to retrieve.
            dir_fd: If not `None`, the file descriptor of a directory, with
                `entry_path` being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.

        Returns:
            the FakeStatResult object corresponding to `entry_path`.

        Raises:
            OSError: if the filesystem object doesn't exist.
        """
        entry_path = self._path_with_dir_fd(entry_path, self.lstat, dir_fd)
        return self.filesystem.GetStat(entry_path, follow_symlinks=False)

    def remove(self, path, dir_fd=None):
        """Remove the FakeFile object at the specified file path.

        Args:
            path: Path to file to be removed.
            dir_fd: If not `None`, the file descriptor of a directory,
                with `path` being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.

        Raises:
            OSError: if path points to a directory.
            OSError: if path does not exist.
            OSError: if removal failed.
        """
        path = self._path_with_dir_fd(path, self.remove, dir_fd)
        self.filesystem.RemoveFile(path)

    def unlink(self, path, dir_fd=None):
        """Remove the FakeFile object at the specified file path.

        Args:
            path: Path to file to be removed.
            dir_fd: If not `None`, the file descriptor of a directory,
                with `path` being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.

        Raises:
            OSError: if path points to a directory.
            OSError: if path does not exist.
            OSError: if removal failed.
        """
        path = self._path_with_dir_fd(path, self.unlink, dir_fd)
        self.filesystem.RemoveFile(path)

    def rename(self, old_file_path, new_file_path, dir_fd=None):
        """Rename a FakeFile object at old_file_path to new_file_path,
        preserving all properties.
        Also replaces existing new_file_path object, if one existed
        (Unix only).

        Args:
            old_file_path: Path to filesystem object to rename.
            new_file_path: Path to where the filesystem object will live
                after this call.
            dir_fd: If not `None`, the file descriptor of a directory,
                with `old_file_path` being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.

        Raises:
            OSError: if old_file_path does not exist.
            OSError: if new_file_path is an existing directory.
            OSError: if new_file_path is an existing file (Windows only).
            OSError: if new_file_path is an existing file and could not be
                removed (Unix).
            OSError: if `dirname(new_file)` does not exist.
            OSError: if the file would be moved to another filesystem
                (e.g. mount point).
        """
        old_file_path = self._path_with_dir_fd(old_file_path, self.rename,
                                               dir_fd)
        self.filesystem.RenameObject(old_file_path, new_file_path)

    if sys.version_info >= (3, 3):
        def replace(self, old_file_path, new_file_path):
            """Renames a FakeFile object at old_file_path to new_file_path,
            preserving all properties.
            Also replaces existing new_file_path object, if one existed.
            New in pyfakefs 3.0.

            Args:
                old_file_path: path to filesystem object to rename.
                new_file_path: path to where the filesystem object will
                    live after this call.

            Raises:
                OSError: if old_file_path does not exist.
                OSError: if new_file_path is an existing directory.
                OSError: if new_file_path is an existing file and could not
                    be removed.
                OSError: if `dirname(new_file)` does not exist.
                OSError: if the file would be moved to another filesystem
                    (e.g. mount point).
            """
            self.filesystem.RenameObject(old_file_path, new_file_path,
                                         force_replace=True)

    def rmdir(self, target_directory, dir_fd=None):
        """Remove a leaf Fake directory.

        Args:
            target_directory: (str) Name of directory to remove.
            dir_fd: If not `None`, the file descriptor of a directory,
                with `target_directory` being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.

        Raises:
            OSError: if target_directory does not exist or is not a
                directory, or as per FakeFilesystem.RemoveObject.
                Cannot remove '.'.
        """
        target_directory = self._path_with_dir_fd(target_directory,
                                                  self.rmdir, dir_fd)
        self.filesystem.RemoveDirectory(target_directory)

    def removedirs(self, target_directory):
        """Remove a leaf fake directory and all empty intermediate ones.

        Args:
            target_directory: the directory to be removed.

        Raises:
            OSError: if target_directory does not exist or is not a
                directory.
            OSError: if target_directory is not empty.
        """
        target_directory = self.filesystem.NormalizePath(target_directory)
        directory = self.filesystem.ConfirmDir(target_directory)
        if directory.contents:
            raise OSError(errno.ENOTEMPTY, 'Fake Directory not empty',
                          self.path.basename(target_directory))
        else:
            self.rmdir(target_directory)
        head, tail = self.path.split(target_directory)
        if not tail:
            head, tail = self.path.split(head)
        while head and tail:
            head_dir = self.filesystem.ConfirmDir(head)
            if head_dir.contents:
                break
            self.filesystem.RemoveDirectory(head, allow_symlink=True)
            head, tail = self.path.split(head)

    def mkdir(self, dir_name, mode=PERM_DEF, dir_fd=None):
        """Create a leaf Fake directory.

        Args:
            dir_name: (str) Name of directory to create.
                Relative paths are assumed to be relative to '/'.
            mode: (int) Mode to create directory with. This argument
                defaults to 0o777. The umask is applied to this mode.
            dir_fd: If not `None`, the file descriptor of a directory,
                with `dir_name` being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.

        Raises:
            OSError: if the directory name is invalid or parent directory
                is read only or as per FakeFilesystem.AddObject.
        """
        dir_name = self._path_with_dir_fd(dir_name, self.mkdir, dir_fd)
        self.filesystem.MakeDirectory(dir_name, mode)

    def makedirs(self, dir_name, mode=PERM_DEF, exist_ok=None):
        """Create a leaf Fake directory + create any non-existent
        parent dirs.

        Args:
            dir_name: (str) Name of directory to create.
            mode: (int) Mode to create directory (and any necessary parent
                directories) with. This argument defaults to 0o777. The
                umask is applied to this mode.
            exist_ok: (boolean) If exist_ok is False (the default), an
                OSError is raised if the target directory already exists.
                New in Python 3.2. New in pyfakefs 2.9.

        Raises:
            OSError: if the directory already exists and exist_ok=False,
                or as per `FakeFilesystem.CreateDirectory()`.
        """
        if exist_ok is None:
            exist_ok = False
        elif sys.version_info < (3, 2):
            raise TypeError(
                "makedir() got an unexpected keyword argument 'exist_ok'")
        self.filesystem.MakeDirectories(dir_name, mode, exist_ok)

    def _path_with_dir_fd(self, path, fct, dir_fd):
        """Return the path considering dir_fd. Raise on invalid
        parameters."""
        if dir_fd is not None:
            if sys.version_info < (3, 3):
                raise TypeError(
                    "%s() got an unexpected keyword argument 'dir_fd'"
                    % fct.__name__)
            # check if fd is supported for the built-in real function
            real_fct = getattr(os, fct.__name__)
            if real_fct not in self.supports_dir_fd:
                raise NotImplementedError(
                    'dir_fd unavailable on this platform')
            if isinstance(path, int):
                raise ValueError(
                    "%s: Can't specify dir_fd without matching path"
                    % fct.__name__)
            if not self.path.isabs(path):
                return self.path.join(
                    self.filesystem.GetOpenFile(
                        dir_fd).GetObject().GetPath(),
                    path)
        return path
# line: 3758
    def access(self, path, mode, dir_fd=None, follow_symlinks=None):
        """Check if a file exists and has the specified permissions.

        Args:
            path: (str) Path to the file.
            mode: (int) Permissions represented as a bitwise-OR combination of
                os.F_OK, os.R_OK, os.W_OK, and os.X_OK.
            dir_fd: If not `None`, the file descriptor of a directory, with `path`
                being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.
            follow_symlinks: (bool) If `False` and `path` points to a symlink,
                the link itself is queried instead of the linked object.
                New in Python 3.3. New in pyfakefs 3.0.

        Returns:
            bool, `True` if file is accessible, `False` otherwise.
        """
        if follow_symlinks is not None and sys.version_info < (3, 3):
            raise TypeError("access() got an unexpected keyword argument 'follow_symlinks'")
        path = self._path_with_dir_fd(path, self.access, dir_fd)
        try:
            stat_result = self.stat(path, follow_symlinks=follow_symlinks)
        except OSError as os_error:
            if os_error.errno == errno.ENOENT:
                return False
            raise
        return (mode & ((stat_result.st_mode >> 6) & 7)) == mode
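    # Worked example of the owner-permission check above (illustrative numbers,
    # not from pyfakefs): for st_mode 0o640 the owner bits are
    # (0o640 >> 6) & 7 == 6 (read + write). Testing mode = os.R_OK | os.W_OK
    # (== 6) gives 6 & 6 == 6, so access() returns True; testing os.X_OK (== 1)
    # gives 1 & 6 == 0 != 1, so it returns False.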
    def chmod(self, path, mode, dir_fd=None, follow_symlinks=None):
        """Change the permissions of a file as encoded in integer mode.

        Args:
            path: (str) Path to the file.
            mode: (int) Permissions.
            dir_fd: If not `None`, the file descriptor of a directory, with `path`
                being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.
            follow_symlinks: (bool) If `False` and `path` points to a symlink,
                the link itself is queried instead of the linked object.
                New in Python 3.3. New in pyfakefs 3.0.
        """
        if follow_symlinks is None:
            follow_symlinks = True
        elif sys.version_info < (3, 3):
            raise TypeError("chmod() got an unexpected keyword argument 'follow_symlinks'")
        path = self._path_with_dir_fd(path, self.chmod, dir_fd)
        self.filesystem.ChangeMode(path, mode, follow_symlinks)
    def lchmod(self, path, mode):
        """Change the permissions of a file as encoded in integer mode.
        If the file is a link, the permissions of the link are changed.

        Args:
            path: (str) Path to the file.
            mode: (int) Permissions.
        """
        if self.filesystem.is_windows_fs:
            raise NameError("name 'lchmod' is not defined")
        self.filesystem.ChangeMode(path, mode, follow_symlinks=False)
    def utime(self, path, times=None, ns=None, dir_fd=None, follow_symlinks=None):
        """Change the access and modified times of a file.

        Args:
            path: (str) Path to the file.
            times: 2-tuple of int or float numbers, of the form (atime, mtime)
                which is used to set the access and modified times in seconds.
                If None, both times are set to the current time.
            ns: 2-tuple of int numbers, of the form (atime, mtime) which is
                used to set the access and modified times in nanoseconds.
                If None, both times are set to the current time.
                New in Python 3.3. New in pyfakefs 3.3.
            dir_fd: If not `None`, the file descriptor of a directory, with `path`
                being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.
            follow_symlinks: (bool) If `False` and `path` points to a symlink,
                the link itself is queried instead of the linked object.
                New in Python 3.3. New in pyfakefs 3.0.

        Raises:
            TypeError: If anything other than the expected types is
                specified in the passed `times` or `ns` tuple,
                or if the tuple length is not equal to 2.
            ValueError: If both times and ns are specified.
        """
        if follow_symlinks is None:
            follow_symlinks = True
        elif sys.version_info < (3, 3):
            raise TypeError("utime() got an unexpected keyword argument 'follow_symlinks'")
        path = self._path_with_dir_fd(path, self.utime, dir_fd)
        if ns is not None and sys.version_info < (3, 3):
            raise TypeError("utime() got an unexpected keyword argument 'ns'")

        self.filesystem.UpdateTime(path, times, ns, follow_symlinks)
    def chown(self, path, uid, gid, dir_fd=None, follow_symlinks=None):
        """Set ownership of a faked file.

        Args:
            path: (str) Path to the file or directory.
            uid: (int) Numeric uid to set the file or directory to.
            gid: (int) Numeric gid to set the file or directory to.
            dir_fd: (int) If not `None`, the file descriptor of a directory,
                with `path` being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.
            follow_symlinks: (bool) If `False` and path points to a symlink,
                the link itself is changed instead of the linked object.
                New in Python 3.3. New in pyfakefs 3.0.

        Raises:
            OSError: if path does not exist.

        `None` is also allowed for `uid` and `gid`. This permits `os.rename` to
        use `os.chown` even when the source file `uid` and `gid` are `None` (unset).
        """
        if follow_symlinks is None:
            follow_symlinks = True
        elif sys.version_info < (3, 3):
            raise TypeError("chown() got an unexpected keyword argument 'follow_symlinks'")
        path = self._path_with_dir_fd(path, self.chown, dir_fd)
        try:
            file_object = self.filesystem.ResolveObject(path, follow_symlinks, allow_fd=True)
        except IOError as io_error:
            if io_error.errno == errno.ENOENT:
                raise OSError(errno.ENOENT,
                              'No such file or directory in fake filesystem',
                              path)
            raise
        if not ((isinstance(uid, int) or uid is None) and
                (isinstance(gid, int) or gid is None)):
            raise TypeError('An integer is required')
        if uid != -1:
            file_object.st_uid = uid
        if gid != -1:
            file_object.st_gid = gid
    def mknod(self, filename, mode=None, device=None, dir_fd=None):
        """Create a filesystem node named 'filename'.

        Does not support device special files or named pipes as the real os
        module does.

        Args:
            filename: (str) Name of the file to create
            mode: (int) Permissions to use and type of file to be created.
                Default permissions are 0o666. Only the stat.S_IFREG file type
                is supported by the fake implementation. The umask is applied
                to this mode.
            device: not supported in fake implementation
            dir_fd: If not `None`, the file descriptor of a directory,
                with `filename` being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.

        Raises:
            OSError: if called with unsupported options or the file can not be
                created.
        """
        if self.filesystem.is_windows_fs:
            raise AttributeError("module 'os' has no attribute 'mknod'")
        if mode is None:
            mode = stat.S_IFREG | PERM_DEF_FILE
        if device or not mode & stat.S_IFREG:
            raise OSError(errno.ENOENT,
                          'Fake os mknod implementation only supports regular files.')

        filename = self._path_with_dir_fd(filename, self.mknod, dir_fd)
        head, tail = self.path.split(filename)
        if not tail:
            if self.filesystem.Exists(head):
                raise OSError(errno.EEXIST, 'Fake filesystem: %s: %s' %
                              (os.strerror(errno.EEXIST), filename))
            raise OSError(errno.ENOENT, 'Fake filesystem: %s: %s' %
                          (os.strerror(errno.ENOENT), filename))
        if tail in ('.', u'.', '..', u'..'):
            raise OSError(errno.ENOENT, 'Fake filesystem: %s: %s' %
                          (os.strerror(errno.ENOENT), filename))
        if self.filesystem.Exists(filename):
            raise OSError(errno.EEXIST, 'Fake filesystem: %s: %s' %
                          (os.strerror(errno.EEXIST), filename))
        try:
            self.filesystem.AddObject(head, FakeFile(
                tail, mode & ~self.filesystem.umask, filesystem=self.filesystem))
        except IOError as e:
            raise OSError(e.errno, 'Fake filesystem: %s: %s' %
                          (os.strerror(e.errno), filename))
    def symlink(self, link_target, path, dir_fd=None):
        """Creates the specified symlink, pointed at the specified link target.

        Args:
            link_target: The target of the symlink.
            path: Path to the symlink to create.
            dir_fd: If not `None`, the file descriptor of a directory,
                with `link_target` being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.

        Raises:
            OSError: if the file already exists.
        """
        link_target = self._path_with_dir_fd(link_target, self.symlink, dir_fd)
        self.filesystem.CreateLink(path, link_target, create_missing_dirs=False)
    def link(self, oldpath, newpath, dir_fd=None):
        """Create a hard link at new_path, pointing at old_path.
        New in pyfakefs 2.9.

        Args:
            old_path: An existing link to the target file.
            new_path: The destination path to create a new link at.
            dir_fd: If not `None`, the file descriptor of a directory, with `oldpath`
                being relative to this directory.
                New in Python 3.3. New in pyfakefs 3.3.

        Returns:
            the FakeFile object referred to by `oldpath`.

        Raises:
            OSError: if something already exists at new_path.
            OSError: if the parent directory doesn't exist.
            OSError: if on Windows before Python 3.2.
        """
        oldpath = self._path_with_dir_fd(oldpath, self.link, dir_fd)
        self.filesystem.CreateHardLink(oldpath, newpath)
    def fsync(self, file_des):
        """Perform fsync for a fake file (in other words, do nothing).
        New in pyfakefs 2.9.

        Args:
            file_des: file descriptor of the open file.

        Raises:
            OSError: file_des is an invalid file descriptor.
            TypeError: file_des is not an integer.
        """
        # raises if file_des is invalid
        self.filesystem.GetOpenFile(file_des)

    def fdatasync(self, file_des):
        """Perform fdatasync for a fake file (in other words, do nothing).
        New in pyfakefs 2.9.

        Args:
            file_des: file descriptor of the open file.

        Raises:
            OSError: file_des is an invalid file descriptor.
            TypeError: file_des is not an integer.
        """
        # raises if file_des is invalid
        self.filesystem.GetOpenFile(file_des)
    def __getattr__(self, name):
        """Forwards any unfaked calls to the standard os module."""
        return getattr(self._os_module, name)
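

# --- Illustrative usage sketch (not part of pyfakefs) ------------------------
# A minimal demonstration of the FakeOsModule methods above, using only the
# public API defined in this module (FakeFilesystem, FakeOsModule). The paths
# and the demo function name are invented for illustration.
def _demo_fake_os_module():
    filesystem = FakeFilesystem()
    fake_os = FakeOsModule(filesystem)
    fake_os.makedirs('/data/raw')      # parent dirs created in the fake tree
    fake_os.chmod('/data/raw', 0o700)  # mode stored on the fake directory
    assert fake_os.access('/data/raw', os.W_OK)
    fake_os.removedirs('/data/raw')    # removes '/data/raw', then empty '/data'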
class FakeIoModule(object):
    """Uses FakeFilesystem to provide a fake io module replacement.
    New in pyfakefs 2.9.

    Currently only used to wrap `io.open()` which is an alias to `open()`.

    You need a fake_filesystem to use this:
    filesystem = fake_filesystem.FakeFilesystem()
    my_io_module = fake_filesystem.FakeIoModule(filesystem)
    """

    def __init__(self, filesystem):
        """
        Args:
            filesystem: FakeFilesystem used to provide file system information
        """
        self.filesystem = filesystem
        self._io_module = io

    def open(self, file_path, mode='r', buffering=-1, encoding=None,
             errors=None, newline=None, closefd=True, opener=None):
        """Redirect the call to FakeFileOpen.
        See FakeFileOpen.Call() for description.
        """
        if opener is not None and sys.version_info < (3, 3):
            raise TypeError("open() got an unexpected keyword argument 'opener'")
        fake_open = FakeFileOpen(self.filesystem, use_io=True)
        return fake_open(file_path, mode, buffering, encoding, errors, newline,
                         closefd, opener)

    def __getattr__(self, name):
        """Forwards any unfaked calls to the standard io module."""
        return getattr(self._io_module, name)
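

# --- Illustrative usage sketch (not part of pyfakefs) ------------------------
# Mirrors the recipe in the FakeIoModule docstring; the file path and the demo
# function name are invented for illustration.
def _demo_fake_io_module():
    filesystem = FakeFilesystem()
    filesystem.CreateFile('/notes.txt', contents='hello')
    fake_io = FakeIoModule(filesystem)
    with fake_io.open('/notes.txt') as f:  # routed through FakeFileOpen
        assert f.read() == 'hello'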
class FakeFileWrapper(object):
    """Wrapper for a stream object for use by a FakeFile object.

    If the wrapper has any data written to it, it will propagate to
    the FakeFile object on close() or flush().
    """

    def __init__(self, file_object, file_path, update=False, read=False,
                 append=False, delete_on_close=False, filesystem=None,
                 newline=None, binary=True, closefd=True, encoding=None,
                 errors=None, raw_io=False, is_stream=False, use_io=True):
        self._file_object = file_object
        self._file_path = file_path
        self._append = append
        self._read = read
        self.allow_update = update
        self._closefd = closefd
        self._file_epoch = file_object.epoch
        self.raw_io = raw_io
        self._binary = binary
        self.is_stream = is_stream
        contents = file_object.byte_contents
        self._encoding = encoding
        errors = errors or 'strict'
        if encoding:
            # wrap a binary stream in a codec reader/writer for encoded access
            file_wrapper = FakeFileWrapper(
                file_object, file_path, update, read, append,
                delete_on_close=False, filesystem=filesystem, newline=None,
                binary=True, closefd=closefd, is_stream=True)
            codec_info = codecs.lookup(encoding)
            self._io = codecs.StreamReaderWriter(
                file_wrapper, codec_info.streamreader, codec_info.streamwriter,
                errors)
        else:
            if not binary and sys.version_info >= (3, 0):
                io_class = io.StringIO
            else:
                io_class = io.BytesIO
            io_args = {} if binary else {'newline': newline}
            if contents and not binary:
                contents = contents.decode(
                    encoding or locale.getpreferredencoding(False),
                    errors=errors)
            if contents and not update:
                self._io = io_class(contents, **io_args)
            else:
                self._io = io_class(**io_args)

        if contents:
            if update:
                if not encoding:
                    # the encoded case is already handled by the wrapped stream
                    self._io.write(contents)
                if not append:
                    self._io.seek(0)
            else:
                self._read_whence = 0
                if read and not use_io:
                    self._read_seek = 0
                else:
                    self._read_seek = self._io.tell()
        else:
            self._read_whence = 0
            self._read_seek = 0

        if delete_on_close:
            assert filesystem, 'delete_on_close=True requires filesystem'
        self._filesystem = filesystem
        self.delete_on_close = delete_on_close
        # opened_as is the argument the file was opened with
        # (set in FakeFileOpen.Call)
        self.name = file_object.opened_as
        self.filedes = None
    def __enter__(self):
        """To support usage of this fake file with the 'with' statement."""
        return self

    def __exit__(self, type, value, traceback):
        """To support usage of this fake file with the 'with' statement."""
        self.close()

    def _raise(self, message):
        if self.raw_io:
            raise OSError(errno.EBADF, message)
        if sys.version_info < (3, 0):
            raise IOError(message)
        raise io.UnsupportedOperation(message)
    def GetObject(self):
        """Return the FakeFile object that is wrapped by the current instance."""
        return self._file_object

    def fileno(self):
        """Return the file descriptor of the file object."""
        return self.filedes

    def close(self):
        """Close the file."""
        # ignore closing a closed file
        if self not in self._filesystem.open_files:
            return

        if self.allow_update and not self.raw_io:
            self._file_object.SetContents(self._io.getvalue(), self._encoding)
        if self._closefd:
            self._filesystem.CloseOpenFile(self.filedes)
        if self.delete_on_close:
            self._filesystem.RemoveObject(self.GetObject().GetPath())

    def flush(self):
        """Flush file contents to 'disk'."""
        self._check_open_file()
        if self.allow_update:
            self._io.flush()
            self._file_object.SetContents(self._io.getvalue(), self._encoding)
            self._file_epoch = self._file_object.epoch
    def seek(self, offset, whence=0):
        """Move read/write pointer in 'file'."""
        self._check_open_file()
        if not self._append:
            self._io.seek(offset, whence)
        else:
            self._read_seek = offset
            self._read_whence = whence
        if not self.is_stream:
            self.flush()

    def tell(self):
        """Return the file's current position.

        Returns:
            int, file's current position in bytes.
        """
        self._check_open_file()
        self._flush_for_read()
        if not self._append:
            return self._io.tell()
        if self._read_whence:
            write_seek = self._io.tell()
            self._io.seek(self._read_seek, self._read_whence)
            self._read_seek = self._io.tell()
            self._read_whence = 0
            self._io.seek(write_seek)
        return self._read_seek
    def _flush_for_read(self):
        if self._flushes_after_read():
            self.flush()

    def _flushes_after_read(self):
        return (not self.is_stream and
                (not self._filesystem.is_windows_fs or sys.version_info[0] > 2))

    def _sync_io(self):
        """Update the stream with changes to the file object contents."""
        if self._file_epoch == self._file_object.epoch:
            return

        if isinstance(self._io, io.BytesIO):
            contents = self._file_object.byte_contents
        else:
            contents = self._file_object.contents

        is_stream_reader_writer = isinstance(self._io, codecs.StreamReaderWriter)
        if is_stream_reader_writer:
            self._io.stream.allow_update = True
        whence = self._io.tell()
        self._io.seek(0)
        self._io.truncate()
        self._io.write(contents)
        if self._append:
            self._io.seek(0, os.SEEK_END)
        else:
            self._io.seek(whence)

        if is_stream_reader_writer:
            self._io.stream.allow_update = False
        self._file_epoch = self._file_object.epoch
    def _ReadWrapper(self, name):
        """Wrap a stream attribute in a read wrapper.

        Returns a read_wrapper which tracks our own read pointer since the
        stream object has no concept of a different read and write pointer.

        Args:
            name: the name of the attribute to wrap. Should be a read call.

        Returns:
            either a read_error or read_wrapper function.
        """
        io_attr = getattr(self._io, name)

        def read_wrapper(*args, **kwargs):
            """Wrap all read calls to the stream object.

            We do this to track the read pointer separate from the write
            pointer. Anything that wants to read from the stream object
            while we're in append mode goes through this.

            Args:
                *args: pass through args
                **kwargs: pass through kwargs
            Returns:
                Wrapped stream object method
            """
            self._io.seek(self._read_seek, self._read_whence)
            ret_value = io_attr(*args, **kwargs)
            self._read_seek = self._io.tell()
            self._read_whence = 0
            self._io.seek(0, 2)
            return ret_value

        return read_wrapper
    def _OtherWrapper(self, name, writing):
        """Wrap a stream attribute in an other_wrapper.

        Args:
            name: the name of the stream attribute to wrap.

        Returns:
            other_wrapper which is described below.
        """
        io_attr = getattr(self._io, name)

        def other_wrapper(*args, **kwargs):
            """Wrap all other calls to the stream Object.

            We do this to track changes to the write pointer. Anything that
            moves the write pointer in a file open for appending should move
            the read pointer as well.

            Args:
                *args: pass through args
                **kwargs: pass through kwargs
            Returns:
                Wrapped stream object method
            """
            write_seek = self._io.tell()
            ret_value = io_attr(*args, **kwargs)
            if write_seek != self._io.tell():
                self._read_seek = self._io.tell()
                self._read_whence = 0
            if not writing or sys.version_info >= (3,):
                return ret_value

        return other_wrapper
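    # Append-mode bookkeeping in a nutshell (illustrative): the underlying
    # StringIO/BytesIO has a single cursor, so in 'a+' mode the wrapper keeps
    # its own _read_seek/_read_whence. read_wrapper seeks to _read_seek, reads,
    # records the new position, then re-seeks to the end so the next write
    # still appends; other_wrapper mirrors any write-pointer movement back
    # into _read_seek.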
    def _TruncateWrapper(self):
        """Wrap truncate() to allow flush after truncate.

        Returns:
            wrapper which is described below.
        """
        io_attr = getattr(self._io, 'truncate')

        def truncate_wrapper(*args, **kwargs):
            """Wrap truncate call to call flush after truncate."""
            if self._append:
                self._io.seek(self._read_seek, self._read_whence)
            size = io_attr(*args, **kwargs)
            self.flush()
            if not self.is_stream:
                self._file_object.SetSize(size)
                buffer_size = len(self._io.getvalue())
                if buffer_size < size:
                    # pad the buffer so its length matches the truncated size
                    self._io.seek(buffer_size)
                    self._io.write('\x00' * (size - buffer_size))
                    self._file_object.SetContents(self._io.getvalue(), self._encoding)
            if sys.version_info >= (3,):
                return size

        return truncate_wrapper
    def _WriteWrapper(self, name):
        """Wrap write() to adapt return value for Python 2.

        Returns:
            wrapper which is described below.
        """
        io_attr = getattr(self._io, name)

        def write_wrapper(*args, **kwargs):
            """Wrap the write call to return a value only under Python 3."""
            ret_value = io_attr(*args, **kwargs)
            if sys.version_info >= (3,):
                return ret_value

        return write_wrapper
    def Size(self):
        """Return the content size in bytes of the wrapped file."""
        return self._file_object.st_size
    def __getattr__(self, name):
        if self._file_object.IsLargeFile():
            raise FakeLargeFileIoException(self._file_path)

        reading = name.startswith('read') or name == 'next'
        truncate = name == 'truncate'
        writing = name.startswith('write') or truncate
        if reading or writing:
            self._check_open_file()
        if not self._read and reading:
            def read_error(*args, **kwargs):
                """Throw an error unless the argument is zero."""
                if args and args[0] == 0:
                    if self._filesystem.is_windows_fs and self.raw_io:
                        return '' if self._binary else u''
                self._raise('File is not open for reading.')

            return read_error

        if not self.allow_update and writing:
            def write_error(*args, **kwargs):
                """Throw an error."""
                if self.raw_io:
                    if self._filesystem.is_windows_fs and args and len(args[0]) == 0:
                        return 0
                self._raise('File is not open for writing.')

            return write_error

        if reading:
            self._sync_io()
            self._flush_for_read()
        if truncate:
            return self._TruncateWrapper()
        if self._append:
            if reading:
                return self._ReadWrapper(name)
            else:
                return self._OtherWrapper(name, writing)
        if writing:
            return self._WriteWrapper(name)

        return getattr(self._io, name)

    def _check_open_file(self):
        if not self.is_stream and self not in self._filesystem.open_files:
            raise ValueError('I/O operation on closed file')

    def __iter__(self):
        if not self._read:
            self._raise('File is not open for reading')
        return self._io.__iter__()
class FakeDirWrapper(object):
    """Wrapper for a FakeDirectory object to be used in open files list."""

    def __init__(self, file_object, file_path, filesystem):
        self._file_object = file_object
        self._file_path = file_path
        self._filesystem = filesystem
        self.filedes = None

    def GetObject(self):
        """Return the FakeFile object that is wrapped by the current instance."""
        return self._file_object

    def fileno(self):
        """Return the file descriptor of the file object."""
        return self.filedes

    def close(self):
        """Close the directory."""
        self._filesystem.CloseOpenFile(self.filedes)
class FakeFileOpen(object):
    """Faked `file()` and `open()` function replacements.

    Returns FakeFile objects in a FakeFilesystem in place of the `file()`
    or `open()` function.
    """
    __name__ = 'FakeFileOpen'

    def __init__(self, filesystem, delete_on_close=False, use_io=False, raw_io=False):
        """init.

        Args:
            filesystem: FakeFilesystem used to provide file system information
            delete_on_close: optional boolean, deletes file on close()
            use_io: if True, the io.open() version is used (ignored for Python 3,
                where io.open() is an alias to open() )
        """
        self.filesystem = filesystem
        self._delete_on_close = delete_on_close
        self._use_io = (use_io or
                        sys.version_info >= (3, 0) or
                        platform.python_implementation() == 'PyPy')
        self.raw_io = raw_io

    def __call__(self, *args, **kwargs):
        """Redirects calls to file() or open() to appropriate method."""
        if self._use_io:
            return self.Call(*args, **kwargs)
        else:
            return self._call_ver2(*args, **kwargs)

    def _call_ver2(self, file_path, mode='r', buffering=-1, flags=None, open_modes=None):
        """Limits args of open() or file() for Python 2.x versions."""
        # backwards compatibility: the mode argument used to be named flags
        mode = flags or mode
        return self.Call(file_path, mode, buffering, open_modes=open_modes)
    def Call(self, file_, mode='r', buffering=-1, encoding=None,
             errors=None, newline=None, closefd=True, opener=None,
             open_modes=None):
        """Return a file-like object with the contents of the target file object.

        Args:
            file_: path to target file or a file descriptor.
            mode: additional file modes. All r/w/a/x r+/w+/a+ modes are supported.
                't', and 'U' are ignored, e.g., 'wU' is treated as 'w'. 'b' sets
                binary mode, no end of line translations in StringIO.
            buffering: ignored. (Used for signature compliance with __builtin__.open)
            encoding: the encoding used to encode unicode strings / decode bytes.
                New in pyfakefs 2.9.
            errors: ignored, this relates to encoding.
            newline: controls universal newlines, passed to stream object.
            closefd: if a file descriptor rather than file name is passed, and set
                to false, then the file descriptor is kept open when file is closed.
            opener: not supported.
            open_modes: Modes for opening files if called from low-level API

        Returns:
            a file-like object containing the contents of the target file.

        Raises:
            IOError: if the target object is a directory, the path is invalid or
                permission is denied.
        """
        orig_modes = mode  # kept for error messages
        # handle binary mode for Python 2 or explicit 'b'
        binary = sys.version_info < (3, 0) or 'b' in mode
        # normalize modes: strip 't'/'b' and map universal-newline variants
        mode = mode.replace('t', '').replace('b', '')
        mode = mode.replace('rU', 'r').replace('U', 'r')

        if not self.raw_io:
            if mode not in _OPEN_MODE_MAP:
                raise ValueError('Invalid mode: %r' % orig_modes)
            open_modes = _OpenModes(*_OPEN_MODE_MAP[mode])

        file_object = None
        filedes = None
        # opening a file descriptor
        if isinstance(file_, int):
            filedes = file_
            wrapper = self.filesystem.GetOpenFile(filedes)
            self._delete_on_close = wrapper.delete_on_close
            file_object = self.filesystem.GetOpenFile(filedes).GetObject()
            file_path = file_object.name
        else:
            file_path = file_
            real_path = self.filesystem.ResolvePath(file_path, raw_io=self.raw_io)
            if self.filesystem.Exists(file_path):
                file_object = self.filesystem.GetObjectFromNormalizedPath(real_path)
            closefd = True

        error_class = OSError if self.raw_io else IOError
        if open_modes.must_not_exist and (file_object or self.filesystem.IsLink(file_path)):
            raise error_class(errno.EEXIST, 'File exists', file_path)
        if file_object:
            if (open_modes.can_read and not file_object.st_mode & PERM_READ or
                    open_modes.can_write and not file_object.st_mode & PERM_WRITE):
                raise error_class(errno.EACCES, 'Permission denied', file_path)
            if open_modes.can_write:
                if open_modes.truncate:
                    file_object.SetContents('')
        else:
            if open_modes.must_exist:
                raise error_class(errno.ENOENT, 'No such file or directory', file_path)
            file_object = self.filesystem.CreateFileInternally(
                real_path, create_missing_dirs=False, apply_umask=True,
                raw_io=self.raw_io)

        if stat.S_ISDIR(file_object.st_mode):
            if self.filesystem.is_windows_fs:
                raise OSError(errno.EPERM, 'Fake file object: is a directory', file_path)
            else:
                raise error_class(errno.EISDIR, 'Fake file object: is a directory', file_path)

        # remember the argument the file was opened with
        file_object.opened_as = file_path

        fakefile = FakeFileWrapper(file_object,
                                   file_path,
                                   update=open_modes.can_write,
                                   read=open_modes.can_read,
                                   append=open_modes.append,
                                   delete_on_close=self._delete_on_close,
                                   filesystem=self.filesystem,
                                   newline=newline,
                                   binary=binary,
                                   closefd=closefd,
                                   encoding=encoding,
                                   errors=errors,
                                   raw_io=self.raw_io,
                                   use_io=self._use_io)
        if filedes is not None:
            fakefile.filedes = filedes
            # replace the file wrapper in the open files table
            self.filesystem.open_files[filedes] = fakefile
        else:
            fakefile.filedes = self.filesystem.AddOpenFile(fakefile)
        return fakefile
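

# --- Illustrative usage sketch (not part of pyfakefs) ------------------------
# Shows FakeFileOpen standing in for the built-in open(); the path and the
# demo function name are invented. Writes propagate to the FakeFile on close().
def _demo_fake_file_open():
    filesystem = FakeFilesystem()
    fake_open = FakeFileOpen(filesystem)
    with fake_open('/out.txt', 'w') as f:  # creates the fake file at the root
        f.write('fake contents')
    assert fake_open('/out.txt').read() == 'fake contents'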
def _RunDoctest():
    import doctest
    from pyfakefs import fake_filesystem
    return doctest.testmod(fake_filesystem)


if __name__ == '__main__':
    _RunDoctest()
"[email protected]"
] | |
e617b920f9e2568d05f0b9b81923724255ed6437 | dcbef06d5a00f07756339b9e62c684dec2fee425 | /nuitka/build/inline_copy/lib/scons-4.3.0/SCons/Platform/darwin.py | f997a7d9e6f3b45fabc0a4a6ede8551a69bcf4bd | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | Nuitka/Nuitka | f9543d8d95bfa0b81d4e60af0dfad99fb72893a4 | d87faf2f7e1d6ed9bfe4cf8c1d648f34307e33f2 | refs/heads/develop | 2023-08-28T14:00:32.861328 | 2023-08-27T09:16:45 | 2023-08-27T09:16:45 | 9,626,741 | 8,573 | 599 | Apache-2.0 | 2023-09-13T02:49:41 | 2013-04-23T15:40:33 | Python | UTF-8 | Python | false | false | 2,630 | py | # MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
from . import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
env['HOST_OS'] = 'darwin'
# put macports paths at front to override Apple's versions, fink path is after
# For now let people who want Macports or Fink tools specify it!
# env['ENV']['PATH'] = '/opt/local/bin:/opt/local/sbin:' + env['ENV']['PATH'] + ':/sw/bin'
# Store extra system paths in env['ENV']['PATHOSX']
filelist = ['/etc/paths',]
# make sure this works on Macs with Tiger or earlier
try:
dirlist = os.listdir('/etc/paths.d')
except FileNotFoundError:
dirlist = []
for file in dirlist:
filelist.append('/etc/paths.d/'+file)
for file in filelist:
if os.path.isfile(file):
with open(file, 'r') as f:
lines = f.readlines()
for line in lines:
if line:
env.AppendENVPath('PATHOSX', line.strip('\n'))
# Not sure why this wasn't the case all along?
if env['ENV'].get('PATHOSX', False) and os.environ.get('SCONS_USE_MAC_PATHS', False):
env.AppendENVPath('PATH',env['ENV']['PATHOSX'])
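
# Illustrative note (not part of SCons): platform modules like this one are
# not imported directly; SCons looks them up by name and calls generate(env).
# A minimal, assumption-laden sketch of how that happens:
#
#     from SCons.Environment import Environment
#     env = Environment(platform='darwin')  # invokes this module's generate(env)
#     print(env['SHLIBSUFFIX'])             # -> '.dylib'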
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| [
"[email protected]"
] | |
b399c45fda01596366d609a87fd0320da5d59894 | f7d1f11fd4fa5e71f3951b7fd7b4b20fef2a21b1 | /venv/Scripts/pip3-script.py | 10837b59bbe74b22be04f86ffc0e12c90479edd6 | [] | no_license | ferry-luo/ferry_pycharm_projects | 130ea7ccd5d605b0965cd1bbc9b5511daa333afb | 13a1a4a50f1c9fddf76ff0f56bf11f5d9a940467 | refs/heads/master | 2022-12-30T10:17:07.921729 | 2020-10-18T01:58:23 | 2020-10-18T01:58:23 | 268,465,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 400 | py | #!F:\AAA-ferry\FerryProject\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
)
| [
"[email protected]"
] | |
f29c5f5a8fd629051b2a7cb2e2d5a0557af1f558 | 2d9e5a30ac44bf15296a058a198b97fa1e6574c0 | /galaxy_ml/binarize_target/_binarize_estimators.py | 0ba49fca7485267cc0fb472bafdd5ef680a5af56 | [
"MIT"
] | permissive | kxk302/Galaxy-ML | 8397e533b1529354fc1d5e7b147cd808b129efd8 | d42bea8591f691c44fd0523d567c1dfa8a87bd01 | refs/heads/master | 2023-04-13T21:44:18.478631 | 2021-03-23T17:54:46 | 2021-03-23T17:54:46 | 346,071,096 | 0 | 0 | MIT | 2021-03-12T23:08:31 | 2021-03-09T16:28:41 | null | UTF-8 | Python | false | false | 10,288 | py | import numpy as np
from sklearn.base import (BaseEstimator, clone, RegressorMixin,
TransformerMixin)
from sklearn.utils.validation import (check_array, check_is_fitted,
column_or_1d)
class BinarizeTargetClassifier(BaseEstimator, RegressorMixin):
"""
Convert continuous target to binary labels (True and False)
and apply a classification estimator.
Parameters
----------
classifier : object
Estimator object such as derived from sklearn `ClassifierMixin`.
z_score : float, default=-1.0
Threshold value based on z_score. Will be ignored when
fixed_value is set
value : float, default=None
Threshold value
less_is_positive : boolean, default=True
When target is less the threshold value, it will be converted
to True, False otherwise.
verbose : int, default=0
If greater than 0, print discretizing info.
Attributes
----------
classifier_ : object
Fitted classifier
discretize_value : float
The threshold value used to discretize True and False targets
"""
def __init__(self, classifier, z_score=-1, value=None,
less_is_positive=True, verbose=0):
self.classifier = classifier
self.z_score = z_score
self.value = value
self.less_is_positive = less_is_positive
self.verbose = verbose
def fit(self, X, y, sample_weight=None, **fit_params):
"""
Convert y to True and False labels and then fit the classifier
with X and new y
Returns
------
self: object
"""
y = check_array(y, accept_sparse=False, force_all_finite=True,
ensure_2d=False, dtype='numeric')
y = column_or_1d(y)
if self.value is None:
discretize_value = y.mean() + y.std() * self.z_score
else:
            discretize_value = self.value
self.discretize_value = discretize_value
if self.less_is_positive:
y_trans = y < discretize_value
else:
y_trans = y > discretize_value
n_positives = np.sum(y_trans)
# for older version compatibility
if self.verbose and self.verbose > 0:
print("{0} out of total {1} samples are discretized into "
"positive.".format(n_positives, X.shape[0]))
self.classifier_ = clone(self.classifier)
keys = list(fit_params.keys())
for key in keys:
if not key.startswith('classifier__'):
raise ValueError("fit_params for BinarizeTargetClassifier "
"must start with `classifier__`")
fit_params[key[12:]] = fit_params.pop(key)
if sample_weight is not None:
self.classifier_.fit(X, y_trans,
sample_weight=sample_weight,
**fit_params)
else:
self.classifier_.fit(X, y_trans, **fit_params)
# Used in RFE or SelectFromModel
if hasattr(self.classifier_, 'feature_importances_'):
self.feature_importances_ = self.classifier_.feature_importances_
if hasattr(self.classifier_, 'coef_'):
self.coef_ = self.classifier_.coef_
if hasattr(self.classifier_, 'n_outputs_'):
self.n_outputs_ = self.classifier_.n_outputs_
if hasattr(self.classifier_, 'n_features_'):
self.n_features_ = self.classifier_.n_features_
return self
def predict(self, X):
"""Predict using a fitted estimator
"""
return self.classifier_.predict(X)
def decision_function(self, X):
"""Predict using a fitted estimator
"""
return self.classifier_.decision_function(X)
def predict_proba(self, X):
"""Predict using a fitted estimator
"""
return self.classifier_.predict_proba(X)
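
# Illustrative usage sketch (not part of galaxy_ml): the wrapper accepts any
# scikit-learn classifier; the estimator choice and the random data below are
# assumptions made for demonstration only.
def _example_binarize_classifier():
    from sklearn.linear_model import LogisticRegression
    rng = np.random.RandomState(0)
    X = rng.rand(20, 3)
    y = rng.rand(20)  # continuous target; binarized internally during fit
    clf = BinarizeTargetClassifier(LogisticRegression(), z_score=-1)
    clf.fit(X, y)
    return clf.predict(X), clf.discretize_value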
class BinarizeTargetRegressor(BaseEstimator, RegressorMixin):
"""
Extend regression estimator to have discretize_value
Parameters
----------
regressor : object
Estimator object such as derived from sklearn `RegressionMixin`.
z_score : float, default=-1.0
Threshold value based on z_score. Will be ignored when
value is set
value : float, default=None
Threshold value
less_is_positive : boolean, default=True
When target is less the threshold value, it will be converted
to True, False otherwise.
verbose : int, default=0
If greater than 0, print discretizing info.
Attributes
----------
regressor_ : object
Fitted regressor
discretize_value : float
The threshold value used to discretize True and False targets
"""
def __init__(self, regressor, z_score=-1, value=None,
less_is_positive=True, verbose=0):
self.regressor = regressor
self.z_score = z_score
self.value = value
self.less_is_positive = less_is_positive
self.verbose = verbose
def fit(self, X, y, sample_weight=None, **fit_params):
"""
Calculate the discretize_value fit the regressor with traning data
Returns
------
self: object
"""
y = check_array(y, accept_sparse=False, force_all_finite=True,
ensure_2d=False, dtype='numeric')
y = column_or_1d(y)
if not np.all((y >= 0) & (y <= 1)):
raise ValueError("The target value of BinarizeTargetRegressor "
"must be in the range [0, 1]")
if self.value is None:
discretize_value = y.mean() + y.std() * self.z_score
else:
            discretize_value = self.value
self.discretize_value = discretize_value
if self.less_is_positive:
n_positives = np.sum(y < discretize_value)
else:
n_positives = np.sum(y > discretize_value)
# for older version compatibility
if self.verbose and self.verbose > 0:
print("{0} out of total {1} samples are discretized into "
"positive.".format(n_positives, X.shape[0]))
self.regressor_ = clone(self.regressor)
keys = list(fit_params.keys())
for key in keys:
if not key.startswith('regressor__'):
raise ValueError("fit_params for BinarizeTargetClassifier "
"must start with `regressor__`")
fit_params[key[11:]] = fit_params.pop(key)
if sample_weight is not None:
self.regressor_.fit(X, y,
sample_weight=sample_weight,
**fit_params)
else:
self.regressor_.fit(X, y, **fit_params)
# attach classifier attributes
if hasattr(self.regressor_, 'feature_importances_'):
self.feature_importances_ = self.regressor_.feature_importances_
if hasattr(self.regressor_, 'coef_'):
self.coef_ = self.regressor_.coef_
if hasattr(self.regressor_, 'n_outputs_'):
self.n_outputs_ = self.regressor_.n_outputs_
if hasattr(self.regressor_, 'n_features_'):
self.n_features_ = self.regressor_.n_features_
return self
def predict(self, X):
"""Predict target value of X
"""
check_is_fitted(self, 'regressor_')
return self.regressor_.predict(X)
def decision_function(self, X):
"""
Output the proba for True label
For use in the binarize target scorers.
"""
pred = self.predict(X)
if self.less_is_positive:
pred = 1 - pred
return pred
def predict_label(self, X, cutoff):
""" output a label based on cutoff value
Parameters
----------
cutoff : float
"""
scores = self.decision_function(X)
return scores > cutoff
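
    # Note on scoring direction (illustrative numbers): with
    # less_is_positive=True, decision_function returns 1 - predict(X), so a
    # smaller predicted value yields a larger positive-class score; e.g. a
    # prediction of 0.2 scores 0.8 and clears a predict_label cutoff of 0.5.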
class BinarizeTargetTransformer(BaseEstimator, TransformerMixin):
"""
Extend transformaer to work for binarized target.
Parameters
----------
transformer : object
Estimator object such as derived from sklearn `TransformerMixin`,
including feature_selector and preprocessor
z_score : float, default=-1.0
Threshold value based on z_score. Will be ignored when
fixed_value is set
value : float, default=None
Threshold value
less_is_positive : boolean, default=True
When target is less the threshold value, it will be converted
to True, False otherwise.
Attributes
----------
transformer_ : object
Fitted regressor
discretize_value : float
The threshold value used to discretize True and False targets
"""
def __init__(self, transformer, z_score=-1, value=None,
less_is_positive=True):
self.transformer = transformer
self.z_score = z_score
self.value = value
self.less_is_positive = less_is_positive
def fit(self, X, y):
"""
Convert y to True and False labels and then fit the transformer
with X and new y
Returns
------
self: object
"""
y = check_array(y, accept_sparse=False, force_all_finite=True,
ensure_2d=False, dtype='numeric')
y = column_or_1d(y)
if self.value is None:
discretize_value = y.mean() + y.std() * self.z_score
else:
            discretize_value = self.value
self.discretize_value = discretize_value
if self.less_is_positive:
y_trans = y < discretize_value
else:
y_trans = y > discretize_value
self.transformer_ = clone(self.transformer)
self.transformer_.fit(X, y_trans)
return self
def transform(self, X):
"""Transform X
Parameters
----------
X : array of shape [n_samples, n_features]
Returns
-------
X_r : array
"""
check_is_fitted(self, 'transformer_')
X = check_array(X, dtype=None, accept_sparse='csr')
return self.transformer_.transform(X)
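
# Illustrative pipeline sketch (not part of galaxy_ml): the transformer slots
# into a scikit-learn Pipeline ahead of one of the estimators above. The
# SelectKBest/LinearRegression choices are assumptions for demonstration.
def _example_binarize_pipeline():
    from sklearn.feature_selection import SelectKBest
    from sklearn.linear_model import LinearRegression
    from sklearn.pipeline import Pipeline
    return Pipeline([
        ('select', BinarizeTargetTransformer(SelectKBest(k=2))),
        ('regress', BinarizeTargetRegressor(LinearRegression())),
    ])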
| [
"[email protected]"
] | |
f06d43fb2d6f3095a730d2a824cce59219131f2c | 63ff60a5fde00c79fc4cd72ebd88595d14925921 | /picker/migrations/0001_initial.py | 06d0748e23c3c998488055b09f20e15d79015dab | [
"MIT"
] | permissive | dakrauth/picker | e797e99cb7859b2f33998aca2e84e432c0c19d8d | 084bc22cf50b200333a6c76d9577463eda6a0948 | refs/heads/main | 2022-09-15T03:43:28.400665 | 2022-09-03T05:57:56 | 2022-09-03T05:57:56 | 42,045,430 | 2 | 2 | MIT | 2022-01-07T17:21:16 | 2015-09-07T09:58:39 | Python | UTF-8 | Python | false | false | 10,602 | py | # -*- coding: utf-8 -*-
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Alias',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=50)),
],
),
migrations.CreateModel(
name='Conference',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=50)),
('abbr', models.CharField(max_length=8)),
],
),
migrations.CreateModel(
name='Division',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=50)),
('conference', models.ForeignKey(on_delete=models.CASCADE, to='picker.Conference')),
],
),
migrations.CreateModel(
name='Game',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('kickoff', models.DateTimeField()),
('tv', models.CharField(max_length=8, verbose_name=b'TV', blank=True)),
('notes', models.TextField(blank=True)),
('category', models.CharField(default='REG', max_length=4, choices=[('REG', b'Regular Season'), ('POST', b'Post Season')])),
('status', models.CharField(default='U', max_length=1, choices=[('U', b'Unplayed'), ('T', b'Tie'), ('H', b'Home Win'), ('A', b'Away Win')])),
('location', models.CharField(max_length=50, blank=True)),
],
options={
'ordering': ('kickoff', 'away'),
},
),
migrations.CreateModel(
name='GamePick',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('game', models.ForeignKey(on_delete=models.CASCADE, related_name='gamepicks', to='picker.Game')),
],
options={
'ordering': ('game__kickoff', 'game__away'),
},
),
migrations.CreateModel(
name='GameSet',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('season', models.PositiveSmallIntegerField()),
('week', models.PositiveSmallIntegerField()),
('points', models.PositiveSmallIntegerField(default=0)),
('opens', models.DateTimeField()),
('closes', models.DateTimeField()),
],
options={
'ordering': ('season', 'week'),
},
),
migrations.CreateModel(
name='League',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=50)),
('abbr', models.CharField(max_length=8)),
('logo', models.ImageField(null=True, upload_to=b'picker/logos', blank=True)),
('is_pickable', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='PickSet',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('points', models.PositiveSmallIntegerField(default=0)),
('correct', models.PositiveSmallIntegerField(default=0)),
('wrong', models.PositiveSmallIntegerField(default=0)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('strategy', models.CharField(default='USER', max_length=4, choices=[('USER', b'User'), ('RAND', b'Random'), ('HOME', b'Home Team'), ('BEST', b'Best Record')])),
('user', models.ForeignKey(on_delete=models.CASCADE, related_name='picksets', to=settings.AUTH_USER_MODEL)),
('week', models.ForeignKey(on_delete=models.CASCADE, related_name='picksets', to='picker.GameSet')),
],
),
migrations.CreateModel(
name='Playoff',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('season', models.PositiveSmallIntegerField()),
('kickoff', models.DateTimeField()),
('league', models.ForeignKey(on_delete=models.CASCADE, to='picker.League')),
],
),
migrations.CreateModel(
name='PlayoffPicks',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('picks', models.TextField(blank=True)),
('playoff', models.ForeignKey(on_delete=models.CASCADE, to='picker.Playoff')),
('user', models.ForeignKey(on_delete=models.SET_NULL, blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
),
migrations.CreateModel(
name='PlayoffTeam',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('seed', models.PositiveSmallIntegerField()),
('playoff', models.ForeignKey(on_delete=models.CASCADE, to='picker.Playoff')),
],
options={
'ordering': ('seed',),
},
),
migrations.CreateModel(
name='Preference',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('status', models.CharField(default='ACTV', max_length=4, choices=[('ACTV', b'Active'), ('IDLE', b'Inactive'), ('SUSP', b'Suspended')])),
('autopick', models.CharField(default='RAND', max_length=4, choices=[('NONE', b'None'), ('RAND', b'Random')])),
],
),
migrations.CreateModel(
name='Team',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=50)),
('abbr', models.CharField(max_length=8, blank=True)),
('nickname', models.CharField(max_length=50)),
('location', models.CharField(max_length=100, blank=True)),
('image', models.CharField(max_length=50, blank=True)),
('colors', models.CharField(max_length=40, blank=True)),
('logo', models.ImageField(null=True, upload_to=b'picker/logos', blank=True)),
('conference', models.ForeignKey(on_delete=models.CASCADE, to='picker.Conference')),
('division', models.ForeignKey(on_delete=models.SET_NULL, blank=True, to='picker.Division', null=True)),
('league', models.ForeignKey(on_delete=models.CASCADE, to='picker.League')),
],
options={
'ordering': ('name',),
},
),
migrations.AddField(
model_name='preference',
name='favorite_team',
field=models.ForeignKey(on_delete=models.SET_NULL, blank=True, to='picker.Team', null=True),
),
migrations.AddField(
model_name='preference',
name='league',
field=models.ForeignKey(on_delete=models.CASCADE, to='picker.League'),
),
migrations.AddField(
model_name='preference',
name='user',
field=models.OneToOneField(on_delete=models.CASCADE, related_name='picker_preferences', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='playoffteam',
name='team',
field=models.ForeignKey(on_delete=models.CASCADE, to='picker.Team'),
),
migrations.AddField(
model_name='gameset',
name='byes',
field=models.ManyToManyField(related_name='bye_set', verbose_name=b'Bye Teams', to='picker.Team'),
),
migrations.AddField(
model_name='gameset',
name='league',
field=models.ForeignKey(on_delete=models.CASCADE, related_name='game_set', to='picker.League'),
),
migrations.AddField(
model_name='gamepick',
name='pick',
field=models.ForeignKey(on_delete=models.CASCADE, to='picker.PickSet'),
),
migrations.AddField(
model_name='gamepick',
name='winner',
field=models.ForeignKey(on_delete=models.SET_NULL, blank=True, to='picker.Team', null=True),
),
migrations.AddField(
model_name='game',
name='away',
field=models.ForeignKey(on_delete=models.CASCADE, related_name='away_games', to='picker.Team'),
),
migrations.AddField(
model_name='game',
name='home',
field=models.ForeignKey(on_delete=models.CASCADE, related_name='home_games', to='picker.Team'),
),
migrations.AddField(
model_name='game',
name='week',
field=models.ForeignKey(on_delete=models.CASCADE, related_name='games', to='picker.GameSet'),
),
migrations.AddField(
model_name='conference',
name='league',
field=models.ForeignKey(on_delete=models.CASCADE, to='picker.League'),
),
migrations.AddField(
model_name='alias',
name='team',
field=models.ForeignKey(on_delete=models.CASCADE, to='picker.Team'),
),
migrations.AlterUniqueTogether(
name='pickset',
unique_together=set([('user', 'week')]),
),
]
| [
"[email protected]"
] | |
545c240dc43ec38cffd97004bd6125bf765692d6 | 5e49afd9c6ca73d7074c7ae220d5186fe4f44c08 | /setup.py | 100a3637c77fb07f8f43449aadc017a221620a02 | [
"MIT"
] | permissive | pylover/sharedlists | c2842618e7f6f9fea9dfefd710b9f94f36c19e7c | b020be26d3a64a3cdb9417a066a454b5b92006c5 | refs/heads/master | 2020-06-22T02:55:47.892070 | 2019-08-04T20:37:20 | 2019-08-04T20:37:20 | 197,615,918 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 801 | py | import re
from os.path import join, dirname
from setuptools import setup, find_packages
# reading package version (same way the sqlalchemy does)
with open(join(dirname(__file__), 'sharedlists', '__init__.py')) as v_file:
package_version = re.compile('.*__version__ = \'(.*?)\'', re.S).\
match(v_file.read()).group(1)
dependencies = [
'restfulpy >= 3.4, < 4',
'easycli >= 1.4, < 2',
# Deployment
'gunicorn',
]
setup(
name='sharedlists',
version=package_version,
packages=find_packages(exclude=['tests']),
install_requires=dependencies,
include_package_data=True,
license='MIT',
entry_points={
'console_scripts': [
'sharedlists = sharedlists:server_main',
'bee = sharedlists:client_main'
]
}
)
| [
"[email protected]"
] | |
87e06c5f092bc078e57470af3c2e97ccb8e14638 | 6c137e70bb6b1b618fbbceddaeb74416d387520f | /lantz/lantz/drivers/agilent/__init__.py | 6cb05fee840da445ceb8ceea76d2bfa2c2dd3fe9 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | zhong-lab/code | fe497c75662f8c3b7ab3c01e7e351bff6d5e8d15 | b810362e06b44387f0768353c602ec5d29b551a2 | refs/heads/master | 2023-01-28T09:46:01.448833 | 2022-06-12T22:53:47 | 2022-06-12T22:53:47 | 184,670,765 | 2 | 7 | BSD-2-Clause | 2022-12-08T21:46:15 | 2019-05-02T23:37:39 | Python | UTF-8 | Python | false | false | 593 | py | # -*- coding: utf-8 -*-
"""
lantz.drivers.agilent
~~~~~~~~~~~~~~~~~~~~~~
:company: Agilent Technologies.
:description: Manufactures test instruments for research and industrial applications
:website: http://www.agilent.com/home
----
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from .n51xx import N51xx
from .ag33220A import Ag33220A
from .ag81130a import Ag81130A
from .e8257c import E8257C
from .AG33522a import AG33522A
__all__ = ['N51xx', 'Ag33220A', 'Ag81130A', 'AG33522A', 'E8257C']
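
# Illustrative import sketch (not part of the package): a driver is typically
# instantiated with a VISA-style resource string and initialized before use.
# The address below is hypothetical.
#
#     from lantz.drivers.agilent import Ag33220A
#     inst = Ag33220A('GPIB0::10::INSTR')  # hypothetical VISA address
#     inst.initialize()                    # lantz drivers are initialized before use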
| [
"none"
] | none |
1ebf265851834e1dbde7d2c4c549c1c82252350c | 02a535bbe64f52c112aef2b6b2abce5e2bf46933 | /Alquileres/Alquileres/urls.py | ae334f9df5de595c8a6305904a4afacd6ebe427a | [] | no_license | RicardoWebProject/BackendAlquileres | ff05e8a46c9cdf0c12edc9e8ff04c07d3e9b4961 | 72b12a459830bd4348caf763afa3d65ad60b8603 | refs/heads/master | 2022-11-22T04:07:45.020079 | 2020-07-27T01:10:05 | 2020-07-27T01:10:05 | 282,762,709 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,028 | py | """Alquileres URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('clientes.urls')),
path('', include('maquinaria.urls')),
path('', include('malquileres.urls')),
] + static (settings.MEDIA_URL, document_root= settings.MEDIA_ROOT)
| [
"[email protected]"
] | |
7107ab73e45047060a6a8580092971ab13b86db0 | ab616e26a623fe7e81d30ba7b86fabe4a3658794 | /LibriSpeech/Get_Meta_LibriSpeech.py | 39a801bf12ffad5efc95d8bb95ea6ef3ab2b9afa | [] | no_license | ruclion/linears_decoder | 1d2367fbfa8fdde3ae0a8c53e5e82ed7035d1eed | 93cf874f87a601584c07ba5e4b673e401e9e7c90 | refs/heads/master | 2022-12-16T14:25:34.373534 | 2020-09-22T14:42:58 | 2020-09-22T14:42:58 | 289,808,115 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,973 | py | import os
# import numpy as np
# from audio import wav2mfcc_v2, load_wav
wavs_dir = 'wavs'
ppgs_dir = 'alignments'
zhaoxt_train = 'train.txt'
zhaoxt_test = 'test.txt'
meta_list_fromWavs = []
meta_list_fromPPGs = []
meta_list_fromZhaoxt = []
meta_list = []
meta_path = 'meta.txt'
def main():
# 2391-145015-0048
f = open(zhaoxt_train, 'r')
a = [t.strip() for t in f.readlines()]
meta_list_fromZhaoxt.extend(a)
f = open(zhaoxt_test, 'r')
a = [t.strip() for t in f.readlines()]
meta_list_fromZhaoxt.extend(a)
print('Zhaoxts:', len(meta_list_fromZhaoxt), meta_list_fromZhaoxt[0])
# wavs
for second_dir in os.listdir(wavs_dir):
for third_dir in os.listdir(os.path.join(wavs_dir,second_dir)):
third_wavs_dir = os.path.join(os.path.join(wavs_dir,second_dir),third_dir)
wav_files = [f[:-4] for f in os.listdir(third_wavs_dir) if f.endswith('.wav')]
# print('Extracting MFCC from {}...'.format(third_wavs_dir))
meta_list_fromWavs.extend(wav_files)
print('Wavs:', len(meta_list_fromWavs), meta_list_fromWavs[0])
# 100-121669-0000 1 1 1 1 1 1 1
for f_path in os.listdir(ppgs_dir):
f = open(os.path.join(ppgs_dir, f_path), 'r')
a = f.readlines()
for line in a:
line = line.strip().split(' ')
meta_list_fromPPGs.append(line[0])
print('PPGs:', len(meta_list_fromPPGs), meta_list_fromPPGs[0])
    # Mainly use Xintao's list; the wav and ppg lists are only checked for
    # membership. Takes about a minute -- brute-force lookups are fine here.
for idx in meta_list_fromZhaoxt:
if idx in meta_list_fromPPGs and idx in meta_list_fromWavs:
meta_list.append(idx)
else:
            print('Why not used:', idx)
# break
f = open(meta_path, 'w')
for idx in meta_list:
f.write(idx + '\n')
return
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
39b4713bb06e115f5fef7f696c1b2c73fcf47adf | 1ed536ef1527e6655217e731f622d643ece49c2b | /scripts/align_all_vs_all.py | c152b8b783b8dffd40812fc5cb7771efc2c163fb | [] | no_license | siping/cgat | de0f7af124eb38c72d7dece78fff83ff92ddbf96 | aa4cc85ffdc53998ea1a5ac5516df2d16c254d2e | refs/heads/master | 2021-01-22T13:03:18.060139 | 2013-10-07T15:53:55 | 2013-10-07T15:53:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,781 | py | '''
align_all_vs_all.py - all-vs-all pairwise alignment
===================================================
:Author: Andreas Heger
:Release: $Id$
:Date: |today|
:Tags: Python
Purpose
-------
This script computes all-vs-all alignments between
sequences in a :term:`fasta` formatted file.
Currently only Smith-Waterman protein alignment is
implemented.
Usage
-----
Example::
python align_all_vs_all.py --help
Type::
python align_all_vs_all.py --help
for command line help.
Command line options
--------------------
'''
import os
import sys
import string
import re
import getopt
import time
import optparse
import math
import tempfile
import CGAT.Experiment as E
import alignlib
import CGAT.FastaIterator as FastaIterator
""" program $Id: align_all_vs_all.py 2782 2009-09-10 11:40:29Z andreas $
"""
if __name__ == "__main__":
parser = E.OptionParser( version = "%prog version: $Id: align_all_vs_all.py 2782 2009-09-10 11:40:29Z andreas $")
parser.add_option("-s", "--sequences", dest="filename_sequences", type="string",
help="input file with sequences" )
parser.set_defaults(
filename_sequences = None,
gop = -10.0,
gep = -1.0,
)
(options, args) = E.Start( parser, add_pipe_options = True )
if options.filename_sequences:
infile = open(options.filename_sequences, "r")
else:
infile = sys.stdin
    iterator = FastaIterator.FastaIterator(infile)
sequences = []
while 1:
cur_record = iterator.next()
if cur_record is None: break
sequences.append( (cur_record.title, alignlib.makeSequence(re.sub( " ", "", cur_record.sequence)) ) )
if options.filename_sequences:
infile.close()
alignator = alignlib.makeAlignatorFullDP( options.gop, options.gep )
map_a2b = alignlib.makeAlignataVector()
nsequences = len(sequences)
for x in range(0,nsequences-1):
for y in range(x+1, nsequences):
alignator.Align( sequences[x][1], sequences[y][1], map_a2b)
row_ali, col_ali = alignlib.writeAlignataCompressed( map_a2b )
options.stdout.write( "%s\t%s\t%i\t%i\t%i\t%s\t%i\t%i\t%s\t%i\t%i\t%i\t%i\n" % (\
sequences[x][0], sequences[y][0],
map_a2b.getScore(),
map_a2b.getRowFrom(),
map_a2b.getRowTo(),
row_ali,
map_a2b.getColFrom(),
map_a2b.getColTo(),
col_ali,
map_a2b.getScore(),
100 * alignlib.calculatePercentIdentity( map_a2b, sequences[x][1], sequences[y][1]),
sequences[x][1].getLength(),
sequences[y][1].getLength() ))
E.Stop()
| [
"[email protected]"
] |